Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
8 changes: 8 additions & 0 deletions queue_job/fields.py
Original file line number Diff line number Diff line change
Expand Up @@ -69,14 +69,20 @@ def convert_to_record(self, value, record):
class JobEncoder(json.JSONEncoder):
"""Encode Odoo recordsets so that we can later recompose them"""

def _get_record_context(self, obj):
context = obj.env.context.copy()
return context

def default(self, obj):
if isinstance(obj, models.BaseModel):
context = self._get_record_context(obj)
return {
"_type": "odoo_recordset",
"model": obj._name,
"ids": obj.ids,
"uid": obj.env.uid,
"su": obj.env.su,
"context": context,
}
elif isinstance(obj, datetime):
return {"_type": "datetime_isoformat", "value": obj.isoformat()}
Expand Down Expand Up @@ -107,6 +113,8 @@ def object_hook(self, obj):
type_ = obj["_type"]
if type_ == "odoo_recordset":
model = self.env(user=obj.get("uid"), su=obj.get("su"))[obj["model"]]
if obj.get("context"):
model = model.with_context(**obj.get("context"))

return model.browse(obj["ids"])
elif type_ == "datetime_isoformat":
Expand Down
38 changes: 34 additions & 4 deletions queue_job/job.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ def __init__(
description=None,
channel=None,
identity_key=None,
keep_context=False,
):
self.recordset = recordset
self.priority = priority
Expand All @@ -67,6 +68,7 @@ def __init__(
self.description = description
self.channel = channel
self.identity_key = identity_key
self.keep_context = keep_context

def __getattr__(self, name):
if name in self.recordset:
Expand All @@ -88,6 +90,7 @@ def delay(*args, **kwargs):
description=self.description,
channel=self.channel,
identity_key=self.identity_key,
keep_context=self.keep_context,
)

return delay
Expand Down Expand Up @@ -339,6 +342,7 @@ def enqueue(
description=None,
channel=None,
identity_key=None,
keep_context=False,
):
"""Create a Job and enqueue it in the queue. Return the job uuid.

Expand All @@ -359,6 +363,7 @@ def enqueue(
description=description,
channel=channel,
identity_key=identity_key,
keep_context=keep_context,
)
if new_job.identity_key:
existing = new_job.job_record_with_same_identity_key()
Expand Down Expand Up @@ -399,6 +404,7 @@ def __init__(
description=None,
channel=None,
identity_key=None,
keep_context=False,
):
""" Create a Job

Expand All @@ -423,8 +429,11 @@ def __init__(
:param identity_key: A hash to uniquely identify a job, or a function
that returns this hash (the function takes the job
as argument)
:param env: Odoo Environment
:type env: :class:`odoo.api.Environment`
:param keep_context: whether the current context should be restored
                     when the job is executed. Set to True to keep the
                     entire context, or provide a list of context keys
                     to keep.
:type keep_context: bool or list
"""
if args is None:
args = ()
Expand All @@ -445,6 +454,7 @@ def __init__(
self.recordset = recordset

self.env = env
self.keep_context = keep_context
self.job_model = self.env["queue.job"]
self.job_model_name = "queue.job"

Expand Down Expand Up @@ -591,11 +601,19 @@ def _store_values(self, create=False):
"method_name": self.method_name,
"job_function_id": self.job_config.job_function_id,
"channel_method_name": self.job_function_name,
"records": self.recordset,
"args": self.args,
"kwargs": self.kwargs,
}
)
# By default the context is completely reset
# (compatibility with previous version).
context = {}
if self.keep_context:
context = self.env.context.copy()
if isinstance(self.keep_context, list):
context = {k: context.get(k) for k in self.keep_context}
recordset = self.recordset.with_context(context)
vals.update({"records": recordset})

vals_from_model = self._store_values_from_model()
# Sanitize values: make sure you cannot screw core values
Expand All @@ -615,6 +633,17 @@ def _store_values_from_model(self):
vals = handler(self)
return vals

def _get_record_context(self):
"""
Get the context to execute the job
"""
context = {}
company_ids = []
if self.company_id:
company_ids = [self.company_id]
context.update({"job_uuid": self.uuid, "allowed_company_ids": company_ids})
return context

@property
def func_string(self):
model = repr(self.recordset)
Expand All @@ -628,7 +657,8 @@ def db_record(self):

@property
def func(self):
    """Return the job's bound method, on a recordset carrying the
    execution context built by :meth:`_get_record_context`.

    NOTE(review): the flattened diff kept both the old direct
    ``with_context(job_uuid=...)`` line and its replacement; the dead
    duplicate assignment is dropped here.
    """
    context = self._get_record_context()
    recordset = self.recordset.with_context(**context)
    return getattr(recordset, self.method_name)

@property
Expand Down
4 changes: 4 additions & 0 deletions queue_job/models/base.py
Original file line number Diff line number Diff line change
Expand Up @@ -44,6 +44,7 @@ def with_delay(
description=None,
channel=None,
identity_key=None,
keep_context=True,
):
""" Return a ``DelayableRecordset``

Expand Down Expand Up @@ -81,6 +82,8 @@ def with_delay(
the new job will not be added. It is either a
string, either a function that takes the job as
argument (see :py:func:`..job.identity_exact`).
:param keep_context: boolean to set if the current context
should be restored on the recordset (default: True).
:return: instance of a DelayableRecordset
:rtype: :class:`odoo.addons.queue_job.job.DelayableRecordset`

Expand Down Expand Up @@ -108,6 +111,7 @@ def with_delay(
description=description,
channel=channel,
identity_key=identity_key,
keep_context=keep_context,
)

def _patch_job_auto_delay(self, method_name, context_key=None):
Expand Down
2 changes: 2 additions & 0 deletions queue_job/tests/test_json_field.py
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@ def test_encoder_recordset(self):
"model": "res.partner",
"ids": [partner.id],
"su": False,
"context": {},
}
self.assertEqual(json.loads(value_json), expected)

Expand All @@ -42,6 +43,7 @@ def test_encoder_recordset_list(self):
"model": "res.partner",
"ids": [partner.id],
"su": False,
"context": {},
},
]
self.assertEqual(json.loads(value_json), expected)
Expand Down
96 changes: 96 additions & 0 deletions test_queue_job/tests/test_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,6 +2,7 @@
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)

import hashlib
import json
from datetime import datetime, timedelta

import mock
Expand All @@ -13,6 +14,7 @@
NoSuchJobError,
RetryableJobError,
)
from odoo.addons.queue_job.fields import JobEncoder
from odoo.addons.queue_job.job import (
DONE,
ENQUEUED,
Expand Down Expand Up @@ -200,6 +202,49 @@ def test_store_extra_data(self):
stored.invalidate_cache()
self.assertEqual(stored.additional_info, "JUST_TESTING_BUT_FAILED")

def test_company_simple(self):
company = self.env.ref("base.main_company")
eta = datetime.now() + timedelta(hours=5)
test_job = Job(
self.method,
args=("o", "k"),
kwargs={"return_context": 1},
priority=15,
eta=eta,
description="My description",
)
test_job.worker_pid = 99999 # normally set on "set_start"
test_job.company_id = company.id
test_job.store()
job_read = Job.load(self.env, test_job.uuid)
self.assertEqual(test_job.func, job_read.func)
result_ctx = test_job.func(*tuple(test_job.args), **test_job.kwargs)
self.assertEqual(result_ctx.get("allowed_company_ids"), company.ids)

def test_company_complex(self):
company1 = self.env.ref("base.main_company")
company2 = company1.create({"name": "Queue job company"})
companies = company1 | company2
self.env.user.write({"company_ids": [(6, False, companies.ids)]})
# Ensure the main company is still the first one
self.assertEqual(self.env.user.company_id, company1)
eta = datetime.now() + timedelta(hours=5)
test_job = Job(
self.method,
args=("o", "k"),
kwargs={"return_context": 1},
priority=15,
eta=eta,
description="My description",
)
test_job.worker_pid = 99999 # normally set on "set_start"
test_job.company_id = company2.id
test_job.store()
job_read = Job.load(self.env, test_job.uuid)
self.assertEqual(test_job.func, job_read.func)
result_ctx = test_job.func(*tuple(test_job.args), **test_job.kwargs)
self.assertEqual(result_ctx.get("allowed_company_ids"), company2.ids)

def test_read(self):
eta = datetime.now() + timedelta(hours=5)
test_job = Job(
Expand Down Expand Up @@ -536,6 +581,57 @@ def test_context_uuid(self):
self.assertTrue(key_present)
self.assertEqual(result["job_uuid"], test_job._uuid)

def test_context_custom_keep_context_default(self):
"""
Use with_delay without specifying the 'keep_context' key, so the
parameter's default value is exercised: the original context should
not be restored on the recordset.
"""
delayable = (
self.env["test.queue.job"].with_context(world_origin=42).with_delay()
)
test_job = delayable.testing_method()
expected_ctx = {
"job_uuid": test_job.uuid,
"allowed_company_ids": [test_job.company_id],
}
self.assertEqual(test_job._get_record_context(), expected_ctx)

def test_context_custom_keep_context_false(self):
"""
Use with_delay with keep_context explicitly set to False:
the context should not be restored with the recordset.
"""
delayable = (
self.env["test.queue.job"]
.with_context(world_origin=42)
.with_delay(keep_context=False)
)
test_job = delayable.testing_method()
expected_ctx = {
"job_uuid": test_job.uuid,
"allowed_company_ids": [test_job.company_id],
}
self.assertEqual(test_job._get_record_context(), expected_ctx)

def test_context_custom_keep_context_true(self):
"""
Use with_delay with keep_context explicitly set to True:
the context should be restored with the recordset.
"""
recordset = self.env["test.queue.job"].with_context(world_origin=42)
expected_ctx = recordset.env.context.copy()
value_json = json.dumps(recordset, cls=JobEncoder)
expected = {
"uid": recordset.env.uid,
"_type": "odoo_recordset",
"model": recordset._name,
"ids": recordset.ids,
"su": recordset.env.su,
"context": expected_ctx,
}
self.assertEqual(json.loads(value_json), expected)

def test_override_channel(self):
delayable = self.env["test.queue.job"].with_delay(channel="root.sub.sub")
test_job = delayable.testing_method(return_context=True)
Expand Down