diff --git a/queue_job/README.rst b/queue_job/README.rst index a6db9d9247..e3eaefd437 100644 --- a/queue_job/README.rst +++ b/queue_job/README.rst @@ -159,7 +159,7 @@ Example of job function: .. code-block:: XML - + action_done diff --git a/queue_job/__manifest__.py b/queue_job/__manifest__.py index 537db47cd0..e81bac5eba 100644 --- a/queue_job/__manifest__.py +++ b/queue_job/__manifest__.py @@ -3,7 +3,7 @@ { "name": "Job Queue", - "version": "13.0.3.2.0", + "version": "13.0.4.1.0", "author": "Camptocamp,ACSONE SA/NV,Odoo Community Association (OCA)", "website": "https://github.com/OCA/queue/queue_job", "license": "LGPL-3", diff --git a/queue_job/controllers/main.py b/queue_job/controllers/main.py index a0814bbbd5..4b658d627b 100644 --- a/queue_job/controllers/main.py +++ b/queue_job/controllers/main.py @@ -53,10 +53,12 @@ def retry_postpone(job, message, seconds=None): # ensure the job to run is in the correct state and lock the record env.cr.execute( - "SELECT state FROM queue_job WHERE uuid=%s AND state=%s FOR UPDATE", + "SELECT state, parent_id FROM queue_job WHERE uuid=%s AND state=%s FOR UPDATE", (job_uuid, ENQUEUED), ) - if not env.cr.fetchone(): + data = env.cr.fetchone() + + if not data: _logger.warn( "was requested to run job %s, but it does not exist, " "or is not in state %s", @@ -68,8 +70,17 @@ def retry_postpone(job, message, seconds=None): job = Job.load(env, job_uuid) assert job and job.state == ENQUEUED + if data and data[1]: + env.cr.execute( + "SELECT state FROM queue_job WHERE id=%s FOR UPDATE", ((data[1],)), + ) + st = env.cr.fetchone() + if st and st[0] != "done": + retry_postpone(job, "Sync Call", seconds=2) + return "" try: try: + self._try_perform_job(env, job) except OperationalError as err: # Automatically retry the typical transaction serialization @@ -77,10 +88,10 @@ def retry_postpone(job, message, seconds=None): if err.pgcode not in PG_CONCURRENCY_ERRORS_TO_RETRY: raise - retry_postpone( - job, tools.ustr(err.pgerror, 
errors="replace"), seconds=PG_RETRY - ) _logger.debug("%s OperationalError, postponed", job) + raise RetryableJobError( + tools.ustr(err.pgerror, errors="replace"), seconds=PG_RETRY + ) except NothingToDoJob as err: if str(err): @@ -95,25 +106,44 @@ def retry_postpone(job, message, seconds=None): # delay the job later, requeue retry_postpone(job, str(err), seconds=err.seconds) _logger.debug("%s postponed", job) + # Do not trigger the error up because we don't want an exception + # traceback in the logs we should have the traceback when all + # retries are exhausted + env.cr.rollback() - except (FailedJobError, Exception): + except (FailedJobError, Exception) as orig_exception: buff = StringIO() traceback.print_exc(file=buff) - _logger.error(buff.getvalue()) + traceback_txt = buff.getvalue() + _logger.error(traceback_txt) job.env.clear() with odoo.api.Environment.manage(): with odoo.registry(job.env.cr.dbname).cursor() as new_cr: job.env = job.env(cr=new_cr) - job.set_failed(exc_info=buff.getvalue()) + vals = self._get_failure_values(job, traceback_txt, orig_exception) + job.set_failed(**vals) job.store() new_cr.commit() + buff.close() raise return "" + def _get_failure_values(self, job, traceback_txt, orig_exception): + """Collect relevant data from exception.""" + exception_name = orig_exception.__class__.__name__ + if hasattr(orig_exception, "__module__"): + exception_name = orig_exception.__module__ + "." 
+ exception_name + exc_message = getattr(orig_exception, "name", str(orig_exception)) + return { + "exc_info": traceback_txt, + "exc_name": exception_name, + "exc_message": exc_message, + } + @http.route("/queue_job/create_test_job", type="http", auth="user") def create_test_job( - self, priority=None, max_retries=None, channel="root", description="Test job" + self, priority=None, max_retries=None, channel=None, description="Test job" ): if not http.request.env.user.has_group("base.group_erp_manager"): raise Forbidden(_("Access Denied")) diff --git a/queue_job/i18n/de.po b/queue_job/i18n/de.po index 0ce7ee2439..f5dd5f2459 100644 --- a/queue_job/i18n/de.po +++ b/queue_job/i18n/de.po @@ -44,7 +44,6 @@ msgstr "Aktivitäten" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__activity_exception_decoration #, fuzzy -#| msgid "Exception Information" msgid "Activity Exception Decoration" msgstr "Exception-Information" @@ -195,6 +194,12 @@ msgstr "Zeit der Einreihung" msgid "Enqueued" msgstr "Eingereiht" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_name +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception" +msgstr "" + #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_info msgid "Exception Info" @@ -205,11 +210,31 @@ msgstr "Exception-Info" msgid "Exception Information" msgstr "Exception-Information" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_message +msgid "Exception Message" +msgstr "" + +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception message" +msgstr "" + +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Exception:" +msgstr "" + #. 
module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__eta msgid "Execute only after" msgstr "Erst ausführen nach" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exec_time +msgid "Execution Time (avg)" +msgstr "" + #. module: queue_job #: model:ir.model.fields.selection,name:queue_job.selection__queue_job__state__failed #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search @@ -274,7 +299,6 @@ msgstr "Identitätsschlüssel" #. module: queue_job #: code:addons/queue_job/models/queue_job.py:0 #, fuzzy, python-format -#| msgid "The selected jobs will be requeued." msgid "If both parameters are 0, ALL jobs will be requeued!" msgstr "Die ausgewählten Jobs werden erneut eingereiht." @@ -337,7 +361,6 @@ msgstr "Job-Warteschlangenverwalter" #. module: queue_job #: model:ir.model.fields.selection,name:queue_job.selection__ir_model_fields__ttype__job_serialized #, fuzzy -#| msgid "Job failed" msgid "Job Serialized" msgstr "Job ist fehlgeschlagen" @@ -358,6 +381,8 @@ msgstr "Job unterbrochen und als Erledigt markiert: Es ist nicht zu tun." #: model:ir.model.fields,field_description:queue_job.field_queue_requeue_job__job_ids #: model:ir.ui.menu,name:queue_job.menu_queue_job #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_graph +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_pivot #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Jobs" msgstr "Jobs" @@ -426,7 +451,6 @@ msgstr "Nachrichten" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__method #, fuzzy -#| msgid "Method Name" msgid "Method" msgstr "Methodenname" @@ -438,6 +462,7 @@ msgstr "Methodenname" #. 
module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__model_name #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__model_id +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Model" msgstr "Modell" @@ -505,11 +530,6 @@ msgstr "Das ist die Anzahl von Nachrichten mit Übermittlungsfehler" msgid "Number of unread messages" msgstr "Das ist die Anzahl von ungelesenen Nachrichten" -#. module: queue_job -#: model:ir.model.fields,field_description:queue_job.field_queue_job__override_channel -msgid "Override Channel" -msgstr "Kanal überschreiben" - #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_channel__parent_id msgid "Parent Channel" @@ -554,7 +574,7 @@ msgstr "Job einreihen" #. module: queue_job #: code:addons/queue_job/models/queue_job.py:0 #, python-format -msgid "Queue jobs must created by calling 'with_delay()'." +msgid "Queue jobs must be created by calling 'with_delay()'." msgstr "" #. module: queue_job @@ -565,7 +585,6 @@ msgstr "Datensatz" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__records #, fuzzy -#| msgid "Record" msgid "Record(s)" msgstr "Datensatz" @@ -577,7 +596,6 @@ msgstr "Zugehörige Aktion anzeigen" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__edit_related_action #, fuzzy -#| msgid "Related Record" msgid "Related Action" msgstr "Zugehöriger Datensatz" @@ -745,6 +763,16 @@ msgstr "Die ausgewählten Jobs werden erneut eingereiht." msgid "The selected jobs will be set to done." msgstr "Die ausgewählten Jobs werden als Erledigt markiert." +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Time (s)" +msgstr "" + +#. module: queue_job +#: model:ir.model.fields,help:queue_job.field_queue_job__exec_time +msgid "Time required to execute this job in seconds. Average when grouped." 
+msgstr "" + #. module: queue_job #: model:ir.model.fields,help:queue_job.field_queue_job__activity_exception_decoration msgid "Type of the exception activity on record." @@ -809,6 +837,9 @@ msgstr "Assistent zur erneuten Einreihung einer Job-Auswahl" msgid "Worker Pid" msgstr "" +#~ msgid "Override Channel" +#~ msgstr "Kanal überschreiben" + #~ msgid "If checked new messages require your attention." #~ msgstr "" #~ "Wenn es gesetzt ist, erfordern neue Nachrichten Ihre Aufmerksamkeit." diff --git a/queue_job/i18n/queue_job.pot b/queue_job/i18n/queue_job.pot index 96bac153cd..62025f0d1a 100644 --- a/queue_job/i18n/queue_job.pot +++ b/queue_job/i18n/queue_job.pot @@ -188,6 +188,12 @@ msgstr "" msgid "Enqueued" msgstr "" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_name +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception" +msgstr "" + #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_info msgid "Exception Info" @@ -198,11 +204,31 @@ msgstr "" msgid "Exception Information" msgstr "" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_message +msgid "Exception Message" +msgstr "" + +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception message" +msgstr "" + +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Exception:" +msgstr "" + #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__eta msgid "Execute only after" msgstr "" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exec_time +msgid "Execution Time (avg)" +msgstr "" + #. 
module: queue_job #: model:ir.model.fields.selection,name:queue_job.selection__queue_job__state__failed #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search @@ -347,6 +373,8 @@ msgstr "" #: model:ir.model.fields,field_description:queue_job.field_queue_requeue_job__job_ids #: model:ir.ui.menu,name:queue_job.menu_queue_job #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_graph +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_pivot #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Jobs" msgstr "" @@ -425,6 +453,7 @@ msgstr "" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__model_name #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__model_id +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Model" msgstr "" @@ -492,11 +521,6 @@ msgstr "" msgid "Number of unread messages" msgstr "" -#. module: queue_job -#: model:ir.model.fields,field_description:queue_job.field_queue_job__override_channel -msgid "Override Channel" -msgstr "" - #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_channel__parent_id msgid "Parent Channel" @@ -540,7 +564,7 @@ msgstr "" #. module: queue_job #: code:addons/queue_job/models/queue_job.py:0 #, python-format -msgid "Queue jobs must created by calling 'with_delay()'." +msgid "Queue jobs must be created by calling 'with_delay()'." msgstr "" #. module: queue_job @@ -715,6 +739,16 @@ msgstr "" msgid "The selected jobs will be set to done." msgstr "" +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Time (s)" +msgstr "" + +#. module: queue_job +#: model:ir.model.fields,help:queue_job.field_queue_job__exec_time +msgid "Time required to execute this job in seconds. Average when grouped." +msgstr "" + #. 
module: queue_job #: model:ir.model.fields,help:queue_job.field_queue_job__activity_exception_decoration msgid "Type of the exception activity on record." diff --git a/queue_job/i18n/zh_CN.po b/queue_job/i18n/zh_CN.po index 2117912c34..241e5a166c 100644 --- a/queue_job/i18n/zh_CN.po +++ b/queue_job/i18n/zh_CN.po @@ -6,7 +6,7 @@ msgid "" msgstr "" "Project-Id-Version: Odoo Server 12.0\n" "Report-Msgid-Bugs-To: \n" -"PO-Revision-Date: 2020-03-23 06:13+0000\n" +"PO-Revision-Date: 2021-01-22 12:44+0000\n" "Last-Translator: 黎伟杰 <674416404@qq.com>\n" "Language-Team: none\n" "Language: zh_CN\n" @@ -14,7 +14,7 @@ msgstr "" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: \n" "Plural-Forms: nplurals=1; plural=0;\n" -"X-Generator: Weblate 3.10\n" +"X-Generator: Weblate 4.3.2\n" #. module: queue_job #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form @@ -29,7 +29,7 @@ msgstr "" #: code:addons/queue_job/controllers/main.py:0 #, python-format msgid "Access Denied" -msgstr "" +msgstr "拒绝访问" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__message_needaction @@ -193,6 +193,12 @@ msgstr "排队时间" msgid "Enqueued" msgstr "排队" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_name +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception" +msgstr "" + #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_info msgid "Exception Info" @@ -203,11 +209,31 @@ msgstr "异常信息" msgid "Exception Information" msgstr "异常信息" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exc_message +msgid "Exception Message" +msgstr "" + +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search +msgid "Exception message" +msgstr "" + +#. 
module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Exception:" +msgstr "" + #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__eta msgid "Execute only after" msgstr "仅在此之后执行" +#. module: queue_job +#: model:ir.model.fields,field_description:queue_job.field_queue_job__exec_time +msgid "Execution Time (avg)" +msgstr "" + #. module: queue_job #: model:ir.model.fields.selection,name:queue_job.selection__queue_job__state__failed #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search @@ -217,12 +243,12 @@ msgstr "失败" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_ir_model_fields__ttype msgid "Field Type" -msgstr "" +msgstr "字段类型" #. module: queue_job #: model:ir.model,name:queue_job.model_ir_model_fields msgid "Fields" -msgstr "" +msgstr "字段" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__message_follower_ids @@ -272,7 +298,6 @@ msgstr "身份密钥" #. module: queue_job #: code:addons/queue_job/models/queue_job.py:0 #, fuzzy, python-format -#| msgid "The selected jobs will be requeued." msgid "If both parameters are 0, ALL jobs will be requeued!" msgstr "所选作业将重新排队。" @@ -333,10 +358,8 @@ msgstr "作业队列管理员" #. module: queue_job #: model:ir.model.fields.selection,name:queue_job.selection__ir_model_fields__ttype__job_serialized -#, fuzzy -#| msgid "Job failed" msgid "Job Serialized" -msgstr "作业失败" +msgstr "作业序列化" #. 
module: queue_job #: model:mail.message.subtype,name:queue_job.mt_job_failed @@ -355,6 +378,8 @@ msgstr "作业中断并设置为已完成:无需执行任何操作。" #: model:ir.model.fields,field_description:queue_job.field_queue_requeue_job__job_ids #: model:ir.ui.menu,name:queue_job.menu_queue_job #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_graph +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_pivot #: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Jobs" msgstr "作业" @@ -364,12 +389,12 @@ msgstr "作业" #: model:ir.cron,cron_name:queue_job.ir_cron_queue_job_garbage_collector #: model:ir.cron,name:queue_job.ir_cron_queue_job_garbage_collector msgid "Jobs Garbage Collector" -msgstr "" +msgstr "作业垃圾收集器" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__kwargs msgid "Kwargs" -msgstr "关键字参数" +msgstr "Kwargs" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job____last_update @@ -422,10 +447,8 @@ msgstr "消息" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__method -#, fuzzy -#| msgid "Method Name" msgid "Method" -msgstr "方法名称" +msgstr "方法" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__method_name @@ -435,6 +458,7 @@ msgstr "方法名称" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__model_name #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__model_id +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_search msgid "Model" msgstr "模型" @@ -442,7 +466,7 @@ msgstr "模型" #: code:addons/queue_job/models/queue_job.py:0 #, python-format msgid "Model {} not found" -msgstr "" +msgstr "Model {} 找不到" #. 
module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_channel__name @@ -502,11 +526,6 @@ msgstr "递送错误消息数量" msgid "Number of unread messages" msgstr "未读消息数量" -#. module: queue_job -#: model:ir.model.fields,field_description:queue_job.field_queue_job__override_channel -msgid "Override Channel" -msgstr "覆盖频道" - #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_channel__parent_id msgid "Parent Channel" @@ -551,7 +570,7 @@ msgstr "队列作业" #. module: queue_job #: code:addons/queue_job/models/queue_job.py:0 #, python-format -msgid "Queue jobs must created by calling 'with_delay()'." +msgid "Queue jobs must be created by calling 'with_delay()'." msgstr "" #. module: queue_job @@ -562,7 +581,6 @@ msgstr "记录" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job__records #, fuzzy -#| msgid "Record" msgid "Record(s)" msgstr "记录" @@ -574,7 +592,6 @@ msgstr "相关的" #. module: queue_job #: model:ir.model.fields,field_description:queue_job.field_queue_job_function__edit_related_action #, fuzzy -#| msgid "Related Record" msgid "Related Action" msgstr "相关记录" @@ -739,6 +756,16 @@ msgstr "所选作业将重新排队。" msgid "The selected jobs will be set to done." msgstr "所选作业将设置为完成。" +#. module: queue_job +#: model_terms:ir.ui.view,arch_db:queue_job.view_queue_job_form +msgid "Time (s)" +msgstr "" + +#. module: queue_job +#: model:ir.model.fields,help:queue_job.field_queue_job__exec_time +msgid "Time required to execute this job in seconds. Average when grouped." +msgstr "" + #. module: queue_job #: model:ir.model.fields,help:queue_job.field_queue_job__activity_exception_decoration msgid "Type of the exception activity on record." @@ -803,6 +830,9 @@ msgstr "重新排队向导所选的作业" msgid "Worker Pid" msgstr "" +#~ msgid "Override Channel" +#~ msgstr "覆盖频道" + #~ msgid "If checked new messages require your attention." 
#~ msgstr "查看是否有需要留意的新消息。" diff --git a/queue_job/job.py b/queue_job/job.py index 568808c002..349a73c8ce 100644 --- a/queue_job/job.py +++ b/queue_job/job.py @@ -214,6 +214,14 @@ class Job(object): A description of the result (for humans). + .. attribute:: exc_name + + Exception error name when the job failed. + + .. attribute:: exc_message + + Exception error message when the job failed. + .. attribute:: exc_info Exception information (traceback) when the job failed. @@ -377,7 +385,7 @@ def enqueue( def db_record_from_uuid(env, job_uuid): model = env["queue.job"].sudo() record = model.search([("uuid", "=", job_uuid)], limit=1) - return record.with_env(env) + return record.with_env(env).sudo() def __init__( self, @@ -441,13 +449,7 @@ def __init__( self.job_model_name = "queue.job" self.job_config = ( - self.env["queue.job.function"] - .sudo() - .job_config( - self.env["queue.job.function"].job_function_name( - self.model_name, self.method_name - ) - ) + self.env["queue.job.function"].sudo().job_config(self.job_function_name) ) self.state = PENDING @@ -484,6 +486,8 @@ def __init__( self.date_done = None self.result = None + self.exc_name = None + self.exc_message = None self.exc_info = None if "company_id" in env.context: @@ -524,17 +528,36 @@ def perform(self): def store(self): """Store the Job""" + job_model = self.env["queue.job"] + # The sentinel is used to prevent edition sensitive fields (such as + # method_name) from RPC methods. 
+ edit_sentinel = job_model.EDIT_SENTINEL + + db_record = self.db_record() + if db_record: + db_record.with_context(_job_edit_sentinel=edit_sentinel).write( + self._store_values() + ) + else: + job_model.with_context(_job_edit_sentinel=edit_sentinel).sudo().create( + self._store_values(create=True) + ) + + def _store_values(self, create=False): vals = { "state": self.state, "priority": self.priority, "retry": self.retry, "max_retries": self.max_retries, + "exc_name": self.exc_name, + "exc_message": self.exc_message, "exc_info": self.exc_info, "company_id": self.company_id, "result": str(self.result) if self.result else False, "date_enqueued": False, "date_started": False, "date_done": False, + "exec_time": False, "eta": False, "identity_key": False, "worker_pid": self.worker_pid, @@ -546,40 +569,59 @@ def store(self): vals["date_started"] = self.date_started if self.date_done: vals["date_done"] = self.date_done + if self.exec_time: + vals["exec_time"] = self.exec_time if self.eta: vals["eta"] = self.eta if self.identity_key: vals["identity_key"] = self.identity_key - job_model = self.env["queue.job"] - # The sentinel is used to prevent edition sensitive fields (such as - # method_name) from RPC methods. 
- edit_sentinel = job_model.EDIT_SENTINEL - - db_record = self.db_record() - if db_record: - db_record.with_context(_job_edit_sentinel=edit_sentinel).write(vals) - else: - date_created = self.date_created - # The following values must never be modified after the - # creation of the job + if create: vals.update( { + "user_id": self.env.uid, + "channel": self.channel, + # The following values must never be modified after the + # creation of the job "uuid": self.uuid, "name": self.description, - "date_created": date_created, + "func_string": self.func_string, + "date_created": self.date_created, + "model_name": self.recordset._name, "method_name": self.method_name, + "job_function_id": self.job_config.job_function_id, + "channel_method_name": self.job_function_name, "records": self.recordset, "args": self.args, "kwargs": self.kwargs, } ) - # it the channel is not specified, lets the job_model compute - # the right one to use - if self.channel: - vals.update({"channel": self.channel}) - job_model.with_context(_job_edit_sentinel=edit_sentinel).sudo().create(vals) + vals_from_model = self._store_values_from_model() + # Sanitize values: make sure you cannot screw core values + vals_from_model = {k: v for k, v in vals_from_model.items() if k not in vals} + vals.update(vals_from_model) + return vals + + def _store_values_from_model(self): + vals = {} + value_handlers_candidates = ( + "_job_store_values_for_" + self.method_name, + "_job_store_values", + ) + for candidate in value_handlers_candidates: + handler = getattr(self.recordset, candidate, None) + if handler is not None: + vals = handler(self) + return vals + + @property + def func_string(self): + model = repr(self.recordset) + args = [repr(arg) for arg in self.args] + kwargs = ["{}={!r}".format(key, val) for key, val in self.kwargs.items()] + all_args = ", ".join(args + kwargs) + return "{}.{}({})".format(model, self.method_name, all_args) def db_record(self): return self.db_record_from_uuid(self.env, self.uuid) @@ 
-589,6 +631,11 @@ def func(self): recordset = self.recordset.with_context(job_uuid=self.uuid) return getattr(recordset, self.method_name) + @property + def job_function_name(self): + func_model = self.env["queue.job.function"].sudo() + return func_model.job_function_name(self.recordset._name, self.method_name) + @property def identity_key(self): if self._identity_key is None: @@ -646,10 +693,25 @@ def eta(self, value): else: self._eta = value + @property + def channel(self): + return self._channel or self.job_config.channel + + @channel.setter + def channel(self, value): + self._channel = value + + @property + def exec_time(self): + if self.date_done and self.date_started: + return (self.date_done - self.date_started).total_seconds() + return None + def set_pending(self, result=None, reset_retry=True): self.state = PENDING self.date_enqueued = None self.date_started = None + self.date_done = None self.worker_pid = None if reset_retry: self.retry = 0 @@ -669,15 +731,17 @@ def set_started(self): def set_done(self, result=None): self.state = DONE + self.exc_name = None self.exc_info = None self.date_done = datetime.now() if result is not None: self.result = result - def set_failed(self, exc_info=None): + def set_failed(self, **kw): self.state = FAILED - if exc_info is not None: - self.exc_info = exc_info + for k, v in kw.items(): + if v is not None: + setattr(self, k, v) def __repr__(self): return "" % (self.uuid, self.priority) @@ -709,6 +773,7 @@ def postpone(self, result=None, seconds=None): """ eta_seconds = self._get_retry_seconds(seconds) self.eta = timedelta(seconds=eta_seconds) + self.exc_name = None self.exc_info = None if result is not None: self.result = result diff --git a/queue_job/migrations/13.0.3.7.0/pre-migration.py b/queue_job/migrations/13.0.3.7.0/pre-migration.py new file mode 100644 index 0000000000..c14d6800ad --- /dev/null +++ b/queue_job/migrations/13.0.3.7.0/pre-migration.py @@ -0,0 +1,35 @@ +# License LGPL-3.0 or later 
(http://www.gnu.org/licenses/lgpl.html) + +import logging + +from odoo.tools.sql import column_exists + +_logger = logging.getLogger(__name__) + + +def migrate(cr, version): + if not column_exists(cr, "queue_job", "exec_time"): + # Disable trigger otherwise the update takes ages. + cr.execute( + """ + ALTER TABLE queue_job DISABLE TRIGGER queue_job_notify; + """ + ) + cr.execute( + """ + ALTER TABLE queue_job ADD COLUMN exec_time double precision DEFAULT 0; + """ + ) + cr.execute( + """ + UPDATE + queue_job + SET + exec_time = EXTRACT(EPOCH FROM (date_done - date_started)); + """ + ) + cr.execute( + """ + ALTER TABLE queue_job ENABLE TRIGGER queue_job_notify; + """ + ) diff --git a/queue_job/migrations/13.0.3.8.0/post-migration.py b/queue_job/migrations/13.0.3.8.0/post-migration.py new file mode 100644 index 0000000000..f6eff72707 --- /dev/null +++ b/queue_job/migrations/13.0.3.8.0/post-migration.py @@ -0,0 +1,47 @@ +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +import logging + +from odoo import SUPERUSER_ID, api + +_logger = logging.getLogger(__name__) + + +def migrate(cr, version): + with api.Environment.manage(): + env = api.Environment(cr, SUPERUSER_ID, {}) + _logger.info("Computing exception name for failed jobs") + _compute_jobs_new_values(env) + + +def _compute_jobs_new_values(env): + for job in env["queue.job"].search( + [("state", "=", "failed"), ("exc_info", "!=", False)] + ): + exception_details = _get_exception_details(job) + if exception_details: + job.update(exception_details) + + +def _get_exception_details(job): + for line in reversed(job.exc_info.splitlines()): + if _find_exception(line): + name, msg = line.split(":", 1) + return { + "exc_name": name.strip(), + "exc_message": msg.strip("()', \""), + } + + +def _find_exception(line): + # Just a list of common errors. + # If you want to target others, add your own migration step for your db. 
+ exceptions = ( + "Error:", # catch all well named exceptions + # other live instance errors found + "requests.exceptions.MissingSchema", + "botocore.errorfactory.NoSuchKey", + ) + for exc in exceptions: + if exc in line: + return exc diff --git a/queue_job/models/base.py b/queue_job/models/base.py index 3bb4d78361..a83f457900 100644 --- a/queue_job/models/base.py +++ b/queue_job/models/base.py @@ -1,11 +1,12 @@ # Copyright 2016 Camptocamp # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) +import functools import inspect import logging import os -from odoo import models +from odoo import api, models from ..job import DelayableRecordset @@ -108,3 +109,110 @@ def with_delay( channel=channel, identity_key=identity_key, ) + + def _patch_job_auto_delay(self, method_name, context_key=None): + """Patch a method to be automatically delayed as job method when called + + This patch method has to be called in ``_register_hook`` (example + below). + + When a method is patched, any call to the method will not directly + execute the method's body, but will instead enqueue a job. + + When a ``context_key`` is set when calling ``_patch_job_auto_delay``, + the patched method is automatically delayed only when this key is + ``True`` in the caller's context. It is advised to patch the method + with a ``context_key``, because making the automatic delay *in any + case* can produce nasty and unexpected side effects (e.g. another + module calls the method and expects it to be computed before doing + something else, expecting a result, ...). + + A typical use case is when a method in a module we don't control is + called synchronously in the middle of another method, and we'd like all + the calls to this method become asynchronous. + + The options of the job usually passed to ``with_delay()`` (priority, + description, identity_key, ...) 
can be returned in a dictionary by a + method named after the name of the method suffixed by ``_job_options`` + which takes the same parameters as the initial method. + + It is still possible to force synchronous execution of the method by + setting a key ``_job_force_sync`` to True in the environment context. + + Example patching the "foo" method to be automatically delayed as job + (the job options method is optional): + + .. code-block:: python + + # original method: + def foo(self, arg1): + print("hello", arg1) + + def large_method(self): + # doing a lot of things + self.foo("world") + # doing a lot of other things + + def button_x(self): + self.with_context(auto_delay_foo=True).large_method() + + # auto delay patch: + def foo_job_options(self, arg1): + return { + "priority": 100, + "description": "Saying hello to {}".format(arg1) + } + + def _register_hook(self): + self._patch_method( + "foo", + self._patch_job_auto_delay("foo", context_key="auto_delay_foo") + ) + return super()._register_hook() + + The result when ``button_x`` is called, is that a new job for ``foo`` + is delayed. 
+ """ + + def auto_delay_wrapper(self, *args, **kwargs): + # when no context_key is set, we delay in any case (warning, can be + # dangerous) + context_delay = self.env.context.get(context_key) if context_key else True + if ( + self.env.context.get("job_uuid") + or not context_delay + or self.env.context.get("_job_force_sync") + or self.env.context.get("test_queue_job_no_delay") + ): + # we are in the job execution + return auto_delay_wrapper.origin(self, *args, **kwargs) + else: + # replace the synchronous call by a job on itself + method_name = auto_delay_wrapper.origin.__name__ + job_options_method = getattr( + self, "{}_job_options".format(method_name), None + ) + job_options = {} + if job_options_method: + job_options.update(job_options_method(*args, **kwargs)) + delayed = self.with_delay(**job_options) + return getattr(delayed, method_name)(*args, **kwargs) + + origin = getattr(self, method_name) + return functools.update_wrapper(auto_delay_wrapper, origin) + + @api.model + def _job_store_values(self, job): + """Hook for manipulating job stored values. + + You can define a more specific hook for a job function + by defining a method name with this pattern: + + `_job_store_values_for_${method_name}` + + NOTE: values will be stored only if they match stored fields on `queue.job`. + + :param job: current queue_job.job.Job instance. + :return: dictionary for setting job values. 
+ """ + return {} diff --git a/queue_job/models/queue_job.py b/queue_job/models/queue_job.py index 1da0eaf86d..d42fd5b7fc 100644 --- a/queue_job/models/queue_job.py +++ b/queue_job/models/queue_job.py @@ -43,27 +43,22 @@ class QueueJob(models.Model): "date_created", "model_name", "method_name", + "func_string", + "channel_method_name", + "job_function_id", "records", "args", "kwargs", ) uuid = fields.Char(string="UUID", readonly=True, index=True, required=True) - user_id = fields.Many2one( - comodel_name="res.users", - string="User ID", - compute="_compute_user_id", - inverse="_inverse_user_id", - store=True, - ) + user_id = fields.Many2one(comodel_name="res.users", string="User ID") company_id = fields.Many2one( comodel_name="res.company", string="Company", index=True ) name = fields.Char(string="Description", readonly=True) - model_name = fields.Char( - string="Model", compute="_compute_model_name", store=True, readonly=True - ) + model_name = fields.Char(string="Model", readonly=True) method_name = fields.Char(readonly=True) # record_ids field is only for backward compatibility (e.g. 
used in related # actions), can be removed (replaced by "records") in 14.0 @@ -73,12 +68,12 @@ class QueueJob(models.Model): ) args = JobSerialized(readonly=True, base_type=tuple) kwargs = JobSerialized(readonly=True, base_type=dict) - func_string = fields.Char( - string="Task", compute="_compute_func_string", readonly=True, store=True - ) + func_string = fields.Char(string="Task", readonly=True) state = fields.Selection(STATES, readonly=True, required=True, index=True) priority = fields.Integer() + exc_name = fields.Char(string="Exception", readonly=True) + exc_message = fields.Char(string="Exception Message", readonly=True) exc_info = fields.Text(string="Exception Info", readonly=True) result = fields.Text(readonly=True) @@ -86,6 +81,11 @@ class QueueJob(models.Model): date_started = fields.Datetime(string="Start Date", readonly=True) date_enqueued = fields.Datetime(string="Enqueue Time", readonly=True) date_done = fields.Datetime(readonly=True) + exec_time = fields.Float( + string="Execution Time (avg)", + group_operator="avg", + help="Time required to execute this job in seconds. Average when grouped.", + ) eta = fields.Datetime(string="Execute only after") retry = fields.Integer(string="Current try") @@ -95,24 +95,17 @@ class QueueJob(models.Model): "max. 
retries.\n" "Retries are infinite when empty.", ) - channel_method_name = fields.Char( - readonly=True, compute="_compute_job_function", store=True - ) + # FIXME the name of this field is very confusing + channel_method_name = fields.Char(readonly=True) job_function_id = fields.Many2one( - comodel_name="queue.job.function", - compute="_compute_job_function", - string="Job Function", - readonly=True, - store=True, + comodel_name="queue.job.function", string="Job Function", readonly=True, ) - override_channel = fields.Char() - channel = fields.Char( - compute="_compute_channel", inverse="_inverse_channel", store=True, index=True - ) + channel = fields.Char(index=True) - identity_key = fields.Char() - worker_pid = fields.Integer() + identity_key = fields.Char(readonly=True) + worker_pid = fields.Integer(readonly=True) + parent_id = fields.Many2one("queue.job", string="Parent Job") def init(self): self._cr.execute( @@ -126,67 +119,23 @@ def init(self): "'enqueued') AND identity_key IS NOT NULL;" ) - @api.depends("records") - def _compute_user_id(self): - for record in self: - record.user_id = record.records.env.uid - - def _inverse_user_id(self): - for record in self.with_context(_job_edit_sentinel=self.EDIT_SENTINEL): - record.records = record.records.with_user(record.user_id.id) - - @api.depends("records") - def _compute_model_name(self): - for record in self: - record.model_name = record.records._name - @api.depends("records") def _compute_record_ids(self): for record in self: record.record_ids = record.records.ids - def _inverse_channel(self): - for record in self: - record.override_channel = record.channel - - @api.depends("job_function_id.channel_id") - def _compute_channel(self): - for record in self: - channel = ( - record.override_channel or record.job_function_id.channel or "root" - ) - if record.channel != channel: - record.channel = channel - - @api.depends("model_name", "method_name", "job_function_id.channel_id") - def _compute_job_function(self): - for 
record in self: - func_model = self.env["queue.job.function"] - channel_method_name = func_model.job_function_name( - record.model_name, record.method_name - ) - function = func_model.search([("name", "=", channel_method_name)], limit=1) - record.channel_method_name = channel_method_name - record.job_function_id = function - - @api.depends("model_name", "method_name", "records", "args", "kwargs") - def _compute_func_string(self): - for record in self: - model = repr(record.records) - args = [repr(arg) for arg in record.args] - kwargs = ["{}={!r}".format(key, val) for key, val in record.kwargs.items()] - all_args = ", ".join(args + kwargs) - record.func_string = "{}.{}({})".format(model, record.method_name, all_args) - @api.model_create_multi def create(self, vals_list): if self.env.context.get("_job_edit_sentinel") is not self.EDIT_SENTINEL: # Prevent to create a queue.job record "raw" from RPC. # ``with_delay()`` must be used. raise exceptions.AccessError( - _("Queue jobs must created by calling 'with_delay()'.") + _("Queue jobs must be created by calling 'with_delay()'.") ) - return super().create(vals_list) + return super( + QueueJob, + self.with_context(mail_create_nolog=True, mail_create_nosubscribe=True), + ).create(vals_list) def write(self, vals): if self.env.context.get("_job_edit_sentinel") is not self.EDIT_SENTINEL: @@ -200,10 +149,25 @@ def write(self, vals): ) ) + different_user_jobs = self.browse() + if vals.get("user_id"): + different_user_jobs = self.filtered( + lambda records: records.env.user.id != vals["user_id"] + ) + if vals.get("state") == "failed": self._message_post_on_failure() - return super().write(vals) + result = super().write(vals) + + for record in different_user_jobs: + # the user is stored in the env of the record, but we still want to + # have a stored user_id field to be able to search/groupby, so + # synchronize the env of records with user_id + super(QueueJob, record).write( + {"records": 
record.records.with_user(vals["user_id"])} + ) + return result def open_related_action(self): """Open the related action associated to the job""" @@ -243,9 +207,10 @@ def _message_post_on_failure(self): # subscribe the users now to avoid to subscribe them # at every job creation domain = self._subscribe_users_domain() - users = self.env["res.users"].search(domain) - self.message_subscribe(partner_ids=users.mapped("partner_id").ids) + base_users = self.env["res.users"].search(domain) for record in self: + users = base_users | record.user_id + record.message_subscribe(partner_ids=users.mapped("partner_id").ids) msg = record._message_failed_job() if msg: record.message_post(body=msg, subtype="queue_job.mt_job_failed") @@ -515,7 +480,8 @@ class JobFunction(models.Model): "retry_pattern " "related_action_enable " "related_action_func_name " - "related_action_kwargs ", + "related_action_kwargs " + "job_function_id ", ) def _default_channel(self): @@ -637,6 +603,7 @@ def job_default_config(self): related_action_enable=True, related_action_func_name=None, related_action_kwargs={}, + job_function_id=None, ) def _parse_retry_pattern(self): @@ -669,6 +636,7 @@ def job_config(self, name): related_action_enable=config.related_action.get("enable", True), related_action_func_name=config.related_action.get("func_name"), related_action_kwargs=config.related_action.get("kwargs"), + job_function_id=config.id, ) def _retry_pattern_format_error_message(self): diff --git a/queue_job/readme/USAGE.rst b/queue_job/readme/USAGE.rst index 6c472eccf9..c8ff94b793 100644 --- a/queue_job/readme/USAGE.rst +++ b/queue_job/readme/USAGE.rst @@ -25,7 +25,7 @@ Example of job function: .. code-block:: XML - + action_done diff --git a/queue_job/static/description/index.html b/queue_job/static/description/index.html index 037e104684..ded805e543 100644 --- a/queue_job/static/description/index.html +++ b/queue_job/static/description/index.html @@ -508,7 +508,7 @@

Developers

Example of job function:

 <record id="job_function_sale_order_action_done" model="queue.job.function">
-    <field name="model_id" ref="sale.model_sale_order"</field>
+    <field name="model_id" ref="sale.model_sale_order" />
     <field name="method">action_done</field>
     <field name="channel_id" ref="channel_sale" />
     <field name="related_action" eval='{"func_name": "custom_related_action"}' />
diff --git a/queue_job/tests/common.py b/queue_job/tests/common.py
index 6795965b76..efbd0e9dba 100644
--- a/queue_job/tests/common.py
+++ b/queue_job/tests/common.py
@@ -1,9 +1,13 @@
 # Copyright 2019 Camptocamp
 # License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
+import doctest
 from contextlib import contextmanager
 
 import mock
 
+from odoo.tests import BaseCase, tagged
+
+# pylint: disable=odoo-addons-relative-import
 from odoo.addons.queue_job.job import Job
 
 
@@ -96,3 +100,37 @@ def test_export(self):
         delayable = mock.MagicMock(name="DelayableBinding")
         delayable_cls.return_value = delayable
         yield delayable_cls, delayable
+
+
+@tagged("doctest")
+class OdooDocTestCase(BaseCase):
+    """
+    We need a custom DocTestCase class in order to:
+    - define test_tags to run as part of standard tests
+    - output a more meaningful test name than the default "DocTestCase.runTest"
+    """
+
+    __qualname__ = "doctests for "
+
+    def __init__(self, test):
+        self.__test = test
+        self.__name = test._dt_test.name
+        super().__init__(self.__name)
+
+    def __getattr__(self, item):
+        if item == self.__name:
+            return self.__test
+
+
+def load_doctests(module):
+    """
+    Generates a tests loading method for the doctests of the given module
+    https://docs.python.org/3/library/unittest.html#load-tests-protocol
+    """
+
+    def load_tests(loader, tests, ignore):
+        for test in doctest.DocTestSuite(module):
+            tests.addTest(OdooDocTestCase(test))
+        return tests
+
+    return load_tests
diff --git a/queue_job/tests/test_model_job_function.py b/queue_job/tests/test_model_job_function.py
index c9bdea56e8..e6ddf3fcc3 100644
--- a/queue_job/tests/test_model_job_function.py
+++ b/queue_job/tests/test_model_job_function.py
@@ -31,7 +31,7 @@ def test_function_job_config(self):
         channel = self.env["queue.job.channel"].create(
             {"name": "foo", "parent_id": self.env.ref("queue_job.channel_root").id}
         )
-        self.env["queue.job.function"].create(
+        job_function = self.env["queue.job.function"].create(
             {
                 "model_id": self.env.ref("base.model_res_users").id,
                 "method": "read",
@@ -52,5 +52,6 @@ def test_function_job_config(self):
                 related_action_enable=True,
                 related_action_func_name="related_action_foo",
                 related_action_kwargs={"b": 1},
+                job_function_id=job_function.id,
             ),
         )
diff --git a/queue_job/tests/test_runner_channels.py b/queue_job/tests/test_runner_channels.py
index 93333fa490..d323d00683 100644
--- a/queue_job/tests/test_runner_channels.py
+++ b/queue_job/tests/test_runner_channels.py
@@ -1,13 +1,10 @@
 # Copyright 2015-2016 Camptocamp SA
 # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
 
-import doctest
-
 # pylint: disable=odoo-addons-relative-import
 # we are testing, we want to test as we were an external consumer of the API
 from odoo.addons.queue_job.jobrunner import channels
 
+from .common import load_doctests
 
-def load_tests(loader, tests, ignore):
-    tests.addTests(doctest.DocTestSuite(channels))
-    return tests
+load_tests = load_doctests(channels)
diff --git a/queue_job/tests/test_runner_runner.py b/queue_job/tests/test_runner_runner.py
index 817ac6396e..c6486e27ef 100644
--- a/queue_job/tests/test_runner_runner.py
+++ b/queue_job/tests/test_runner_runner.py
@@ -1,13 +1,10 @@
 # Copyright 2015-2016 Camptocamp SA
 # License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html)
 
-import doctest
-
 # pylint: disable=odoo-addons-relative-import
 # we are testing, we want to test as we were an external consumer of the API
 from odoo.addons.queue_job.jobrunner import runner
 
+from .common import load_doctests
 
-def load_tests(loader, tests, ignore):
-    tests.addTests(doctest.DocTestSuite(runner))
-    return tests
+load_tests = load_doctests(runner)
diff --git a/queue_job/views/queue_job_views.xml b/queue_job/views/queue_job_views.xml
index 1197495ead..c143da9eea 100644
--- a/queue_job/views/queue_job_views.xml
+++ b/queue_job/views/queue_job_views.xml
@@ -44,6 +44,7 @@
                         
                             
                             
+                            
                             
                             
                             
+                            
+                            
                         
                     
                     
@@ -68,6 +71,18 @@
                             > If the max. retries is 0, the number of retries is infinite.
                         
                     
+                    
+                        
+
+ +
- - -
@@ -104,15 +112,39 @@ - + + + + + + queue.job.pivot + queue.job + + + + + + + + + + queue.job.graph + queue.job + + + + + + + queue.job.search queue.job @@ -123,6 +155,11 @@ + + + + + + + + @@ -172,7 +224,7 @@ Jobs queue.job - tree,form + tree,form,pivot,graph {'search_default_pending': 1, 'search_default_enqueued': 1, 'search_default_started': 1, diff --git a/test_queue_job/__manifest__.py b/test_queue_job/__manifest__.py index c116362ff3..56661d6659 100644 --- a/test_queue_job/__manifest__.py +++ b/test_queue_job/__manifest__.py @@ -3,7 +3,7 @@ { "name": "Queue Job Tests", - "version": "13.0.2.1.0", + "version": "13.0.2.4.0", "author": "Camptocamp,Odoo Community Association (OCA)", "license": "LGPL-3", "category": "Generic Modules", diff --git a/test_queue_job/i18n/test_queue_job.pot b/test_queue_job/i18n/test_queue_job.pot index ba5e3fc45f..1b9d45a9a2 100644 --- a/test_queue_job/i18n/test_queue_job.pot +++ b/test_queue_job/i18n/test_queue_job.pot @@ -13,6 +13,11 @@ msgstr "" "Content-Transfer-Encoding: \n" "Plural-Forms: \n" +#. module: test_queue_job +#: model:ir.model.fields,field_description:test_queue_job.field_queue_job__additional_info +msgid "Additional Info" +msgstr "" + #. module: test_queue_job #: model:ir.model.fields,field_description:test_queue_job.field_test_queue_channel__create_uid #: model:ir.model.fields,field_description:test_queue_job.field_test_queue_job__create_uid diff --git a/test_queue_job/i18n/zh_CN.po b/test_queue_job/i18n/zh_CN.po index b8e3b8ea9a..8cac393e3c 100644 --- a/test_queue_job/i18n/zh_CN.po +++ b/test_queue_job/i18n/zh_CN.po @@ -1,6 +1,6 @@ # Translation of Odoo Server. # This file contains the translation of the following modules: -# * test_queue_job +# * test_queue_job # msgid "" msgstr "" @@ -16,6 +16,11 @@ msgstr "" "Plural-Forms: nplurals=1; plural=0;\n" "X-Generator: Weblate 3.8\n" +#. 
module: test_queue_job +#: model:ir.model.fields,field_description:test_queue_job.field_queue_job__additional_info +msgid "Additional Info" +msgstr "" + #. module: test_queue_job #: model:ir.model.fields,field_description:test_queue_job.field_test_queue_channel__create_uid #: model:ir.model.fields,field_description:test_queue_job.field_test_queue_job__create_uid diff --git a/test_queue_job/models/test_models.py b/test_queue_job/models/test_models.py index 36fdb1c6f9..2812855a20 100644 --- a/test_queue_job/models/test_models.py +++ b/test_queue_job/models/test_models.py @@ -10,6 +10,8 @@ class QueueJob(models.Model): _inherit = "queue.job" + additional_info = fields.Char() + def testing_related_method(self, **kwargs): return self, kwargs @@ -61,6 +63,39 @@ def job_alter_mutable(self, mutable_arg, mutable_kwarg=None): mutable_kwarg["b"] = 2 return mutable_arg, mutable_kwarg + def delay_me(self, arg, kwarg=None): + return arg, kwarg + + def delay_me_options_job_options(self): + return { + "identity_key": "my_job_identity", + } + + def delay_me_options(self): + return "ok" + + def delay_me_context_key(self): + return "ok" + + def _register_hook(self): + self._patch_method("delay_me", self._patch_job_auto_delay("delay_me")) + self._patch_method( + "delay_me_options", self._patch_job_auto_delay("delay_me_options") + ) + self._patch_method( + "delay_me_context_key", + self._patch_job_auto_delay( + "delay_me_context_key", context_key="auto_delay_delay_me_context_key" + ), + ) + return super()._register_hook() + + def _job_store_values(self, job): + value = "JUST_TESTING" + if job.state == "failed": + value += "_BUT_FAILED" + return {"additional_info": value} + class TestQueueChannel(models.Model): diff --git a/test_queue_job/tests/__init__.py b/test_queue_job/tests/__init__.py index 9af8df15a0..502a0752fd 100644 --- a/test_queue_job/tests/__init__.py +++ b/test_queue_job/tests/__init__.py @@ -1,4 +1,5 @@ from . import test_autovacuum from . import test_job +from . 
import test_job_auto_delay from . import test_job_channels from . import test_related_actions diff --git a/test_queue_job/tests/test_job.py b/test_queue_job/tests/test_job.py index f9b679cc27..e0224ebf3d 100644 --- a/test_queue_job/tests/test_job.py +++ b/test_queue_job/tests/test_job.py @@ -7,7 +7,6 @@ import mock import odoo.tests.common as common -from odoo import SUPERUSER_ID from odoo.addons.queue_job.exception import ( FailedJobError, @@ -150,6 +149,7 @@ def test_worker_pid(self): def test_set_done(self): job_a = Job(self.method) + job_a.date_started = datetime(2015, 3, 15, 16, 40, 0) datetime_path = "odoo.addons.queue_job.job.datetime" with mock.patch(datetime_path, autospec=True) as mock_datetime: mock_datetime.now.return_value = datetime(2015, 3, 15, 16, 41, 0) @@ -158,13 +158,20 @@ def test_set_done(self): self.assertEquals(job_a.state, DONE) self.assertEquals(job_a.result, "test") self.assertEquals(job_a.date_done, datetime(2015, 3, 15, 16, 41, 0)) + self.assertEquals(job_a.exec_time, 60.0) self.assertFalse(job_a.exc_info) def test_set_failed(self): job_a = Job(self.method) - job_a.set_failed(exc_info="failed test") + job_a.set_failed( + exc_info="failed test", + exc_name="FailedTest", + exc_message="Sadly this job failed", + ) self.assertEquals(job_a.state, FAILED) self.assertEquals(job_a.exc_info, "failed test") + self.assertEquals(job_a.exc_name, "FailedTest") + self.assertEquals(job_a.exc_message, "Sadly this job failed") def test_postpone(self): job_a = Job(self.method) @@ -183,6 +190,16 @@ def test_store(self): stored = self.queue_job.search([("uuid", "=", test_job.uuid)]) self.assertEqual(len(stored), 1) + def test_store_extra_data(self): + test_job = Job(self.method) + test_job.store() + stored = self.queue_job.search([("uuid", "=", test_job.uuid)]) + self.assertEqual(stored.additional_info, "JUST_TESTING") + test_job.set_failed(exc_info="failed test", exc_name="FailedTest") + test_job.store() + stored.invalidate_cache() + 
self.assertEqual(stored.additional_info, "JUST_TESTING_BUT_FAILED") + def test_read(self): eta = datetime.now() + timedelta(hours=5) test_job = Job( @@ -234,6 +251,7 @@ def test_read(self): self.assertAlmostEqual(job_read.date_started, test_date, delta=delta) self.assertAlmostEqual(job_read.date_enqueued, test_date, delta=delta) self.assertAlmostEqual(job_read.date_done, test_date, delta=delta) + self.assertAlmostEqual(job_read.exec_time, 0.0) def test_job_unlinked(self): test_job = Job(self.method, args=("o", "k"), kwargs={"c": "!"}) @@ -481,7 +499,7 @@ def test_message_when_write_fail(self): stored.write({"state": "failed"}) self.assertEqual(stored.state, FAILED) messages = stored.message_ids - self.assertEqual(len(messages), 2) + self.assertEqual(len(messages), 1) def test_follower_when_write_fail(self): """Check that inactive users doesn't are not followers even if @@ -540,6 +558,22 @@ def setUp(self): User = self.env["res.users"] Company = self.env["res.company"] Partner = self.env["res.partner"] + + main_company = self.env.ref("base.main_company") + + self.partner_user = Partner.create( + {"name": "Simple User", "email": "simple.user@example.com"} + ) + self.simple_user = User.create( + { + "partner_id": self.partner_user.id, + "company_ids": [(4, main_company.id)], + "login": "simple_user", + "name": "simple user", + "groups_id": [], + } + ) + self.other_partner_a = Partner.create( {"name": "My Company a", "is_company": True, "email": "test@tes.ttest"} ) @@ -556,7 +590,7 @@ def setUp(self): "company_id": self.other_company_a.id, "company_ids": [(4, self.other_company_a.id)], "login": "my_login a", - "name": "my user", + "name": "my user A", "groups_id": [(4, grp_queue_job_manager)], } ) @@ -576,16 +610,11 @@ def setUp(self): "company_id": self.other_company_b.id, "company_ids": [(4, self.other_company_b.id)], "login": "my_login_b", - "name": "my user 1", + "name": "my user B", "groups_id": [(4, grp_queue_job_manager)], } ) - def _subscribe_users(self, 
stored): - domain = stored._subscribe_users_domain() - users = self.env["res.users"].search(domain) - stored.message_subscribe(partner_ids=users.mapped("partner_id").ids) - def _create_job(self, env): self.cr.execute("delete from queue_job") env["test.queue.job"].with_delay().testing_method() @@ -631,11 +660,14 @@ def test_job_subscription(self): # queue_job.group_queue_job_manager must be followers User = self.env["res.users"] no_company_context = dict(self.env.context, company_id=None) - no_company_env = self.env(context=no_company_context) + no_company_env = self.env(user=self.simple_user, context=no_company_context) stored = self._create_job(no_company_env) - self._subscribe_users(stored) - users = User.with_context(active_test=False).search( - [("groups_id", "=", self.ref("queue_job.group_queue_job_manager"))] + stored._message_post_on_failure() + users = ( + User.search( + [("groups_id", "=", self.ref("queue_job.group_queue_job_manager"))] + ) + + stored.user_id ) self.assertEqual(len(stored.message_follower_ids), len(users)) expected_partners = [u.partner_id for u in users] @@ -649,13 +681,13 @@ def test_job_subscription(self): # jobs created for a specific company_id are followed only by # company's members company_a_context = dict(self.env.context, company_id=self.other_company_a.id) - company_a_env = self.env(context=company_a_context) + company_a_env = self.env(user=self.simple_user, context=company_a_context) stored = self._create_job(company_a_env) stored.with_user(self.other_user_a.id) - self._subscribe_users(stored) - # 2 because admin + self.other_partner_a + stored._message_post_on_failure() + # 2 because simple_user (creator of job) + self.other_partner_a self.assertEqual(len(stored.message_follower_ids), 2) - users = User.browse([SUPERUSER_ID, self.other_user_a.id]) + users = self.simple_user + self.other_user_a expected_partners = [u.partner_id for u in users] self.assertSetEqual( set(stored.message_follower_ids.mapped("partner_id")), diff --git 
a/test_queue_job/tests/test_job_auto_delay.py b/test_queue_job/tests/test_job_auto_delay.py new file mode 100644 index 0000000000..5549fc7487 --- /dev/null +++ b/test_queue_job/tests/test_job_auto_delay.py @@ -0,0 +1,54 @@ +# Copyright 2020 Camptocamp SA +# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html) + +from odoo.tests.common import tagged + +from odoo.addons.queue_job.job import Job + +from .common import JobCommonCase + + +@tagged("post_install", "-at_install") +class TestJobAutoDelay(JobCommonCase): + """Test auto delay of jobs""" + + def test_auto_delay(self): + """method decorated by @job_auto_delay is automatically delayed""" + result = self.env["test.queue.job"].delay_me(1, kwarg=2) + self.assertTrue(isinstance(result, Job)) + self.assertEqual(result.args, (1,)) + self.assertEqual(result.kwargs, {"kwarg": 2}) + + def test_auto_delay_options(self): + """method automatically delayed une _job_options arguments""" + result = self.env["test.queue.job"].delay_me_options() + self.assertTrue(isinstance(result, Job)) + self.assertEqual(result.identity_key, "my_job_identity") + + def test_auto_delay_inside_job(self): + """when a delayed job is processed, it must not delay itself""" + job_ = self.env["test.queue.job"].delay_me(1, kwarg=2) + self.assertTrue(job_.perform(), (1, 2)) + + def test_auto_delay_force_sync(self): + """method forced to run synchronously""" + result = ( + self.env["test.queue.job"] + .with_context(_job_force_sync=True) + .delay_me(1, kwarg=2) + ) + self.assertTrue(result, (1, 2)) + + def test_auto_delay_context_key_set(self): + """patched with context_key delays only if context keys is set""" + result = ( + self.env["test.queue.job"] + .with_context(auto_delay_delay_me_context_key=True) + .delay_me_context_key() + ) + self.assertTrue(isinstance(result, Job)) + + def test_auto_delay_context_key_unset(self): + """patched with context_key do not delay if context keys is not set""" + result = 
self.env["test.queue.job"].delay_me_context_key() + self.assertEqual(result, "ok")