diff --git a/queue_job/README.rst b/queue_job/README.rst
index 30ff79ca40..67c21a6a60 100644
--- a/queue_job/README.rst
+++ b/queue_job/README.rst
@@ -74,6 +74,7 @@ Features:
   description, number of retries
 * Related Actions: link an action on the job view, such as open the record
   concerned by the job
+* Error Handler: trigger a method when a job fails, such as calling a webhook
 
 **Table of contents**
 
@@ -418,6 +419,15 @@ Based on this configuration, we can tell that:
 * retries 10 to 15 postponed 30 seconds later
 * all subsequent retries postponed 5 minutes later
 
+**Job function: Error Handler**
+
+The *Error Handler* is a method of ``queue.job`` executed whenever the job
+fails, for instance to notify an external system through a webhook.
+
+It is configured like the *Related Action*: a dictionary with the optional
+keys ``enable``, ``func_name`` and ``kwargs``.
+
+
 **Job Context**
 
 The context of the recordset of the job, or any recordset passed in arguments of
@@ -676,6 +686,7 @@ Contributors
 * Souheil Bejaoui
 * Eric Antones
 * Simone Orsi
+* Tris Doan
 
 Maintainers
 ~~~~~~~~~~~
diff --git a/queue_job/controllers/main.py b/queue_job/controllers/main.py
index d1e56e8f77..57048e76c6 100644
--- a/queue_job/controllers/main.py
+++ b/queue_job/controllers/main.py
@@ -35,7 +35,14 @@ def _try_perform_job(self, env, job):
 
         _logger.debug("%s started", job)
 
-        job.perform()
+        try:
+            job.perform()
+        except Exception as exc:
+            with registry(job.env.cr.dbname).cursor() as new_cr:
+                job.env = job.env(cr=new_cr)
+                job.error_handler(exc)
+            raise
+
         # Triggers any stored computed fields before calling 'set_done'
         # so that will be part of the 'exec_time'
         env.flush_all()
diff --git a/queue_job/job.py b/queue_job/job.py
index d452a84557..0c97b406f5 100644
--- a/queue_job/job.py
+++ b/queue_job/job.py
@@ -955,6 +955,24 @@ def related_action(self):
         action_kwargs = self.job_config.related_action_kwargs
         return action(**action_kwargs)
 
+    def error_handler(self, exc):
+        funcname = self.job_config.error_handler_func_name
+        if not self.job_config.error_handler_enable or not funcname:
+            return None
+
+        if not isinstance(funcname, str):
+            raise ValueError(
+                "error_handler must be the name of a method on queue.job, as a string"
+            )
+        record = self.db_record()
+        action = getattr(record, funcname)
+        _logger.info("Job %s failed with %s, executing %s", self.uuid, exc, action)
+        action_kwargs = {"job": self, **self.job_config.error_handler_kwargs}
+        try:
+            return action(**action_kwargs)
+        except Exception as handler_exc:
+            _logger.warning("Error handler failed: %s", handler_exc)
+
 
 def _is_model_method(func):
     return inspect.ismethod(func) and isinstance(
diff --git a/queue_job/models/queue_job_function.py b/queue_job/models/queue_job_function.py
index ad034b46bc..9735169b7c 100644
--- a/queue_job/models/queue_job_function.py
+++ b/queue_job/models/queue_job_function.py
@@ -28,6 +28,9 @@ class QueueJobFunction(models.Model):
         "related_action_enable "
         "related_action_func_name "
         "related_action_kwargs "
+        "error_handler_enable "
+        "error_handler_func_name "
+        "error_handler_kwargs "
         "job_function_id ",
     )
 
@@ -79,6 +82,33 @@ def _default_channel(self):
         "enable, func_name, kwargs.\n"
         "See the module description for details.",
     )
+    error_handler = JobSerialized(base_type=dict)
+    edit_error_handler = fields.Text(
+        string="Error Handler",
+        compute="_compute_edit_error_handler",
+        inverse="_inverse_edit_error_handler",
+        help="The handler is executed when the job fails. "
+        "Configured as a dictionary with optional keys: "
+        "enable, func_name, kwargs.\n"
+        "See the module description for details.",
+    )
+
+    @api.depends("error_handler")
+    def _compute_edit_error_handler(self):
+        for record in self:
+            record.edit_error_handler = str(record.error_handler)
+
+    def _inverse_edit_error_handler(self):
+        try:
+            edited = (self.edit_error_handler or "").strip()
+            if edited:
+                self.error_handler = ast.literal_eval(edited)
+            else:
+                self.error_handler = {}
+        except (ValueError, TypeError, SyntaxError) as ex:
+            raise exceptions.UserError(
+                self._error_handler_format_error_message()
+            ) from ex
 
     @api.depends("model_id.model", "method")
     def _compute_name(self):
@@ -149,6 +179,9 @@ def job_default_config(self):
             related_action_func_name=None,
             related_action_kwargs={},
             job_function_id=None,
+            error_handler_enable=True,
+            error_handler_func_name=None,
+            error_handler_kwargs={},
         )
 
     def _parse_retry_pattern(self):
@@ -184,6 +217,9 @@ def job_config(self, name):
             related_action_func_name=config.related_action.get("func_name"),
             related_action_kwargs=config.related_action.get("kwargs", {}),
             job_function_id=config.id,
+            error_handler_enable=config.error_handler.get("enable", True),
+            error_handler_func_name=config.error_handler.get("func_name"),
+            error_handler_kwargs=config.error_handler.get("kwargs", {}),
         )
 
     def _retry_pattern_format_error_message(self):
@@ -226,6 +262,14 @@ def _related_action_format_error_message(self):
             ' "kwargs" {{"limit": 10}}}}'
         ).format(self.name)
 
+    def _error_handler_format_error_message(self):
+        return _(
+            "Unexpected format of Error Handler for {}.\n"
+            "Example of valid format:\n"
+            '{{"enable": True, "func_name": "_call_webhook",'
+            ' "kwargs": {{"webhook_url": "XXX", "payload": {{"text": "Hello World!"}}}}}}'
+        ).format(self.name)
+
     @api.constrains("related_action")
     def _check_related_action(self):
         valid_keys = ("enable", "func_name", "kwargs")
diff --git a/queue_job/readme/CONTRIBUTORS.rst b/queue_job/readme/CONTRIBUTORS.rst
index 4b34823abe..bd6cf3458f 100644
--- a/queue_job/readme/CONTRIBUTORS.rst
+++ b/queue_job/readme/CONTRIBUTORS.rst
@@ -10,3 +10,4 @@
 * Souheil Bejaoui
 * Eric Antones
 * Simone Orsi
+* Tris Doan
diff --git a/queue_job/readme/DESCRIPTION.rst b/queue_job/readme/DESCRIPTION.rst
index 263f86385d..dae9087b6e 100644
--- a/queue_job/readme/DESCRIPTION.rst
+++ b/queue_job/readme/DESCRIPTION.rst
@@ -44,3 +44,4 @@ Features:
   description, number of retries
 * Related Actions: link an action on the job view, such as open the record
   concerned by the job
+* Error Handler: trigger a method when a job fails, such as calling a webhook
diff --git a/queue_job/readme/USAGE.rst b/queue_job/readme/USAGE.rst
index b1a0e6a4cf..1952cfdb2a 100644
--- a/queue_job/readme/USAGE.rst
+++ b/queue_job/readme/USAGE.rst
@@ -274,6 +274,15 @@ Based on this configuration, we can tell that:
 * retries 10 to 15 postponed 30 seconds later
 * all subsequent retries postponed 5 minutes later
 
+**Job function: Error Handler**
+
+The *Error Handler* is a method of ``queue.job`` executed whenever the job
+fails, for instance to notify an external system through a webhook.
+
+It is configured like the *Related Action*: a dictionary with the optional
+keys ``enable``, ``func_name`` and ``kwargs``.
+
+
 **Job Context**
 
 The context of the recordset of the job, or any recordset passed in arguments of

diff --git a/queue_job/static/description/index.html b/queue_job/static/description/index.html
index b3abf393da..7e33f4ac86 100644
--- a/queue_job/static/description/index.html
+++ b/queue_job/static/description/index.html
[regenerated HTML description; the readable changes mirror the readme fragments above: the feature bullet "Error Handler: trigger a method when a job fails, such as calling a webhook", the "Job function: Error Handler" section, and "Tris Doan <tridm@trobz.com>" added to the Contributors list. The remaining hunks only touch generated markup of the embedded code samples.]
diff --git a/queue_job/tests/test_model_job_function.py b/queue_job/tests/test_model_job_function.py
index 84676fdb65..5921a552be 100644
--- a/queue_job/tests/test_model_job_function.py
+++ b/queue_job/tests/test_model_job_function.py
@@ -52,6 +52,9 @@ def test_function_job_config(self):
                 related_action_enable=True,
                 related_action_func_name="related_action_foo",
                 related_action_kwargs={"b": 1},
+                error_handler_enable=True,
+                error_handler_func_name=None,
+                error_handler_kwargs={},
                 job_function_id=job_function.id,
             ),
         )
diff --git a/queue_job/views/queue_job_function_views.xml b/queue_job/views/queue_job_function_views.xml
index a6e2ce402c..e748c07d1f 100644
--- a/queue_job/views/queue_job_function_views.xml
+++ b/queue_job/views/queue_job_function_views.xml
@@ -13,6 +13,7 @@
+
diff --git a/test_queue_job/data/queue_job_function_data.xml b/test_queue_job/data/queue_job_function_data.xml
index 8338045141..fe3cf686a6 100644
--- a/test_queue_job/data/queue_job_function_data.xml
+++ b/test_queue_job/data/queue_job_function_data.xml
@@ -3,6 +3,7 @@
         testing_method
+
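As a usage sketch (record name and URL are illustrative), the error handler can
be enabled on a job function record through the new ``edit_error_handler``
field, whose inverse parses the text with ``ast.literal_eval`` into the
``error_handler`` dictionary:

.. code-block:: python

    # Hypothetical configuration on an existing queue.job.function record;
    # the record name follows the "<model.name>.method" convention.
    function = env["queue.job.function"].search(
        [("name", "=", "<my.model>.my_method")], limit=1
    )
    function.edit_error_handler = str(
        {
            "enable": True,
            "func_name": "_call_webhook",
            "kwargs": {
                "webhook_url": "https://example.com/hook",
                "payload": {"text": "Hello World!"},
            },
        }
    )

``job_config()`` then exposes these values as ``error_handler_enable``,
``error_handler_func_name`` and ``error_handler_kwargs``, which
``Job.error_handler()`` uses to resolve and call the method on the
``queue.job`` record.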