Skip to content
Closed
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
4 changes: 2 additions & 2 deletions queue_job/data/queue_data.xml
Original file line number Diff line number Diff line change
Expand Up @@ -22,11 +22,11 @@
<field eval="True" name="active" />
<field name="user_id" ref="base.user_root" />
<field name="interval_number">1</field>
<field name="interval_type">days</field>
<field name="interval_type">hours</field>
<field name="numbercall">-1</field>
<field eval="False" name="doall" />
<field name="state">code</field>
<field name="code">model.autovacuum()</field>
<field name="code">model.autovacuum(limit_per_channel=1000)</field>
</record>
</data>
<data noupdate="0">
Expand Down
39 changes: 22 additions & 17 deletions queue_job/models/queue_job.py
Original file line number Diff line number Diff line change
Expand Up @@ -390,28 +390,33 @@ def _needaction_domain_get(self):
"""
return [("state", "=", "failed")]

def autovacuum(self, limit_per_channel=None):
    """Remove done/cancelled jobs past their channel's removal interval.

    Intended to be triggered from a cron. When ``limit_per_channel`` is
    given, at most that many jobs are collected per channel on each run,
    which keeps a single execution from timing out on databases holding a
    very large backlog of finished jobs. Tune the limit together with the
    cron execution interval to control the overall deletion rate.

    :param limit_per_channel: optional cap on the number of jobs deleted
        per channel per call (``None`` means no cap)
    :return: True
    """
    all_channels = self.env["queue.job.channel"].search([])
    for channel in all_channels:
        cutoff = datetime.now() - timedelta(days=int(channel.removal_interval))
        expired_domain = [
            "|",
            ("date_done", "<=", cutoff),
            ("date_cancelled", "<=", cutoff),
            ("channel", "=", channel.complete_name),
        ]
        expired = self.search(expired_domain, limit=limit_per_channel)
        # Unlink in smaller batches for performance reasons
        for chunk_ids in self._cr.split_for_in_conditions(expired.ids, 1000):
            self.browse(chunk_ids).unlink()

    return True

def requeue_stuck_jobs(self, enqueued_delta=5, started_delta=0):
Expand Down