diff --git a/docs/changelog.md b/docs/changelog.md
index b4ace91..84e0445 100644
--- a/docs/changelog.md
+++ b/docs/changelog.md
@@ -1,5 +1,18 @@
# Changelog
+## v3.0.0 🌈
+
+### Breaking Changes
+
+- Renamed `REDIS_CLIENT_KWARGS` configuration to `CLIENT_KWARGS`.
+
+### 🚀 Features
+
+- Created a new `Task` model representing all kinds of scheduled tasks.
+ - In future versions, `CronTask`, `ScheduledTask` and `RepeatableTask` will be removed.
+ - `Task` model has a `task_type` field to differentiate between the types of tasks.
+ - Old tasks in the database will be migrated to the new `Task` model automatically.
+
## v2.1.1 🌈
### 🐛 Bug Fixes
diff --git a/docs/configuration.md b/docs/configuration.md
index fb05ec8..da5c5a9 100644
--- a/docs/configuration.md
+++ b/docs/configuration.md
@@ -20,7 +20,7 @@ SCHEDULER_QUEUES = {
'USERNAME': 'some-user',
'PASSWORD': 'some-password',
'DEFAULT_TIMEOUT': 360,
- 'REDIS_CLIENT_KWARGS': { # Eventual additional Redis connection arguments
+ 'CLIENT_KWARGS': { # Eventual additional Redis connection arguments
'ssl_cert_reqs': None,
},
'TOKEN_VALIDATION_METHOD': None, # Method to validate auth-header
diff --git a/docs/index.md b/docs/index.md
index 24f3275..d7e70b7 100644
--- a/docs/index.md
+++ b/docs/index.md
@@ -1,16 +1,44 @@
# Django tasks Scheduler
-[![Django CI][1]][2]
-![badge][3]
-[![badge][4]][5]
+[![Django CI][badge]][2]
+![badge][coverage]
+[![badge][pypi-downloads]][pypi]
---
A database backed asynchronous tasks scheduler for django.
This allows remembering scheduled tasks, their parameters, etc.
+!!! Important
+ Version 3.0.0 introduced a major design change. Instead of three separate models, there is one new `Task` model.
+ The goal is to simplify.
+    Make sure to follow [the migration guide](migrate_to_v3.md).
+
+
## Terminology
+### Scheduled Task
+
+Starting v3.0.0, django-tasks-scheduler uses a single `Task` model with different task types. The task types are:
+
+- `ONCE` - Run the task once at a scheduled time.
+- `REPEATABLE` - Run the task multiple times (limited number of times or infinite times) based on a time interval.
+- `CRON` - Run a task indefinitely based on a cron string schedule.
+
+This enables having one admin view for all scheduled tasks, and having one table in the database to maintain tasks,
+which reduces the overall number of queries.
+A `Task` instance contains all relevant information about a task to enable the users to schedule using django-admin and
+track their status.
+
+Previously, there were three separate models for scheduled tasks. These exist for legacy purposes and are scheduled to
+be removed.
+
+* `Scheduled Task` - Run a task once, on a specific time (can be immediate).
+* `Repeatable Task` - Run a task multiple times (limited number of times or infinite times) based on an interval
+* `Cron Task` - Run a task multiple times (limited number of times or infinite times) based on a cron string
+
+Scheduled tasks are scheduled when the django application starts, and after a scheduled task is executed.
+
### Queue
A queue of messages between processes (main django-app process and worker usually).
@@ -34,7 +62,7 @@ This is a subprocess of worker.
Once a worker listening to the queue becomes available, the job will be executed
-### Scheduled Task Execution
+### Scheduled Job Execution
A scheduler checking the queue periodically will check whether the time the job should be executed has come, and if so,
it will queue it.
@@ -42,19 +70,6 @@ it will queue it.
* A job is considered scheduled if it is queued to be executed, or scheduled to be executed.
* If there is no scheduler, the job will not be queued to run.
-### Scheduled Task
-
-django models storing information about jobs. So it is possible to schedule using
-django-admin and track their status.
-
-There are three types of ScheduledTask.
-
-* `Scheduled Task` - Run a job once, on a specific time (can be immediate).
-* `Repeatable Task` - Run a job multiple times (limited number of times or infinite times) based on an interval
-* `Cron Task` - Run a job multiple times (limited number of times or infinite times) based on a cron string
-
-Scheduled jobs are scheduled when the django application starts, and after a scheduled task is executed.
-
## Scheduler sequence diagram
```mermaid
@@ -121,24 +136,24 @@ sequenceDiagram
## Reporting issues or Features requests
-Please report issues via [GitHub Issues][6] .
+Please report issues via [GitHub Issues][issues] .
---
## Acknowledgements
-A lot of django-admin views and their tests were adopted from [django-rq][7].
+A lot of django-admin views and their tests were adopted from [django-rq][django-rq].
-[1]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml/badge.svg
+[badge]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml/badge.svg
[2]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml
-[3]:https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/cunla/b756396efb895f0e34558c980f1ca0c7/raw/django-tasks-scheduler-4.json
+[coverage]:https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/cunla/b756396efb895f0e34558c980f1ca0c7/raw/django-tasks-scheduler-4.json
-[4]:https://img.shields.io/pypi/dm/django-tasks-scheduler
+[pypi-downloads]:https://img.shields.io/pypi/dm/django-tasks-scheduler
-[5]:https://pypi.org/project/django-tasks-scheduler/
+[pypi]:https://pypi.org/project/django-tasks-scheduler/
-[6]:https://github.com/django-commons/django-tasks-scheduler/issues
+[issues]:https://github.com/django-commons/django-tasks-scheduler/issues
-[7]:https://github.com/rq/django-rq
\ No newline at end of file
+[django-rq]:https://github.com/rq/django-rq
\ No newline at end of file
diff --git a/docs/installation.md b/docs/installation.md
index 6a2db1d..13573b0 100644
--- a/docs/installation.md
+++ b/docs/installation.md
@@ -26,7 +26,7 @@
'USERNAME': 'some-user',
'PASSWORD': 'some-password',
'DEFAULT_TIMEOUT': 360,
- 'REDIS_CLIENT_KWARGS': { # Eventual additional Redis connection arguments
+ 'CLIENT_KWARGS': { # Eventual additional Redis connection arguments
'ssl_cert_reqs': None,
},
},
diff --git a/docs/migrate_to_v3.md b/docs/migrate_to_v3.md
new file mode 100644
index 0000000..ed4cf3b
--- /dev/null
+++ b/docs/migrate_to_v3.md
@@ -0,0 +1,36 @@
+Migration from v2 to v3
+=======================
+
+Version 3.0.0 introduced a major design change. Instead of three separate models, there is one new `Task` model. The
+goal is to have one centralized admin view for all your scheduled tasks, regardless of the scheduling type.
+
+You need to migrate the scheduled tasks using the old models (`ScheduledTask`, `RepeatableTask`, `CronTask`) to the new
+model. It can be done using the export/import commands provided.
+
+After upgrading to django-tasks-scheduler v3.0.0, you will notice you are not able to create new scheduled tasks in the
+old models, that is intentional. In the next version of django-tasks-scheduler (v3.1), the old models will be deleted,
+so make sure you migrate your old models.
+
+!!! Note
+ While we tested different scenarios heavily and left the code for old tasks, we could not account for all different
+ use cases, therefore, please [open an issue][issues] if you encounter any.
+
+There are two ways to migrate your existing scheduled tasks:
+
+## Using the admin views of the old models
+
+If you go to the admin view of the old models, you will notice there is a new action in the actions drop down menu for
+migrating the selected tasks. Use it, and you will also have a link to the new task to compare the migration result.
+
+Note that once you migrate using this method, the old task will be disabled automatically.
+
+## Export/Import management commands
+
+Run in your project directory:
+
+```shell
+python manage.py export > scheduled_tasks.json
+python manage.py import --filename scheduled_tasks.json
+```
+
+[issues]: https://github.com/django-commons/django-tasks-scheduler/issues
\ No newline at end of file
diff --git a/mkdocs.yml b/mkdocs.yml
index a6b8f0c..de019e9 100644
--- a/mkdocs.yml
+++ b/mkdocs.yml
@@ -101,6 +101,7 @@ theme:
nav:
- Home: index.md
+ - Migrate v2 to v3: migrate_to_v3.md
- Installation: installation.md
- Configuration: configuration.md
- Usage: usage.md
diff --git a/poetry.lock b/poetry.lock
index 5979b58..9028713 100644
--- a/poetry.lock
+++ b/poetry.lock
@@ -366,73 +366,73 @@ files = [
[[package]]
name = "coverage"
-version = "7.6.7"
+version = "7.6.8"
description = "Code coverage measurement for Python"
optional = false
python-versions = ">=3.9"
files = [
- {file = "coverage-7.6.7-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:108bb458827765d538abcbf8288599fee07d2743357bdd9b9dad456c287e121e"},
- {file = "coverage-7.6.7-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c973b2fe4dc445cb865ab369df7521df9c27bf40715c837a113edaa2aa9faf45"},
- {file = "coverage-7.6.7-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c6b24007c4bcd0b19fac25763a7cac5035c735ae017e9a349b927cfc88f31c1"},
- {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:acbb8af78f8f91b3b51f58f288c0994ba63c646bc1a8a22ad072e4e7e0a49f1c"},
- {file = "coverage-7.6.7-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad32a981bcdedb8d2ace03b05e4fd8dace8901eec64a532b00b15217d3677dd2"},
- {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:34d23e28ccb26236718a3a78ba72744212aa383141961dd6825f6595005c8b06"},
- {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e25bacb53a8c7325e34d45dddd2f2fbae0dbc230d0e2642e264a64e17322a777"},
- {file = "coverage-7.6.7-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:af05bbba896c4472a29408455fe31b3797b4d8648ed0a2ccac03e074a77e2314"},
- {file = "coverage-7.6.7-cp310-cp310-win32.whl", hash = "sha256:796c9b107d11d2d69e1849b2dfe41730134b526a49d3acb98ca02f4985eeff7a"},
- {file = "coverage-7.6.7-cp310-cp310-win_amd64.whl", hash = "sha256:987a8e3da7da4eed10a20491cf790589a8e5e07656b6dc22d3814c4d88faf163"},
- {file = "coverage-7.6.7-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:7e61b0e77ff4dddebb35a0e8bb5a68bf0f8b872407d8d9f0c726b65dfabe2469"},
- {file = "coverage-7.6.7-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1a5407a75ca4abc20d6252efeb238377a71ce7bda849c26c7a9bece8680a5d99"},
- {file = "coverage-7.6.7-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:df002e59f2d29e889c37abd0b9ee0d0e6e38c24f5f55d71ff0e09e3412a340ec"},
- {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:673184b3156cba06154825f25af33baa2671ddae6343f23175764e65a8c4c30b"},
- {file = "coverage-7.6.7-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e69ad502f1a2243f739f5bd60565d14a278be58be4c137d90799f2c263e7049a"},
- {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:60dcf7605c50ea72a14490d0756daffef77a5be15ed1b9fea468b1c7bda1bc3b"},
- {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:9c2eb378bebb2c8f65befcb5147877fc1c9fbc640fc0aad3add759b5df79d55d"},
- {file = "coverage-7.6.7-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3c0317288f032221d35fa4cbc35d9f4923ff0dfd176c79c9b356e8ef8ef2dff4"},
- {file = "coverage-7.6.7-cp311-cp311-win32.whl", hash = "sha256:951aade8297358f3618a6e0660dc74f6b52233c42089d28525749fc8267dccd2"},
- {file = "coverage-7.6.7-cp311-cp311-win_amd64.whl", hash = "sha256:5e444b8e88339a2a67ce07d41faabb1d60d1004820cee5a2c2b54e2d8e429a0f"},
- {file = "coverage-7.6.7-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f07ff574986bc3edb80e2c36391678a271d555f91fd1d332a1e0f4b5ea4b6ea9"},
- {file = "coverage-7.6.7-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:49ed5ee4109258973630c1f9d099c7e72c5c36605029f3a91fe9982c6076c82b"},
- {file = "coverage-7.6.7-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f3e8796434a8106b3ac025fd15417315d7a58ee3e600ad4dbcfddc3f4b14342c"},
- {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a3b925300484a3294d1c70f6b2b810d6526f2929de954e5b6be2bf8caa1f12c1"},
- {file = "coverage-7.6.7-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c42ec2c522e3ddd683dec5cdce8e62817afb648caedad9da725001fa530d354"},
- {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0266b62cbea568bd5e93a4da364d05de422110cbed5056d69339bd5af5685433"},
- {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:e5f2a0f161d126ccc7038f1f3029184dbdf8f018230af17ef6fd6a707a5b881f"},
- {file = "coverage-7.6.7-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c132b5a22821f9b143f87446805e13580b67c670a548b96da945a8f6b4f2efbb"},
- {file = "coverage-7.6.7-cp312-cp312-win32.whl", hash = "sha256:7c07de0d2a110f02af30883cd7dddbe704887617d5c27cf373362667445a4c76"},
- {file = "coverage-7.6.7-cp312-cp312-win_amd64.whl", hash = "sha256:fd49c01e5057a451c30c9b892948976f5d38f2cbd04dc556a82743ba8e27ed8c"},
- {file = "coverage-7.6.7-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:46f21663e358beae6b368429ffadf14ed0a329996248a847a4322fb2e35d64d3"},
- {file = "coverage-7.6.7-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:40cca284c7c310d622a1677f105e8507441d1bb7c226f41978ba7c86979609ab"},
- {file = "coverage-7.6.7-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77256ad2345c29fe59ae861aa11cfc74579c88d4e8dbf121cbe46b8e32aec808"},
- {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:87ea64b9fa52bf395272e54020537990a28078478167ade6c61da7ac04dc14bc"},
- {file = "coverage-7.6.7-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2d608a7808793e3615e54e9267519351c3ae204a6d85764d8337bd95993581a8"},
- {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdd94501d65adc5c24f8a1a0eda110452ba62b3f4aeaba01e021c1ed9cb8f34a"},
- {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:82c809a62e953867cf57e0548c2b8464207f5f3a6ff0e1e961683e79b89f2c55"},
- {file = "coverage-7.6.7-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:bb684694e99d0b791a43e9fc0fa58efc15ec357ac48d25b619f207c41f2fd384"},
- {file = "coverage-7.6.7-cp313-cp313-win32.whl", hash = "sha256:963e4a08cbb0af6623e61492c0ec4c0ec5c5cf74db5f6564f98248d27ee57d30"},
- {file = "coverage-7.6.7-cp313-cp313-win_amd64.whl", hash = "sha256:14045b8bfd5909196a90da145a37f9d335a5d988a83db34e80f41e965fb7cb42"},
- {file = "coverage-7.6.7-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:f2c7a045eef561e9544359a0bf5784b44e55cefc7261a20e730baa9220c83413"},
- {file = "coverage-7.6.7-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:5dd4e4a49d9c72a38d18d641135d2fb0bdf7b726ca60a103836b3d00a1182acd"},
- {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5c95e0fa3d1547cb6f021ab72f5c23402da2358beec0a8e6d19a368bd7b0fb37"},
- {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f63e21ed474edd23f7501f89b53280014436e383a14b9bd77a648366c81dce7b"},
- {file = "coverage-7.6.7-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ead9b9605c54d15be228687552916c89c9683c215370c4a44f1f217d2adcc34d"},
- {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0573f5cbf39114270842d01872952d301027d2d6e2d84013f30966313cadb529"},
- {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:e2c8e3384c12dfa19fa9a52f23eb091a8fad93b5b81a41b14c17c78e23dd1d8b"},
- {file = "coverage-7.6.7-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:70a56a2ec1869e6e9fa69ef6b76b1a8a7ef709972b9cc473f9ce9d26b5997ce3"},
- {file = "coverage-7.6.7-cp313-cp313t-win32.whl", hash = "sha256:dbba8210f5067398b2c4d96b4e64d8fb943644d5eb70be0d989067c8ca40c0f8"},
- {file = "coverage-7.6.7-cp313-cp313t-win_amd64.whl", hash = "sha256:dfd14bcae0c94004baba5184d1c935ae0d1231b8409eb6c103a5fd75e8ecdc56"},
- {file = "coverage-7.6.7-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:37a15573f988b67f7348916077c6d8ad43adb75e478d0910957394df397d2874"},
- {file = "coverage-7.6.7-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b6cce5c76985f81da3769c52203ee94722cd5d5889731cd70d31fee939b74bf0"},
- {file = "coverage-7.6.7-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ab9763d291a17b527ac6fd11d1a9a9c358280adb320e9c2672a97af346ac2c"},
- {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6cf96ceaa275f071f1bea3067f8fd43bec184a25a962c754024c973af871e1b7"},
- {file = "coverage-7.6.7-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:aee9cf6b0134d6f932d219ce253ef0e624f4fa588ee64830fcba193269e4daa3"},
- {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2bc3e45c16564cc72de09e37413262b9f99167803e5e48c6156bccdfb22c8327"},
- {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:623e6965dcf4e28a3debaa6fcf4b99ee06d27218f46d43befe4db1c70841551c"},
- {file = "coverage-7.6.7-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:850cfd2d6fc26f8346f422920ac204e1d28814e32e3a58c19c91980fa74d8289"},
- {file = "coverage-7.6.7-cp39-cp39-win32.whl", hash = "sha256:c296263093f099da4f51b3dff1eff5d4959b527d4f2f419e16508c5da9e15e8c"},
- {file = "coverage-7.6.7-cp39-cp39-win_amd64.whl", hash = "sha256:90746521206c88bdb305a4bf3342b1b7316ab80f804d40c536fc7d329301ee13"},
- {file = "coverage-7.6.7-pp39.pp310-none-any.whl", hash = "sha256:0ddcb70b3a3a57581b450571b31cb774f23eb9519c2aaa6176d3a84c9fc57671"},
- {file = "coverage-7.6.7.tar.gz", hash = "sha256:d79d4826e41441c9a118ff045e4bccb9fdbdcb1d02413e7ea6eb5c87b5439d24"},
+ {file = "coverage-7.6.8-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:b39e6011cd06822eb964d038d5dff5da5d98652b81f5ecd439277b32361a3a50"},
+ {file = "coverage-7.6.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:63c19702db10ad79151a059d2d6336fe0c470f2e18d0d4d1a57f7f9713875dcf"},
+ {file = "coverage-7.6.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3985b9be361d8fb6b2d1adc9924d01dec575a1d7453a14cccd73225cb79243ee"},
+ {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:644ec81edec0f4ad17d51c838a7d01e42811054543b76d4ba2c5d6af741ce2a6"},
+ {file = "coverage-7.6.8-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f188a2402f8359cf0c4b1fe89eea40dc13b52e7b4fd4812450da9fcd210181d"},
+ {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e19122296822deafce89a0c5e8685704c067ae65d45e79718c92df7b3ec3d331"},
+ {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:13618bed0c38acc418896005732e565b317aa9e98d855a0e9f211a7ffc2d6638"},
+ {file = "coverage-7.6.8-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:193e3bffca48ad74b8c764fb4492dd875038a2f9925530cb094db92bb5e47bed"},
+ {file = "coverage-7.6.8-cp310-cp310-win32.whl", hash = "sha256:3988665ee376abce49613701336544041f2117de7b7fbfe91b93d8ff8b151c8e"},
+ {file = "coverage-7.6.8-cp310-cp310-win_amd64.whl", hash = "sha256:f56f49b2553d7dd85fd86e029515a221e5c1f8cb3d9c38b470bc38bde7b8445a"},
+ {file = "coverage-7.6.8-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:86cffe9c6dfcfe22e28027069725c7f57f4b868a3f86e81d1c62462764dc46d4"},
+ {file = "coverage-7.6.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d82ab6816c3277dc962cfcdc85b1efa0e5f50fb2c449432deaf2398a2928ab94"},
+ {file = "coverage-7.6.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13690e923a3932e4fad4c0ebfb9cb5988e03d9dcb4c5150b5fcbf58fd8bddfc4"},
+ {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4be32da0c3827ac9132bb488d331cb32e8d9638dd41a0557c5569d57cf22c9c1"},
+ {file = "coverage-7.6.8-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:44e6c85bbdc809383b509d732b06419fb4544dca29ebe18480379633623baafb"},
+ {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:768939f7c4353c0fac2f7c37897e10b1414b571fd85dd9fc49e6a87e37a2e0d8"},
+ {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e44961e36cb13c495806d4cac67640ac2866cb99044e210895b506c26ee63d3a"},
+ {file = "coverage-7.6.8-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3ea8bb1ab9558374c0ab591783808511d135a833c3ca64a18ec927f20c4030f0"},
+ {file = "coverage-7.6.8-cp311-cp311-win32.whl", hash = "sha256:629a1ba2115dce8bf75a5cce9f2486ae483cb89c0145795603d6554bdc83e801"},
+ {file = "coverage-7.6.8-cp311-cp311-win_amd64.whl", hash = "sha256:fb9fc32399dca861584d96eccd6c980b69bbcd7c228d06fb74fe53e007aa8ef9"},
+ {file = "coverage-7.6.8-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:e683e6ecc587643f8cde8f5da6768e9d165cd31edf39ee90ed7034f9ca0eefee"},
+ {file = "coverage-7.6.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1defe91d41ce1bd44b40fabf071e6a01a5aa14de4a31b986aa9dfd1b3e3e414a"},
+ {file = "coverage-7.6.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7ad66e8e50225ebf4236368cc43c37f59d5e6728f15f6e258c8639fa0dd8e6d"},
+ {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3fe47da3e4fda5f1abb5709c156eca207eacf8007304ce3019eb001e7a7204cb"},
+ {file = "coverage-7.6.8-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:202a2d645c5a46b84992f55b0a3affe4f0ba6b4c611abec32ee88358db4bb649"},
+ {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4674f0daa1823c295845b6a740d98a840d7a1c11df00d1fd62614545c1583787"},
+ {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:74610105ebd6f33d7c10f8907afed696e79c59e3043c5f20eaa3a46fddf33b4c"},
+ {file = "coverage-7.6.8-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:37cda8712145917105e07aab96388ae76e787270ec04bcb9d5cc786d7cbb8443"},
+ {file = "coverage-7.6.8-cp312-cp312-win32.whl", hash = "sha256:9e89d5c8509fbd6c03d0dd1972925b22f50db0792ce06324ba069f10787429ad"},
+ {file = "coverage-7.6.8-cp312-cp312-win_amd64.whl", hash = "sha256:379c111d3558272a2cae3d8e57e6b6e6f4fe652905692d54bad5ea0ca37c5ad4"},
+ {file = "coverage-7.6.8-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0b0c69f4f724c64dfbfe79f5dfb503b42fe6127b8d479b2677f2b227478db2eb"},
+ {file = "coverage-7.6.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c15b32a7aca8038ed7644f854bf17b663bc38e1671b5d6f43f9a2b2bd0c46f63"},
+ {file = "coverage-7.6.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63068a11171e4276f6ece913bde059e77c713b48c3a848814a6537f35afb8365"},
+ {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6f4548c5ead23ad13fb7a2c8ea541357474ec13c2b736feb02e19a3085fac002"},
+ {file = "coverage-7.6.8-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b4b4299dd0d2c67caaaf286d58aef5e75b125b95615dda4542561a5a566a1e3"},
+ {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:c9ebfb2507751f7196995142f057d1324afdab56db1d9743aab7f50289abd022"},
+ {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:c1b4474beee02ede1eef86c25ad4600a424fe36cff01a6103cb4533c6bf0169e"},
+ {file = "coverage-7.6.8-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d9fd2547e6decdbf985d579cf3fc78e4c1d662b9b0ff7cc7862baaab71c9cc5b"},
+ {file = "coverage-7.6.8-cp313-cp313-win32.whl", hash = "sha256:8aae5aea53cbfe024919715eca696b1a3201886ce83790537d1c3668459c7146"},
+ {file = "coverage-7.6.8-cp313-cp313-win_amd64.whl", hash = "sha256:ae270e79f7e169ccfe23284ff5ea2d52a6f401dc01b337efb54b3783e2ce3f28"},
+ {file = "coverage-7.6.8-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:de38add67a0af869b0d79c525d3e4588ac1ffa92f39116dbe0ed9753f26eba7d"},
+ {file = "coverage-7.6.8-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:b07c25d52b1c16ce5de088046cd2432b30f9ad5e224ff17c8f496d9cb7d1d451"},
+ {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:62a66ff235e4c2e37ed3b6104d8b478d767ff73838d1222132a7a026aa548764"},
+ {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:09b9f848b28081e7b975a3626e9081574a7b9196cde26604540582da60235fdf"},
+ {file = "coverage-7.6.8-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:093896e530c38c8e9c996901858ac63f3d4171268db2c9c8b373a228f459bbc5"},
+ {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9a7b8ac36fd688c8361cbc7bf1cb5866977ece6e0b17c34aa0df58bda4fa18a4"},
+ {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:38c51297b35b3ed91670e1e4efb702b790002e3245a28c76e627478aa3c10d83"},
+ {file = "coverage-7.6.8-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:2e4e0f60cb4bd7396108823548e82fdab72d4d8a65e58e2c19bbbc2f1e2bfa4b"},
+ {file = "coverage-7.6.8-cp313-cp313t-win32.whl", hash = "sha256:6535d996f6537ecb298b4e287a855f37deaf64ff007162ec0afb9ab8ba3b8b71"},
+ {file = "coverage-7.6.8-cp313-cp313t-win_amd64.whl", hash = "sha256:c79c0685f142ca53256722a384540832420dff4ab15fec1863d7e5bc8691bdcc"},
+ {file = "coverage-7.6.8-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3ac47fa29d8d41059ea3df65bd3ade92f97ee4910ed638e87075b8e8ce69599e"},
+ {file = "coverage-7.6.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:24eda3a24a38157eee639ca9afe45eefa8d2420d49468819ac5f88b10de84f4c"},
+ {file = "coverage-7.6.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e4c81ed2820b9023a9a90717020315e63b17b18c274a332e3b6437d7ff70abe0"},
+ {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:bd55f8fc8fa494958772a2a7302b0354ab16e0b9272b3c3d83cdb5bec5bd1779"},
+ {file = "coverage-7.6.8-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f39e2f3530ed1626c66e7493be7a8423b023ca852aacdc91fb30162c350d2a92"},
+ {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:716a78a342679cd1177bc8c2fe957e0ab91405bd43a17094324845200b2fddf4"},
+ {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:177f01eeaa3aee4a5ffb0d1439c5952b53d5010f86e9d2667963e632e30082cc"},
+ {file = "coverage-7.6.8-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:912e95017ff51dc3d7b6e2be158dedc889d9a5cc3382445589ce554f1a34c0ea"},
+ {file = "coverage-7.6.8-cp39-cp39-win32.whl", hash = "sha256:4db3ed6a907b555e57cc2e6f14dc3a4c2458cdad8919e40b5357ab9b6db6c43e"},
+ {file = "coverage-7.6.8-cp39-cp39-win_amd64.whl", hash = "sha256:428ac484592f780e8cd7b6b14eb568f7c85460c92e2a37cb0c0e5186e1a0d076"},
+ {file = "coverage-7.6.8-pp39.pp310-none-any.whl", hash = "sha256:5c52a036535d12590c32c49209e79cabaad9f9ad8aa4cbd875b68c4d67a9cbce"},
+ {file = "coverage-7.6.8.tar.gz", hash = "sha256:8b2b8503edb06822c86d82fa64a4a5cb0760bb8f31f26e138ec743f422f37cfc"},
]
[package.extras]
@@ -466,51 +466,53 @@ pytz = ">2021.1"
[[package]]
name = "cryptography"
-version = "43.0.3"
+version = "44.0.0"
description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers."
optional = false
-python-versions = ">=3.7"
-files = [
- {file = "cryptography-43.0.3-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:bf7a1932ac4176486eab36a19ed4c0492da5d97123f1406cf15e41b05e787d2e"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63efa177ff54aec6e1c0aefaa1a241232dcd37413835a9b674b6e3f0ae2bfd3e"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e1ce50266f4f70bf41a2c6dc4358afadae90e2a1e5342d3c08883df1675374f"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:443c4a81bb10daed9a8f334365fe52542771f25aedaf889fd323a853ce7377d6"},
- {file = "cryptography-43.0.3-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:74f57f24754fe349223792466a709f8e0c093205ff0dca557af51072ff47ab18"},
- {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9762ea51a8fc2a88b70cf2995e5675b38d93bf36bd67d91721c309df184f49bd"},
- {file = "cryptography-43.0.3-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:81ef806b1fef6b06dcebad789f988d3b37ccaee225695cf3e07648eee0fc6b73"},
- {file = "cryptography-43.0.3-cp37-abi3-win32.whl", hash = "sha256:cbeb489927bd7af4aa98d4b261af9a5bc025bd87f0e3547e11584be9e9427be2"},
- {file = "cryptography-43.0.3-cp37-abi3-win_amd64.whl", hash = "sha256:f46304d6f0c6ab8e52770addfa2fc41e6629495548862279641972b6215451cd"},
- {file = "cryptography-43.0.3-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8ac43ae87929a5982f5948ceda07001ee5e83227fd69cf55b109144938d96984"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:846da004a5804145a5f441b8530b4bf35afbf7da70f82409f151695b127213d5"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f996e7268af62598f2fc1204afa98a3b5712313a55c4c9d434aef49cadc91d4"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:f7b178f11ed3664fd0e995a47ed2b5ff0a12d893e41dd0494f406d1cf555cab7"},
- {file = "cryptography-43.0.3-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:c2e6fc39c4ab499049df3bdf567f768a723a5e8464816e8f009f121a5a9f4405"},
- {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:e1be4655c7ef6e1bbe6b5d0403526601323420bcf414598955968c9ef3eb7d16"},
- {file = "cryptography-43.0.3-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:df6b6c6d742395dd77a23ea3728ab62f98379eff8fb61be2744d4679ab678f73"},
- {file = "cryptography-43.0.3-cp39-abi3-win32.whl", hash = "sha256:d56e96520b1020449bbace2b78b603442e7e378a9b3bd68de65c782db1507995"},
- {file = "cryptography-43.0.3-cp39-abi3-win_amd64.whl", hash = "sha256:0c580952eef9bf68c4747774cde7ec1d85a6e61de97281f2dba83c7d2c806362"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:d03b5621a135bffecad2c73e9f4deb1a0f977b9a8ffe6f8e002bf6c9d07b918c"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:a2a431ee15799d6db9fe80c82b055bae5a752bef645bba795e8e52687c69efe3"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:281c945d0e28c92ca5e5930664c1cefd85efe80e5c0d2bc58dd63383fda29f83"},
- {file = "cryptography-43.0.3-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:f18c716be16bc1fea8e95def49edf46b82fccaa88587a45f8dc0ff6ab5d8e0a7"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-macosx_10_9_x86_64.whl", hash = "sha256:4a02ded6cd4f0a5562a8887df8b3bd14e822a90f97ac5e544c162899bc467664"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:53a583b6637ab4c4e3591a15bc9db855b8d9dee9a669b550f311480acab6eb08"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:1ec0bcf7e17c0c5669d881b1cd38c4972fade441b27bda1051665faaa89bdcaa"},
- {file = "cryptography-43.0.3-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:2ce6fae5bdad59577b44e4dfed356944fbf1d925269114c28be377692643b4ff"},
- {file = "cryptography-43.0.3.tar.gz", hash = "sha256:315b9001266a492a6ff443b61238f956b214dbec9910a081ba5b6646a055a805"},
+python-versions = "!=3.9.0,!=3.9.1,>=3.7"
+files = [
+ {file = "cryptography-44.0.0-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:84111ad4ff3f6253820e6d3e58be2cc2a00adb29335d4cacb5ab4d4d34f2a123"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b15492a11f9e1b62ba9d73c210e2416724633167de94607ec6069ef724fad092"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:831c3c4d0774e488fdc83a1923b49b9957d33287de923d58ebd3cec47a0ae43f"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:761817a3377ef15ac23cd7834715081791d4ec77f9297ee694ca1ee9c2c7e5eb"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3c672a53c0fb4725a29c303be906d3c1fa99c32f58abe008a82705f9ee96f40b"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4ac4c9f37eba52cb6fbeaf5b59c152ea976726b865bd4cf87883a7e7006cc543"},
+ {file = "cryptography-44.0.0-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:60eb32934076fa07e4316b7b2742fa52cbb190b42c2df2863dbc4230a0a9b385"},
+ {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:ed3534eb1090483c96178fcb0f8893719d96d5274dfde98aa6add34614e97c8e"},
+ {file = "cryptography-44.0.0-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:f3f6fdfa89ee2d9d496e2c087cebef9d4fcbb0ad63c40e821b39f74bf48d9c5e"},
+ {file = "cryptography-44.0.0-cp37-abi3-win32.whl", hash = "sha256:eb33480f1bad5b78233b0ad3e1b0be21e8ef1da745d8d2aecbb20671658b9053"},
+ {file = "cryptography-44.0.0-cp37-abi3-win_amd64.whl", hash = "sha256:abc998e0c0eee3c8a1904221d3f67dcfa76422b23620173e28c11d3e626c21bd"},
+ {file = "cryptography-44.0.0-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:660cb7312a08bc38be15b696462fa7cc7cd85c3ed9c576e81f4dc4d8b2b31591"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1923cb251c04be85eec9fda837661c67c1049063305d6be5721643c22dd4e2b7"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:404fdc66ee5f83a1388be54300ae978b2efd538018de18556dde92575e05defc"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:c5eb858beed7835e5ad1faba59e865109f3e52b3783b9ac21e7e47dc5554e289"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:f53c2c87e0fb4b0c00fa9571082a057e37690a8f12233306161c8f4b819960b7"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:9e6fc8a08e116fb7c7dd1f040074c9d7b51d74a8ea40d4df2fc7aa08b76b9e6c"},
+ {file = "cryptography-44.0.0-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:9abcc2e083cbe8dde89124a47e5e53ec38751f0d7dfd36801008f316a127d7ba"},
+ {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:d2436114e46b36d00f8b72ff57e598978b37399d2786fd39793c36c6d5cb1c64"},
+ {file = "cryptography-44.0.0-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a01956ddfa0a6790d594f5b34fc1bfa6098aca434696a03cfdbe469b8ed79285"},
+ {file = "cryptography-44.0.0-cp39-abi3-win32.whl", hash = "sha256:eca27345e1214d1b9f9490d200f9db5a874479be914199194e746c893788d417"},
+ {file = "cryptography-44.0.0-cp39-abi3-win_amd64.whl", hash = "sha256:708ee5f1bafe76d041b53a4f95eb28cdeb8d18da17e597d46d7833ee59b97ede"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = "sha256:37d76e6863da3774cd9db5b409a9ecfd2c71c981c38788d3fcfaf177f447b731"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:f677e1268c4e23420c3acade68fac427fffcb8d19d7df95ed7ad17cdef8404f4"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:f5e7cb1e5e56ca0933b4873c0220a78b773b24d40d186b6738080b73d3d0a756"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:8b3e6eae66cf54701ee7d9c83c30ac0a1e3fa17be486033000f2a73a12ab507c"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:be4ce505894d15d5c5037167ffb7f0ae90b7be6f2a98f9a5c3442395501c32fa"},
+ {file = "cryptography-44.0.0-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:62901fb618f74d7d81bf408c8719e9ec14d863086efe4185afd07c352aee1d2c"},
+ {file = "cryptography-44.0.0.tar.gz", hash = "sha256:cd4e834f340b4293430701e772ec543b0fbe6c2dea510a5286fe0acabe153a02"},
]
[package.dependencies]
cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""}
[package.extras]
-docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=1.1.1)"]
-docstest = ["pyenchant (>=1.6.11)", "readme-renderer", "sphinxcontrib-spelling (>=4.0.1)"]
-nox = ["nox"]
-pep8test = ["check-sdist", "click", "mypy", "ruff"]
-sdist = ["build"]
+docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0)"]
+docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"]
+nox = ["nox (>=2024.4.15)", "nox[uv] (>=2024.3.2)"]
+pep8test = ["check-sdist", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"]
+sdist = ["build (>=1.0.0)"]
ssh = ["bcrypt (>=3.1.5)"]
-test = ["certifi", "cryptography-vectors (==43.0.3)", "pretend", "pytest (>=6.2.0)", "pytest-benchmark", "pytest-cov", "pytest-xdist"]
+test = ["certifi (>=2024)", "cryptography-vectors (==44.0.0)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"]
test-randomorder = ["pytest-randomly"]
[[package]]
@@ -657,13 +659,13 @@ probabilistic = ["pyprobables (>=0.6,<0.7)"]
[[package]]
name = "fastjsonschema"
-version = "2.20.0"
+version = "2.21.0"
description = "Fastest Python implementation of JSON schema"
optional = false
python-versions = "*"
files = [
- {file = "fastjsonschema-2.20.0-py3-none-any.whl", hash = "sha256:5875f0b0fa7a0043a91e93a9b8f793bcbbba9691e7fd83dca95c28ba26d21f0a"},
- {file = "fastjsonschema-2.20.0.tar.gz", hash = "sha256:3d48fc5300ee96f5d116f10fe6f28d938e6008f59a6a025c2649475b87f76a23"},
+ {file = "fastjsonschema-2.21.0-py3-none-any.whl", hash = "sha256:5b23b8e7c9c6adc0ecb91c03a0768cb48cd154d9159378a69c8318532e0b5cbf"},
+ {file = "fastjsonschema-2.21.0.tar.gz", hash = "sha256:a02026bbbedc83729da3bfff215564b71902757f33f60089f1abae193daa4771"},
]
[package.extras]
@@ -1551,13 +1553,43 @@ doc = ["sphinx"]
[[package]]
name = "tomli"
-version = "2.1.0"
+version = "2.2.1"
description = "A lil' TOML parser"
optional = false
python-versions = ">=3.8"
files = [
- {file = "tomli-2.1.0-py3-none-any.whl", hash = "sha256:a5c57c3d1c56f5ccdf89f6523458f60ef716e210fc47c4cfb188c5ba473e0391"},
- {file = "tomli-2.1.0.tar.gz", hash = "sha256:3f646cae2aec94e17d04973e4249548320197cfabdf130015d023de4b74d8ab8"},
+ {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"},
+ {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"},
+ {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"},
+ {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"},
+ {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"},
+ {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"},
+ {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"},
+ {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"},
+ {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"},
+ {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"},
+ {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"},
+ {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"},
+ {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"},
+ {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"},
+ {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"},
+ {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"},
+ {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"},
+ {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"},
]
[[package]]
@@ -1641,13 +1673,13 @@ ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"
[[package]]
name = "virtualenv"
-version = "20.27.1"
+version = "20.28.0"
description = "Virtual Python Environment builder"
optional = false
python-versions = ">=3.8"
files = [
- {file = "virtualenv-20.27.1-py3-none-any.whl", hash = "sha256:f11f1b8a29525562925f745563bfd48b189450f61fb34c4f9cc79dd5aa32a1f4"},
- {file = "virtualenv-20.27.1.tar.gz", hash = "sha256:142c6be10212543b32c6c45d3d3893dff89112cc588b7d0879ae5a1ec03a47ba"},
+ {file = "virtualenv-20.28.0-py3-none-any.whl", hash = "sha256:23eae1b4516ecd610481eda647f3a7c09aea295055337331bb4e6892ecce47b0"},
+ {file = "virtualenv-20.28.0.tar.gz", hash = "sha256:2c9c3262bb8e7b87ea801d715fae4495e6032450c71d2309be9550e7364049aa"},
]
[package.dependencies]
diff --git a/pyproject.toml b/pyproject.toml
index 114fa72..76601d4 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -7,7 +7,7 @@ name = "django-tasks-scheduler"
packages = [
{ include = "scheduler" },
]
-version = "2.1.1"
+version = "3.0.0b1"
description = "An async job scheduler for django using redis/valkey brokers"
readme = "README.md"
keywords = ["redis", "valkey", "django", "background-jobs", "job-queue", "task-queue", "redis-queue", "scheduled-jobs"]
diff --git a/scheduler/__init__.py b/scheduler/__init__.py
index c6530c8..e7010c5 100644
--- a/scheduler/__init__.py
+++ b/scheduler/__init__.py
@@ -2,4 +2,8 @@
__version__ = importlib.metadata.version("django-tasks-scheduler")
-from .decorators import job # noqa: F401
+from .decorators import job
+
+__all__ = [
+ "job",
+]
diff --git a/scheduler/admin/__init__.py b/scheduler/admin/__init__.py
index 237e1c8..ab85f06 100644
--- a/scheduler/admin/__init__.py
+++ b/scheduler/admin/__init__.py
@@ -1,2 +1,5 @@
-from .task_models import TaskAdmin # noqa: F401
-from .ephemeral_models import QueueAdmin, WorkerAdmin # noqa: F401
+from .ephemeral_models import QueueAdmin, WorkerAdmin
+from .old_task_models import TaskAdmin as OldTaskAdmin
+from .task_admin import TaskAdmin
+
+__all__ = ["OldTaskAdmin", "QueueAdmin", "WorkerAdmin", "TaskAdmin", ]
diff --git a/scheduler/admin/task_models.py b/scheduler/admin/old_task_models.py
similarity index 81%
rename from scheduler/admin/task_models.py
rename to scheduler/admin/old_task_models.py
index ca3d8db..80ff764 100644
--- a/scheduler/admin/task_models.py
+++ b/scheduler/admin/old_task_models.py
@@ -1,10 +1,15 @@
+from typing import Optional
+
from django.contrib import admin, messages
from django.contrib.contenttypes.admin import GenericStackedInline
+from django.http import HttpRequest
+from django.urls import reverse
+from django.utils.html import format_html
from django.utils.translation import gettext_lazy as _
from scheduler import tools
from scheduler.broker_types import ConnectionErrorTypes
-from scheduler.models import CronTask, TaskArg, TaskKwarg, RepeatableTask, ScheduledTask
+from scheduler.models import CronTask, TaskArg, TaskKwarg, RepeatableTask, ScheduledTask, BaseTask, migrate_util
from scheduler.settings import SCHEDULER_CONFIG, logger
from scheduler.tools import get_job_executions_for_task
@@ -108,6 +113,11 @@ class JobKwargInline(HiddenMixin, GenericStackedInline):
)
+def get_message_bit(rows_updated: int) -> str:
+ message_bit = "1 task was" if rows_updated == 1 else f"{rows_updated} tasks were"
+ return message_bit
+
+
@admin.register(CronTask, ScheduledTask, RepeatableTask)
class TaskAdmin(admin.ModelAdmin):
"""TaskAdmin admin view for all task models.
@@ -117,6 +127,7 @@ class TaskAdmin(admin.ModelAdmin):
save_on_top = True
change_form_template = "admin/scheduler/change_form.html"
actions = [
+ "migrate_selected",
"disable_selected",
"enable_selected",
"enqueue_job_now",
@@ -129,6 +140,7 @@ class TaskAdmin(admin.ModelAdmin):
list_display = (
"enabled",
"name",
+ "link_new_task",
"job_id",
"function_string",
"is_scheduled",
@@ -159,6 +171,17 @@ class TaskAdmin(admin.ModelAdmin):
),
)
+ @admin.display(description="New task")
+ def link_new_task(self, o: BaseTask) -> Optional[str]:
+ if o.new_task_id is None:
+ return None
+ url = reverse("admin:scheduler_task_change", args=[o.new_task_id.id, ])
+ html = format_html(f"""{o.new_task_id.id}""")
+ return html
+
+ def has_add_permission(self, request: HttpRequest) -> bool:
+ return False
+
def get_list_display(self, request):
if self.model.__name__ not in _LIST_DISPLAY_EXTRA:
raise ValueError(f"Unrecognized model {self.model}")
@@ -213,6 +236,16 @@ def delete_model(self, request, obj):
obj.unschedule()
super(TaskAdmin, self).delete_model(request, obj)
+ @admin.action(description=_("Migrate to new Task model(s)"), permissions=("change",))
+ def migrate_selected(self, request, queryset):
+ rows_updated = 0
+ for obj in queryset.iterator():
+ migrate_util.migrate(obj)
+ rows_updated += 1
+
+ level = messages.WARNING if not rows_updated else messages.INFO
+ self.message_user(request, f"{get_message_bit(rows_updated)} successfully migrated to new model.", level=level)
+
@admin.action(description=_("Disable selected %(verbose_name_plural)s"), permissions=("change",))
def disable_selected(self, request, queryset):
rows_updated = 0
@@ -221,10 +254,9 @@ def disable_selected(self, request, queryset):
obj.unschedule()
rows_updated += 1
- message_bit = "1 job was" if rows_updated == 1 else f"{rows_updated} jobs were"
-
level = messages.WARNING if not rows_updated else messages.INFO
- self.message_user(request, f"{message_bit} successfully disabled and unscheduled.", level=level)
+ self.message_user(request, f"{get_message_bit(rows_updated)} successfully disabled and unscheduled.",
+ level=level)
@admin.action(description=_("Enable selected %(verbose_name_plural)s"), permissions=("change",))
def enable_selected(self, request, queryset):
@@ -234,9 +266,8 @@ def enable_selected(self, request, queryset):
obj.save()
rows_updated += 1
- message_bit = "1 job was" if rows_updated == 1 else f"{rows_updated} jobs were"
level = messages.WARNING if not rows_updated else messages.INFO
- self.message_user(request, f"{message_bit} successfully enabled and scheduled.", level=level)
+ self.message_user(request, f"{get_message_bit(rows_updated)} successfully enabled and scheduled.", level=level)
@admin.action(description="Enqueue now", permissions=("change",))
def enqueue_job_now(self, request, queryset):
diff --git a/scheduler/admin/task_admin.py b/scheduler/admin/task_admin.py
new file mode 100644
index 0000000..e19af2c
--- /dev/null
+++ b/scheduler/admin/task_admin.py
@@ -0,0 +1,185 @@
+from django.contrib import admin, messages
+from django.contrib.contenttypes.admin import GenericStackedInline
+from django.utils.translation import gettext_lazy as _
+
+from scheduler import tools
+from scheduler.broker_types import ConnectionErrorTypes
+from scheduler.models import TaskArg, TaskKwarg, Task
+from scheduler.settings import SCHEDULER_CONFIG, logger
+from scheduler.tools import get_job_executions_for_task, TaskType
+
+
+class JobArgInline(GenericStackedInline):
+ model = TaskArg
+ extra = 0
+ fieldsets = ((None, dict(fields=("arg_type", "val"))),)
+
+
+class JobKwargInline(GenericStackedInline):
+ model = TaskKwarg
+ extra = 0
+ fieldsets = ((None, dict(fields=("key", ("arg_type", "val")))),)
+
+
+def get_message_bit(rows_updated: int) -> str:
+ message_bit = "1 task was" if rows_updated == 1 else f"{rows_updated} tasks were"
+ return message_bit
+
+
+@admin.register(Task)
+class TaskAdmin(admin.ModelAdmin):
+ """TaskAdmin admin view for all task models."""
+
+ class Media:
+ js = (
+ "admin/js/jquery.init.js",
+ "admin/js/select-fields.js",
+ )
+
+ save_on_top = True
+ change_form_template = "admin/scheduler/change_form.html"
+ actions = [
+ "disable_selected",
+ "enable_selected",
+ "enqueue_job_now",
+ ]
+ inlines = [
+ JobArgInline,
+ JobKwargInline,
+ ]
+ list_filter = ("enabled",)
+ list_display = (
+ "enabled",
+ "name",
+ "job_id",
+ "function_string",
+ "is_scheduled",
+ "queue",
+ "task_schedule",
+ "next_run",
+ "successful_runs",
+ "last_successful_run",
+ "failed_runs",
+ "last_failed_run",
+ )
+ list_display_links = ("name",)
+ readonly_fields = (
+ "job_id",
+ "successful_runs",
+ "last_successful_run",
+ "failed_runs",
+ "last_failed_run",
+ )
+ # radio_fields = {"task_type": admin.HORIZONTAL}
+ fieldsets = (
+ (
+ None,
+ dict(
+ fields=(
+ "name",
+ "callable",
+ "task_type",
+ ("enabled", "timeout", "result_ttl"),
+ )
+ ),
+ ),
+ (
+ None,
+ dict(fields=("scheduled_time",), classes=("tasktype-OnceTaskType",)),
+ ),
+ (
+ None,
+ dict(fields=("cron_string",), classes=("tasktype-CronTaskType",)),
+ ),
+ (
+ None,
+ dict(fields=("interval", "interval_unit", "repeat"), classes=("tasktype-RepeatableTaskType",)),
+ ),
+ (_("RQ Settings"), dict(fields=(("queue", "at_front"), "job_id"))),
+ (
+ _("Previous runs info"),
+ dict(fields=(("successful_runs", "last_successful_run"), ("failed_runs", "last_failed_run"))),
+ ),
+ )
+
+ @admin.display(description="Schedule")
+ def task_schedule(self, o: Task) -> str:
+ if o.task_type == TaskType.ONCE.value:
+ return f"Run once: {o.scheduled_time:%Y-%m-%d %H:%M:%S}"
+ elif o.task_type == TaskType.CRON.value:
+ return f"Cron: {o.cron_string}"
+ elif o.task_type == TaskType.REPEATABLE.value:
+ if o.interval is None or o.interval_unit is None:
+ return ""
+ return "Repeatable: {} {}".format(o.interval, o.get_interval_unit_display())
+
+ @admin.display(description="Next run")
+ def next_run(self, o: Task) -> str:
+ return tools.get_next_cron_time(o.cron_string)
+
+ def change_view(self, request, object_id, form_url="", extra_context=None):
+ extra = extra_context or {}
+ obj = self.get_object(request, object_id)
+ try:
+ execution_list = get_job_executions_for_task(obj.queue, obj)
+ except ConnectionErrorTypes as e:
+ logger.warn(f"Could not get job executions: {e}")
+ execution_list = list()
+ paginator = self.get_paginator(request, execution_list, SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE)
+ page_number = request.GET.get("p", 1)
+ page_obj = paginator.get_page(page_number)
+ page_range = paginator.get_elided_page_range(page_obj.number)
+
+ extra.update(
+ {
+ "pagination_required": paginator.count > SCHEDULER_CONFIG.EXECUTIONS_IN_PAGE,
+ "executions": page_obj,
+ "page_range": page_range,
+ "page_var": "p",
+ }
+ )
+
+ return super(TaskAdmin, self).change_view(request, object_id, form_url, extra_context=extra)
+
+ def delete_queryset(self, request, queryset):
+ for job in queryset:
+ job.unschedule()
+ super(TaskAdmin, self).delete_queryset(request, queryset)
+
+ def delete_model(self, request, obj):
+ obj.unschedule()
+ super(TaskAdmin, self).delete_model(request, obj)
+
+ @admin.action(description=_("Disable selected %(verbose_name_plural)s"), permissions=("change",))
+ def disable_selected(self, request, queryset):
+ rows_updated = 0
+ for obj in queryset.filter(enabled=True).iterator():
+ obj.enabled = False
+ obj.unschedule()
+ rows_updated += 1
+
+ level = messages.WARNING if not rows_updated else messages.INFO
+ self.message_user(request, f"{get_message_bit(rows_updated)} successfully disabled and unscheduled.",
+ level=level)
+
+ @admin.action(description=_("Enable selected %(verbose_name_plural)s"), permissions=("change",))
+ def enable_selected(self, request, queryset):
+ rows_updated = 0
+ for obj in queryset.filter(enabled=False).iterator():
+ obj.enabled = True
+ obj.save()
+ rows_updated += 1
+
+ level = messages.WARNING if not rows_updated else messages.INFO
+ self.message_user(request, f"{get_message_bit(rows_updated)} successfully enabled and scheduled.", level=level)
+
+ @admin.action(description="Enqueue now", permissions=("change",))
+ def enqueue_job_now(self, request, queryset):
+ task_names = []
+ for task in queryset:
+ task.enqueue_to_run()
+ task_names.append(task.name)
+ self.message_user(
+ request,
+ f"The following jobs have been enqueued: {', '.join(task_names)}",
+ )
diff --git a/scheduler/decorators.py b/scheduler/decorators.py
index 76c1a1c..c8f7e94 100644
--- a/scheduler/decorators.py
+++ b/scheduler/decorators.py
@@ -2,6 +2,8 @@
from .queues import get_queue, QueueNotFoundError
from .rq_classes import rq_job_decorator
+JOB_METHODS_LIST = list()
+
def job(*args, **kwargs):
"""
@@ -36,5 +38,6 @@ def job(*args, **kwargs):
decorator = rq_job_decorator(queue, *args, **kwargs)
if func:
+ JOB_METHODS_LIST.append(f"{func.__module__}.{func.__name__}")
return decorator(func)
return decorator
diff --git a/scheduler/management/commands/export.py b/scheduler/management/commands/export.py
index 6a83595..bb2b249 100644
--- a/scheduler/management/commands/export.py
+++ b/scheduler/management/commands/export.py
@@ -54,7 +54,7 @@ def handle(self, *args, **options):
if options.get("format") == "json":
import json
- click.echo(json.dumps(res, indent=2), file=file)
+ click.echo(json.dumps(res, indent=2, default=str), file=file)
return
if options.get("format") == "yaml":
diff --git a/scheduler/management/commands/import.py b/scheduler/management/commands/import.py
index b7bbfe9..fb21089 100644
--- a/scheduler/management/commands/import.py
+++ b/scheduler/management/commands/import.py
@@ -1,5 +1,5 @@
import sys
-from typing import Dict, Any
+from typing import Dict, Any, Optional
import click
from django.apps import apps
@@ -8,7 +8,8 @@
from django.core.management.base import BaseCommand
from django.utils import timezone
-from scheduler.models import TaskArg, TaskKwarg
+from scheduler.models import TaskArg, TaskKwarg, Task
+from scheduler.models.task import TaskType
from scheduler.tools import MODEL_NAMES
@@ -18,17 +19,33 @@ def job_model_str(model_str: str) -> str:
return model_str
-def create_job_from_dict(job_dict: Dict[str, Any], update):
- model = apps.get_model(app_label="scheduler", model_name=job_model_str(job_dict["model"]))
- existing_job = model.objects.filter(name=job_dict["name"]).first()
- if existing_job:
+def get_task_type(model_str: str) -> TaskType:
+ model_str = job_model_str(model_str)
+ try:
+ return TaskType(model_str)
+ except ValueError:
+ pass
+ if model_str == "CronTask":
+ return TaskType.CRON
+ elif model_str == "RepeatableTask":
+ return TaskType.REPEATABLE
+ elif model_str in {"ScheduledTask", "OnceTask"}:
+ return TaskType.ONCE
+ raise ValueError(f"Invalid model {model_str}")
+
+
+def create_task_from_dict(task_dict: Dict[str, Any], update: bool) -> Optional[Task]:
+ existing_task = Task.objects.filter(name=task_dict["name"]).first()
+ task_type = get_task_type(task_dict["model"])
+ if existing_task:
if update:
- click.echo(f'Found existing job "{existing_job}, removing it to be reinserted"')
- existing_job.delete()
+ click.echo(f'Found existing job "{existing_task}, removing it to be reinserted"')
+ existing_task.delete()
else:
- click.echo(f'Found existing job "{existing_job}", skipping')
- return
- kwargs = dict(job_dict)
+ click.echo(f'Found existing job "{existing_task}", skipping')
+ return None
+ kwargs = dict(task_dict)
+ kwargs["task_type"] = task_type
del kwargs["model"]
del kwargs["callable_args"]
del kwargs["callable_kwargs"]
@@ -37,26 +54,28 @@ def create_job_from_dict(job_dict: Dict[str, Any], update):
if not settings.USE_TZ and not timezone.is_naive(target):
target = timezone.make_naive(target)
kwargs["scheduled_time"] = target
- model_fields = set(map(lambda field: field.attname, model._meta.get_fields()))
+ model_fields = filter(lambda field: hasattr(field, 'attname'), Task._meta.get_fields())
+ model_fields = set(map(lambda field: field.attname, model_fields))
keys_to_ignore = list(filter(lambda _k: _k not in model_fields, kwargs.keys()))
for k in keys_to_ignore:
del kwargs[k]
- scheduled_job = model.objects.create(**kwargs)
- click.echo(f"Created job {scheduled_job}")
- content_type = ContentType.objects.get_for_model(scheduled_job)
+ task = Task.objects.create(**kwargs)
+ click.echo(f"Created task {task}")
+ content_type = ContentType.objects.get_for_model(task)
- for arg in job_dict["callable_args"]:
+ for arg in task_dict["callable_args"]:
TaskArg.objects.create(
content_type=content_type,
- object_id=scheduled_job.id,
+ object_id=task.id,
**arg,
)
- for arg in job_dict["callable_kwargs"]:
+ for arg in task_dict["callable_kwargs"]:
TaskKwarg.objects.create(
content_type=content_type,
- object_id=scheduled_job.id,
+ object_id=task.id,
**arg,
)
+ return task
class Command(BaseCommand):
@@ -125,4 +144,4 @@ def handle(self, *args, **options):
model.objects.all().delete()
for job in jobs:
- create_job_from_dict(job, update=options.get("update"))
+ create_task_from_dict(job, update=options.get("update"))
diff --git a/scheduler/management/commands/rqworker.py b/scheduler/management/commands/rqworker.py
index 4ca5713..ce6201b 100644
--- a/scheduler/management/commands/rqworker.py
+++ b/scheduler/management/commands/rqworker.py
@@ -117,10 +117,7 @@ def handle(self, **options):
try:
# Instantiate a worker
- w = create_worker(
- *queues,
- **init_options
- )
+ w = create_worker(*queues, **init_options)
# Close any opened DB connection before any fork
reset_db_connections()
diff --git a/scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py b/scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py
index aeb79c5..9b8b28c 100644
--- a/scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py
+++ b/scheduler/migrations/0018_alter_crontask_queue_alter_repeatabletask_queue_and_more.py
@@ -1,6 +1,6 @@
# Generated by Django 5.1b1 on 2024-06-29 14:21
-import scheduler.models.scheduled_task
+import scheduler.models.old_scheduled_task
from django.db import migrations, models
@@ -14,16 +14,16 @@ class Migration(migrations.Migration):
migrations.AlterField(
model_name='crontask',
name='queue',
- field=models.CharField(choices=scheduler.models.scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
+ field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
),
migrations.AlterField(
model_name='repeatabletask',
name='queue',
- field=models.CharField(choices=scheduler.models.scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
+ field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
),
migrations.AlterField(
model_name='scheduledtask',
name='queue',
- field=models.CharField(choices=scheduler.models.scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
+ field=models.CharField(choices=scheduler.models.old_scheduled_task.get_queue_choices, help_text='Queue name', max_length=255, verbose_name='queue'),
),
]
diff --git a/scheduler/migrations/0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more.py b/scheduler/migrations/0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more.py
new file mode 100644
index 0000000..bfdbcc1
--- /dev/null
+++ b/scheduler/migrations/0019_task_crontask_new_task_id_repeatabletask_new_task_id_and_more.py
@@ -0,0 +1,186 @@
+# Generated by Django 5.1.3 on 2024-11-20 20:32
+
+import django.db.models.deletion
+import scheduler.models.task
+from django.db import migrations, models
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ("scheduler", "0018_alter_crontask_queue_alter_repeatabletask_queue_and_more"),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name="Task",
+ fields=[
+ ("id", models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name="ID")),
+ ("created_at", models.DateTimeField(auto_now_add=True)),
+ ("updated_at", models.DateTimeField(auto_now=True)),
+ (
+ "name",
+ models.CharField(help_text="Name of the job", max_length=128, unique=True, verbose_name="name"),
+ ),
+ (
+ "task_type",
+ models.CharField(
+ choices=[
+ ("CronTaskType", "Cron Task"),
+ ("RepeatableTaskType", "Repeatable Task"),
+ ("OnceTaskType", "Run once"),
+ ],
+ default="OnceTaskType",
+ max_length=32,
+ verbose_name="Task type",
+ ),
+ ),
+ ("callable", models.CharField(max_length=2048, verbose_name="callable")),
+ (
+ "enabled",
+ models.BooleanField(
+ default=True,
+ help_text="Should job be scheduled? This field is useful to keep past jobs that should no longer be scheduled",
+ verbose_name="enabled",
+ ),
+ ),
+ (
+ "queue",
+ models.CharField(
+ choices=scheduler.models.task.get_queue_choices,
+ help_text="Queue name",
+ max_length=255,
+ verbose_name="queue",
+ ),
+ ),
+ (
+ "job_id",
+ models.CharField(
+ blank=True,
+ editable=False,
+ help_text="Current job_id on queue",
+ max_length=128,
+ null=True,
+ verbose_name="job id",
+ ),
+ ),
+ (
+ "at_front",
+ models.BooleanField(
+ default=False,
+ help_text="When queuing the job, add it in the front of the queue",
+ verbose_name="At front",
+ ),
+ ),
+ (
+ "timeout",
+ models.IntegerField(
+ blank=True,
+ help_text="Timeout specifies the maximum runtime, in seconds, for the job before it'll be considered 'lost'. Blank uses the default timeout.",
+ null=True,
+ verbose_name="timeout",
+ ),
+ ),
+ (
+ "result_ttl",
+ models.IntegerField(
+ blank=True,
+                        help_text="The TTL value (in seconds) of the job result.\n                        -1: Result never expires, you should delete jobs manually.\n                        0: Result gets deleted immediately.\n                        >0: Result expires after n seconds.",
+ null=True,
+ verbose_name="result ttl",
+ ),
+ ),
+ (
+ "failed_runs",
+ models.PositiveIntegerField(
+ default=0, help_text="Number of times the task has failed", verbose_name="failed runs"
+ ),
+ ),
+ (
+ "successful_runs",
+ models.PositiveIntegerField(
+ default=0, help_text="Number of times the task has succeeded", verbose_name="successful runs"
+ ),
+ ),
+ (
+ "last_successful_run",
+ models.DateTimeField(
+ blank=True,
+ help_text="Last time the task has succeeded",
+ null=True,
+ verbose_name="last successful run",
+ ),
+ ),
+ (
+ "last_failed_run",
+ models.DateTimeField(
+ blank=True, help_text="Last time the task has failed", null=True, verbose_name="last failed run"
+ ),
+ ),
+ (
+ "interval",
+ models.PositiveIntegerField(
+ blank=True, help_text="Interval for repeatable task", null=True, verbose_name="interval"
+ ),
+ ),
+ (
+ "interval_unit",
+ models.CharField(
+ blank=True,
+ choices=[
+ ("seconds", "seconds"),
+ ("minutes", "minutes"),
+ ("hours", "hours"),
+ ("days", "days"),
+ ("weeks", "weeks"),
+ ],
+ default="hours",
+ max_length=12,
+ null=True,
+ verbose_name="interval unit",
+ ),
+ ),
+ (
+ "repeat",
+ models.PositiveIntegerField(
+ blank=True,
+ help_text="Number of times to run the job. Leaving this blank means it will run forever.",
+ null=True,
+ verbose_name="repeat",
+ ),
+ ),
+ ("scheduled_time", models.DateTimeField(blank=True, null=True, verbose_name="scheduled time")),
+ (
+ "cron_string",
+ models.CharField(
+ blank=True,
+ help_text='Define the schedule in a crontab like syntax.\n Times are in UTC. Use crontab.guru to create a cron string.',
+ max_length=64,
+ null=True,
+ verbose_name="cron string",
+ ),
+ ),
+ ],
+ ),
+ migrations.AddField(
+ model_name="crontask",
+ name="new_task_id",
+ field=models.ForeignKey(
+ blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="scheduler.task"
+ ),
+ ),
+ migrations.AddField(
+ model_name="repeatabletask",
+ name="new_task_id",
+ field=models.ForeignKey(
+ blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="scheduler.task"
+ ),
+ ),
+ migrations.AddField(
+ model_name="scheduledtask",
+ name="new_task_id",
+ field=models.ForeignKey(
+ blank=True, null=True, on_delete=django.db.models.deletion.CASCADE, to="scheduler.task"
+ ),
+ ),
+ ]
diff --git a/scheduler/models/__init__.py b/scheduler/models/__init__.py
index b05c19a..7dea098 100644
--- a/scheduler/models/__init__.py
+++ b/scheduler/models/__init__.py
@@ -1,3 +1,10 @@
-from .args import TaskKwarg, TaskArg, BaseTaskArg # noqa: F401
-from .queue import Queue # noqa: F401
-from .scheduled_task import BaseTask, ScheduledTask, RepeatableTask, CronTask # noqa: F401
+from .args import TaskKwarg, TaskArg
+from .old_scheduled_task import BaseTask, ScheduledTask, RepeatableTask, CronTask
+from .queue import Queue
+from .task import Task
+
+__all__ = [
+ "TaskKwarg", "TaskArg",
+ "BaseTask", "ScheduledTask", "RepeatableTask", "CronTask",
+ "Queue", "Task",
+]
diff --git a/scheduler/models/migrate_util.py b/scheduler/models/migrate_util.py
new file mode 100644
index 0000000..8bc1c9b
--- /dev/null
+++ b/scheduler/models/migrate_util.py
@@ -0,0 +1,90 @@
+from datetime import datetime
+from typing import Dict, Any, Optional
+
+from django.conf import settings
+from django.contrib.contenttypes.models import ContentType
+from django.utils import timezone
+
+from scheduler.models.old_scheduled_task import BaseTask
+from scheduler.models.task import Task, TaskArg, TaskKwarg
+from scheduler.settings import logger
+from scheduler.tools import TaskType
+
+
+def job_model_str(model_str: str) -> str:
+ if model_str.find("Job") == len(model_str) - 3:
+ return model_str[:-3] + "Task"
+ return model_str
+
+
+def get_task_type(model_str: str) -> TaskType:
+ model_str = job_model_str(model_str)
+ try:
+ return TaskType(model_str)
+ except ValueError:
+ pass
+ if model_str == "CronTask":
+ return TaskType.CRON
+ elif model_str == "RepeatableTask":
+ return TaskType.REPEATABLE
+ elif model_str in {"ScheduledTask", "OnceTask"}:
+ return TaskType.ONCE
+ raise ValueError(f"Invalid model {model_str}")
+
+
+def create_task_from_dict(task_dict: Dict[str, Any], recreate: bool) -> Optional[Task]:
+ existing_task = None
+ if "new_task_id" in task_dict:
+ existing_task = Task.objects.filter(id=task_dict["new_task_id"]).first()
+ if existing_task is None:
+ existing_task = Task.objects.filter(name=task_dict["name"]).first()
+ task_type = get_task_type(task_dict["model"])
+ if existing_task:
+ if recreate:
+ logger.info(f'Found existing job "{existing_task}, removing it to be reinserted"')
+ existing_task.delete()
+ else:
+ logger.info(f'Found existing job "{existing_task}", skipping')
+ return existing_task
+ kwargs = dict(task_dict)
+ kwargs["task_type"] = task_type
+ del kwargs["model"]
+ del kwargs["callable_args"]
+ del kwargs["callable_kwargs"]
+ del kwargs["new_task_id"]
+ if kwargs.get("scheduled_time", None):
+ target = datetime.fromisoformat(kwargs["scheduled_time"])
+ if not settings.USE_TZ and not timezone.is_naive(target):
+ target = timezone.make_naive(target)
+ kwargs["scheduled_time"] = target
+ model_fields = filter(lambda field: hasattr(field, 'attname'), Task._meta.get_fields())
+ model_fields = set(map(lambda field: field.attname, model_fields))
+ keys_to_ignore = list(filter(lambda _k: _k not in model_fields, kwargs.keys()))
+ for k in keys_to_ignore:
+ del kwargs[k]
+ task = Task.objects.create(**kwargs)
+ logger.info(f"Created task {task}")
+ content_type = ContentType.objects.get_for_model(task)
+
+ for arg in task_dict["callable_args"]:
+ TaskArg.objects.create(
+ content_type=content_type,
+ object_id=task.id,
+ **arg,
+ )
+ for arg in task_dict["callable_kwargs"]:
+ TaskKwarg.objects.create(
+ content_type=content_type,
+ object_id=task.id,
+ **arg,
+ )
+ return task
+
+
+def migrate(old: BaseTask) -> Optional[Task]:
+ old_task_dict = old.to_dict()
+ new_task = create_task_from_dict(old_task_dict, old_task_dict.get("new_task_id") is not None)
+ old.new_task_id = new_task
+ old.enabled = False
+ old.save()
+ return new_task
diff --git a/scheduler/models/scheduled_task.py b/scheduler/models/old_scheduled_task.py
similarity index 96%
rename from scheduler/models/scheduled_task.py
rename to scheduler/models/old_scheduled_task.py
index 46fa882..7d7f408 100644
--- a/scheduler/models/scheduled_task.py
+++ b/scheduler/models/old_scheduled_task.py
@@ -70,7 +70,7 @@ def get_queue_choices():
class BaseTask(models.Model):
created = models.DateTimeField(auto_now_add=True)
modified = models.DateTimeField(auto_now=True)
- TASK_TYPE = "BaseTask"
+ task_type = "BaseTask"
name = models.CharField(
_("name"),
max_length=128,
@@ -124,6 +124,7 @@ class BaseTask(models.Model):
>0: Result expires after n seconds."""
),
)
+ new_task_id = models.ForeignKey('Task', on_delete=models.CASCADE, blank=True, null=True)
def callable_func(self):
"""Translate callable string to callable"""
@@ -180,7 +181,7 @@ def _enqueue_args(self) -> Dict:
"""
res = dict(
meta=dict(
- task_type=self.TASK_TYPE,
+ task_type=self.task_type,
scheduled_task_id=self.id,
),
on_success=success_callback,
@@ -227,7 +228,7 @@ def schedule(self) -> bool:
job = self.rqueue.enqueue_at(
schedule_time,
tools.run_task,
- args=(self.TASK_TYPE, self.id),
+ args=(self.task_type, self.id),
**kwargs,
)
self.job_id = job.id
@@ -239,7 +240,7 @@ def enqueue_to_run(self) -> bool:
kwargs = self._enqueue_args()
job = self.rqueue.enqueue(
tools.run_task,
- args=(self.TASK_TYPE, self.id),
+ args=(self.task_type, self.id),
**kwargs,
)
self.job_id = job.id
@@ -266,7 +267,7 @@ def _schedule_time(self):
def to_dict(self) -> Dict:
"""Export model to dictionary, so it can be saved as external file backup"""
res = dict(
- model=self.TASK_TYPE,
+ model=self.task_type,
name=self.name,
callable=self.callable,
callable_args=[
@@ -294,10 +295,11 @@ def to_dict(self) -> Dict:
scheduled_time=self._schedule_time().isoformat(),
interval=getattr(self, "interval", None),
interval_unit=getattr(self, "interval_unit", None),
- successful_runs=getattr(self, "successful_runs", None),
- failed_runs=getattr(self, "failed_runs", None),
+ successful_runs=getattr(self, "successful_runs", 0),
+ failed_runs=getattr(self, "failed_runs", 0),
last_successful_run=getattr(self, "last_successful_run", None),
last_failed_run=getattr(self, "last_failed_run", None),
+ new_task_id=getattr(self, "new_task_id", None),
)
return res
@@ -312,7 +314,7 @@ def get_absolute_url(self):
def __str__(self):
func = self.function_string()
- return f"{self.TASK_TYPE}[{self.name}={func}]"
+ return f"{self.task_type}[{self.name}={func}]"
def save(self, **kwargs):
schedule_job = kwargs.pop("schedule_job", True)
@@ -391,11 +393,11 @@ class Meta:
class ScheduledTask(ScheduledTimeMixin, BaseTask):
- TASK_TYPE = "ScheduledTask"
+ task_type = "ScheduledTask"
def ready_for_schedule(self) -> bool:
return super(ScheduledTask, self).ready_for_schedule() and (
- self.scheduled_time is None or self.scheduled_time >= timezone.now()
+ self.scheduled_time is None or self.scheduled_time >= timezone.now()
)
class Meta:
@@ -405,6 +407,8 @@ class Meta:
class RepeatableTask(RepeatableMixin, ScheduledTimeMixin, BaseTask):
+ task_type = "RepeatableTask"
+
class TimeUnits(models.TextChoices):
SECONDS = "seconds", _("seconds")
MINUTES = "minutes", _("minutes")
@@ -422,7 +426,6 @@ class TimeUnits(models.TextChoices):
null=True,
help_text=_("Number of times to run the job. Leaving this blank means it will run forever."),
)
- TASK_TYPE = "RepeatableTask"
def clean(self):
super(RepeatableTask, self).clean()
@@ -497,7 +500,7 @@ class Meta:
class CronTask(RepeatableMixin, BaseTask):
- TASK_TYPE = "CronTask"
+ task_type = "CronTask"
cron_string = models.CharField(
_("cron string"),
diff --git a/scheduler/models/task.py b/scheduler/models/task.py
new file mode 100644
index 0000000..c2a5249
--- /dev/null
+++ b/scheduler/models/task.py
@@ -0,0 +1,463 @@
+import math
+import uuid
+from datetime import timedelta, datetime
+from typing import Dict
+
+import croniter
+from django.apps import apps
+from django.conf import settings as django_settings
+from django.contrib import admin
+from django.contrib.contenttypes.fields import GenericRelation
+from django.core.exceptions import ValidationError
+from django.core.mail import mail_admins
+from django.db import models
+from django.templatetags.tz import utc
+from django.urls import reverse
+from django.utils import timezone
+from django.utils.safestring import mark_safe
+from django.utils.translation import gettext_lazy as _
+
+from scheduler import settings
+from scheduler import tools
+from scheduler.models.args import TaskArg, TaskKwarg
+from scheduler.queues import get_queue
+from scheduler.rq_classes import DjangoQueue
+from scheduler.settings import QUEUES
+from scheduler.settings import logger
+from scheduler.tools import TaskType
+
+SCHEDULER_INTERVAL = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL
+
+
+def failure_callback(job, connection, result, *args, **kwargs):
+ task_type = job.meta.get("task_type", None)
+ if task_type is None:
+ return
+ task = Task.objects.filter(job_id=job.id).first()
+ if task is None:
+ logger.warn(f"Could not find task for job {job.id}")
+ return
+ mail_admins(
+ f"Task {task.id}/{task.name} has failed",
+ "See django-admin for logs",
+ )
+ task.job_id = None
+ task.failed_runs += 1
+ task.last_failed_run = timezone.now()
+ task.save(schedule_job=True)
+
+
+def success_callback(job, connection, result, *args, **kwargs):
+ task_type = job.meta.get("task_type", None)
+ if task_type is None:
+ return
+
+ task = Task.objects.filter(job_id=job.id).first()
+ if task is None:
+ model = apps.get_model(app_label="scheduler", model_name=task_type)
+ task = model.objects.filter(job_id=job.id).first()
+ if task is None:
+ logger.warn(f"Could not find task for job {job.id}")
+ return
+ task.job_id = None
+ task.successful_runs += 1
+ task.last_successful_run = timezone.now()
+ task.save(schedule_job=True)
+
+
+def get_queue_choices():
+ return [(queue, queue) for queue in QUEUES.keys()]
+
+
+class Task(models.Model):
+ class TimeUnits(models.TextChoices):
+ SECONDS = "seconds", _("seconds")
+ MINUTES = "minutes", _("minutes")
+ HOURS = "hours", _("hours")
+ DAYS = "days", _("days")
+ WEEKS = "weeks", _("weeks")
+
+ created_at = models.DateTimeField(auto_now_add=True)
+ updated_at = models.DateTimeField(auto_now=True)
+ name = models.CharField(_("name"), max_length=128, unique=True, help_text=_("Name of the job"))
+ task_type = models.CharField(_("Task type"), max_length=32, choices=TaskType.choices, default=TaskType.ONCE)
+ callable = models.CharField(_("callable"), max_length=2048)
+ callable_args = GenericRelation(TaskArg, related_query_name="args")
+ callable_kwargs = GenericRelation(TaskKwarg, related_query_name="kwargs")
+ enabled = models.BooleanField(
+ _("enabled"),
+ default=True,
+ help_text=_(
+ "Should job be scheduled? This field is useful to keep past jobs that should no longer be scheduled"
+ ),
+ )
+ queue = models.CharField(_("queue"), max_length=255, choices=get_queue_choices, help_text=_("Queue name"))
+ job_id = models.CharField(
+ _("job id"), max_length=128, editable=False, blank=True, null=True, help_text=_("Current job_id on queue")
+ )
+ at_front = models.BooleanField(
+ _("At front"),
+ default=False,
+ help_text=_("When queuing the job, add it in the front of the queue"),
+ )
+ timeout = models.IntegerField(
+ _("timeout"),
+ blank=True,
+ null=True,
+ help_text=_(
+ "Timeout specifies the maximum runtime, in seconds, for the job "
+ "before it'll be considered 'lost'. Blank uses the default "
+ "timeout."
+ ),
+ )
+ result_ttl = models.IntegerField(
+ _("result ttl"),
+ blank=True,
+ null=True,
+ help_text=mark_safe(
+ """The TTL value (in seconds) of the job result.
+ -1: Result never expires, you should delete jobs manually.
+ 0: Result gets deleted immediately.
+ >0: Result expires after n seconds."""
+ ),
+ )
+ failed_runs = models.PositiveIntegerField(
+ _("failed runs"),
+ default=0,
+ help_text=_("Number of times the task has failed"),
+ )
+ successful_runs = models.PositiveIntegerField(
+ _("successful runs"),
+ default=0,
+ help_text=_("Number of times the task has succeeded"),
+ )
+ last_successful_run = models.DateTimeField(
+ _("last successful run"),
+ blank=True,
+ null=True,
+ help_text=_("Last time the task has succeeded"),
+ )
+ last_failed_run = models.DateTimeField(
+ _("last failed run"),
+ blank=True,
+ null=True,
+ help_text=_("Last time the task has failed"),
+ )
+ interval = models.PositiveIntegerField(
+ _("interval"),
+ blank=True,
+ null=True,
+ help_text=_("Interval for repeatable task"),
+ )
+ interval_unit = models.CharField(
+ _("interval unit"),
+ max_length=12,
+ choices=TimeUnits.choices,
+ default=TimeUnits.HOURS,
+ blank=True,
+ null=True,
+ )
+ repeat = models.PositiveIntegerField(
+ _("repeat"),
+ blank=True,
+ null=True,
+ help_text=_("Number of times to run the job. Leaving this blank means it will run forever."),
+ )
+ scheduled_time = models.DateTimeField(_("scheduled time"), blank=True, null=True)
+ cron_string = models.CharField(
+ _("cron string"),
+ max_length=64,
+ blank=True,
+ null=True,
+ help_text=mark_safe(
+ """Define the schedule in a crontab like syntax.
+ Times are in UTC. Use crontab.guru to create a cron string."""
+ ),
+ )
+
+ def callable_func(self):
+ """Translate callable string to callable"""
+ return tools.callable_func(self.callable)
+
+ @admin.display(boolean=True, description=_("is scheduled?"))
+ def is_scheduled(self) -> bool:
+ """Check whether a next job for this task is queued/scheduled to be executed"""
+ if self.job_id is None: # no job_id => is not scheduled
+ return False
+ # check whether job_id is in scheduled/queued/active jobs
+ scheduled_jobs = self.rqueue.scheduled_job_registry.get_job_ids()
+ enqueued_jobs = self.rqueue.get_job_ids()
+ active_jobs = self.rqueue.started_job_registry.get_job_ids()
+ res = (self.job_id in scheduled_jobs) or (self.job_id in enqueued_jobs) or (self.job_id in active_jobs)
+ # If the job_id is not scheduled/queued/started,
+ # update the job_id to None. (The job_id belongs to a previous run which is completed)
+ if not res:
+ self.job_id = None
+ super(Task, self).save()
+ return res
+
+ @admin.display(description="Callable")
+ def function_string(self) -> str:
+ args = self.parse_args()
+ args_list = [repr(arg) for arg in args]
+ kwargs = self.parse_kwargs()
+ kwargs_list = [k + "=" + repr(v) for (k, v) in kwargs.items()]
+ return self.callable + f"({', '.join(args_list + kwargs_list)})"
+
+ def parse_args(self):
+ """Parse args for running the job"""
+ args = self.callable_args.all()
+ return [arg.value() for arg in args]
+
+ def parse_kwargs(self):
+ """Parse kwargs for running the job"""
+ kwargs = self.callable_kwargs.all()
+ return dict([kwarg.value() for kwarg in kwargs])
+
+ def _next_job_id(self):
+ addition = uuid.uuid4().hex[-10:]
+ name = self.name.replace("/", ".")
+ return f"{self.queue}:{name}:{addition}"
+
+ def _enqueue_args(self) -> Dict:
+ """Args for DjangoQueue.enqueue.
+ Set all arguments for DjangoQueue.enqueue/enqueue_at.
+ Particularly:
+ - set job timeout and ttl
+ - ensure a callback to reschedule the job next iteration.
+ - Set job-id to proper format
+ - set job meta
+ """
+ res = dict(
+ meta=dict(
+ task_type=self.task_type,
+ scheduled_task_id=self.id,
+ ),
+ on_success=success_callback,
+ on_failure=failure_callback,
+ job_id=self._next_job_id(),
+ )
+ if self.at_front:
+ res["at_front"] = self.at_front
+ if self.timeout:
+ res["job_timeout"] = self.timeout
+ if self.result_ttl is not None:
+ res["result_ttl"] = self.result_ttl
+ if self.task_type == TaskType.REPEATABLE:
+ res["meta"]["interval"] = self.interval_seconds()
+ res["meta"]["repeat"] = self.repeat
+ return res
+
+ @property
+ def rqueue(self) -> DjangoQueue:
+ """Returns django-queue for job"""
+ return get_queue(self.queue)
+
+ def ready_for_schedule(self) -> bool:
+ """Is the task ready to be scheduled?
+
+ If the task is already scheduled or disabled, then it is not
+ ready to be scheduled.
+
+ :returns: True if the task is ready to be scheduled.
+ """
+ if self.is_scheduled():
+ logger.debug(f"Task {self.name} already scheduled")
+ return False
+ if not self.enabled:
+ logger.debug(f"Task {str(self)} disabled, enable task before scheduling")
+ return False
+ if self.task_type in {TaskType.REPEATABLE, TaskType.ONCE} and self._schedule_time() < timezone.now():
+ return False
+ return True
+
+ def schedule(self) -> bool:
+ """Schedule the next execution for the task to run.
+ :returns: True if a job was scheduled, False otherwise.
+ """
+ if not self.ready_for_schedule():
+ return False
+ schedule_time = self._schedule_time()
+ kwargs = self._enqueue_args()
+ job = self.rqueue.enqueue_at(
+ schedule_time,
+ tools.run_task,
+ args=(self.task_type, self.id),
+ **kwargs,
+ )
+ self.job_id = job.id
+ super(Task, self).save()
+ return True
+
+ def enqueue_to_run(self) -> bool:
+ """Enqueue task to run now."""
+ kwargs = self._enqueue_args()
+ job = self.rqueue.enqueue(
+ tools.run_task,
+ args=(self.task_type, self.id),
+ **kwargs,
+ )
+ self.job_id = job.id
+ self.save(schedule_job=False)
+ return True
+
+ def unschedule(self) -> bool:
+ """Remove a job from django-queue.
+
+ If a job is queued to be executed or scheduled to be executed, it will remove it.
+ """
+ queue = self.rqueue
+ if self.job_id is None:
+ return True
+ queue.remove(self.job_id)
+ queue.scheduled_job_registry.remove(self.job_id)
+ self.job_id = None
+ self.save(schedule_job=False)
+ return True
+
+ def _schedule_time(self) -> datetime:
+ if self.task_type == TaskType.CRON:
+ self.scheduled_time = tools.get_next_cron_time(self.cron_string)
+ elif self.task_type == TaskType.REPEATABLE:
+ _now = timezone.now()
+ if self.scheduled_time >= _now:
+ return utc(self.scheduled_time) if django_settings.USE_TZ else self.scheduled_time
+ gap = math.ceil((_now.timestamp() - self.scheduled_time.timestamp()) / self.interval_seconds())
+ if self.repeat is None or self.repeat >= gap:
+ self.scheduled_time += timedelta(seconds=self.interval_seconds() * gap)
+ self.repeat = (self.repeat - gap) if self.repeat is not None else None
+ return utc(self.scheduled_time) if django_settings.USE_TZ else self.scheduled_time
+
+ def to_dict(self) -> Dict:
+ """Export model to dictionary, so it can be saved as external file backup"""
+ interval_unit = str(self.interval_unit) if self.interval_unit else None
+ res = dict(
+ model=str(self.task_type),
+ name=self.name,
+ callable=self.callable,
+ callable_args=[
+ dict(
+ arg_type=arg.arg_type,
+ val=arg.val,
+ )
+ for arg in self.callable_args.all()
+ ],
+ callable_kwargs=[
+ dict(
+ arg_type=arg.arg_type,
+ key=arg.key,
+ val=arg.val,
+ )
+ for arg in self.callable_kwargs.all()
+ ],
+ enabled=self.enabled,
+ queue=self.queue,
+ repeat=getattr(self, "repeat", None),
+ at_front=self.at_front,
+ timeout=self.timeout,
+ result_ttl=self.result_ttl,
+ cron_string=getattr(self, "cron_string", None),
+ scheduled_time=self._schedule_time().isoformat(),
+ interval=getattr(self, "interval", None),
+ interval_unit=interval_unit,
+ successful_runs=getattr(self, "successful_runs", None),
+ failed_runs=getattr(self, "failed_runs", None),
+ last_successful_run=getattr(self, "last_successful_run", None),
+ last_failed_run=getattr(self, "last_failed_run", None),
+ )
+ return res
+
+ def get_absolute_url(self):
+ model = self._meta.model.__name__.lower()
+ return reverse(
+ f"admin:scheduler_{model}_change",
+ args=[
+ self.id,
+ ],
+ )
+
+ def __str__(self):
+ func = self.function_string()
+ return f"{self.task_type}[{self.name}={func}]"
+
+ def save(self, **kwargs):
+ schedule_job = kwargs.pop("schedule_job", True)
+ update_fields = kwargs.get("update_fields", None)
+ if update_fields is not None:
+ kwargs["update_fields"] = set(update_fields).union({"modified"})
+ super(Task, self).save(**kwargs)
+ if schedule_job:
+ self.schedule()
+ super(Task, self).save()
+
+ def delete(self, **kwargs):
+ self.unschedule()
+ super(Task, self).delete(**kwargs)
+
+ def interval_seconds(self):
+ kwargs = {
+ self.interval_unit: self.interval,
+ }
+ return timedelta(**kwargs).total_seconds()
+
+ def clean_callable(self):
+ try:
+ tools.callable_func(self.callable)
+ except Exception:
+ raise ValidationError(
+ {"callable": ValidationError(_("Invalid callable, must be importable"), code="invalid")}
+ )
+
+ def clean_queue(self):
+ queue_keys = settings.QUEUES.keys()
+ if self.queue not in queue_keys:
+ raise ValidationError(
+ {
+ "queue": ValidationError(
+ _("Invalid queue, must be one of: {}".format(", ".join(queue_keys))), code="invalid"
+ )
+ }
+ )
+
+ def clean_interval_unit(self):
+ if SCHEDULER_INTERVAL > self.interval_seconds():
+ raise ValidationError(
+ _("Job interval is set lower than %(queue)r queue's interval. " "minimum interval is %(interval)"),
+ code="invalid",
+ params={"queue": self.queue, "interval": SCHEDULER_INTERVAL},
+ )
+ if self.interval_seconds() % SCHEDULER_INTERVAL:
+ raise ValidationError(
+ _("Job interval is not a multiple of rq_scheduler's interval frequency: %(interval)ss"),
+ code="invalid",
+ params={"interval": SCHEDULER_INTERVAL},
+ )
+
+ def clean_result_ttl(self) -> None:
+ """Throws an error if there are repeats left to run and the result_ttl won't last until the next scheduled time.
+ :return: None
+ """
+ if self.result_ttl and self.result_ttl != -1 and self.result_ttl < self.interval_seconds() and self.repeat:
+ raise ValidationError(
+ _(
+ "Job result_ttl must be either indefinite (-1) or "
+ "longer than the interval, %(interval)s seconds, to ensure rescheduling."
+ ),
+ code="invalid",
+ params={"interval": self.interval_seconds()},
+ )
+
+ def clean_cron_string(self):
+ try:
+ croniter.croniter(self.cron_string)
+ except ValueError as e:
+ raise ValidationError({"cron_string": ValidationError(_(str(e)), code="invalid")})
+
+ def clean(self):
+ self.clean_queue()
+ self.clean_callable()
+ if self.task_type == TaskType.CRON:
+ self.clean_cron_string()
+ if self.task_type == TaskType.REPEATABLE:
+ self.clean_interval_unit()
+ self.clean_result_ttl()
diff --git a/scheduler/queues.py b/scheduler/queues.py
index 401b4ec..15abbd9 100644
--- a/scheduler/queues.py
+++ b/scheduler/queues.py
@@ -76,7 +76,7 @@ def _get_broker_connection(config, use_strict_broker=False):
password=config.get("PASSWORD"),
ssl=config.get("SSL", False),
ssl_cert_reqs=config.get("SSL_CERT_REQS", "required"),
- **config.get("REDIS_CLIENT_KWARGS", {}),
+ **config.get("CLIENT_KWARGS", {}),
)
@@ -86,7 +86,7 @@ def get_connection(queue_settings, use_strict_redis=False):
def get_queue(
- name="default", default_timeout=None, is_async=None, autocommit=None, connection=None, **kwargs
+ name="default", default_timeout=None, is_async=None, autocommit=None, connection=None, **kwargs
) -> DjangoQueue:
"""Returns an DjangoQueue using parameters defined in `SCHEDULER_QUEUES`"""
from .settings import QUEUES
diff --git a/scheduler/rq_classes.py b/scheduler/rq_classes.py
index 1a14368..8c536b0 100644
--- a/scheduler/rq_classes.py
+++ b/scheduler/rq_classes.py
@@ -24,7 +24,8 @@
from scheduler import settings
from scheduler.broker_types import PipelineType, ConnectionType
-MODEL_NAMES = ["ScheduledTask", "RepeatableTask", "CronTask"]
+MODEL_NAMES = ["ScheduledTask", "RepeatableTask", "CronTask", "Task"]
+TASK_TYPES = ["OnceTaskType", "RepeatableTaskType", "CronTaskType"]
rq_job_decorator = job
ExecutionStatus = JobStatus
@@ -61,9 +62,11 @@ def __eq__(self, other) -> bool:
def is_scheduled_task(self) -> bool:
return self.meta.get("scheduled_task_id", None) is not None
- def is_execution_of(self, task: "ScheduledTask") -> bool: # noqa: F821
- return (self.meta.get("task_type", None) == task.TASK_TYPE
- and self.meta.get("scheduled_task_id", None) == task.id)
+ def is_execution_of(self, task: "Task") -> bool: # noqa: F821
+ return (
+ self.meta.get("task_type", None) == task.task_type and self.meta.get("scheduled_task_id",
+ None) == task.id
+ )
def stop_execution(self, connection: ConnectionType):
send_stop_job_command(connection, self.id)
diff --git a/scheduler/static/admin/js/select-fields.js b/scheduler/static/admin/js/select-fields.js
new file mode 100644
index 0000000..50ed38f
--- /dev/null
+++ b/scheduler/static/admin/js/select-fields.js
@@ -0,0 +1,27 @@
+(function ($) {
+ $(function () {
+ const tasktypes = {
+ "CronTaskType": $(".tasktype-CronTaskType"),
+ "RepeatableTaskType": $(".tasktype-RepeatableTaskType"),
+ "OnceTaskType": $(".tasktype-OnceTaskType"),
+ };
+ var taskTypeField = $('#id_task_type');
+
+ function toggleVerified(value) {
+ console.log(value);
+ for (const [k, v] of Object.entries(tasktypes)) {
+ if (k === value) {
+ v.show();
+ } else {
+ v.hide();
+ }
+ }
+ }
+
+ toggleVerified(taskTypeField.val());
+
+ taskTypeField.change(function () {
+ toggleVerified($(this).val());
+ });
+ });
+})(django.jQuery);
\ No newline at end of file
diff --git a/scheduler/tests/test_internals.py b/scheduler/tests/test_internals.py
index a8cb491..f916a48 100644
--- a/scheduler/tests/test_internals.py
+++ b/scheduler/tests/test_internals.py
@@ -2,16 +2,16 @@
from django.utils import timezone
-from scheduler.models import ScheduledTask
+from scheduler.models.task import TaskType
from scheduler.tests.testtools import SchedulerBaseCase, task_factory
from scheduler.tools import get_scheduled_task
class TestInternals(SchedulerBaseCase):
def test_get_scheduled_job(self):
- task = task_factory(ScheduledTask, scheduled_time=timezone.now() - timedelta(hours=1))
- self.assertEqual(task, get_scheduled_task(task.TASK_TYPE, task.id))
+ task = task_factory(TaskType.ONCE, scheduled_time=timezone.now() - timedelta(hours=1))
+ self.assertEqual(task, get_scheduled_task(TaskType.ONCE, task.id))
with self.assertRaises(ValueError):
- get_scheduled_task(task.TASK_TYPE, task.id + 1)
+ get_scheduled_task(task.task_type, task.id + 1)
with self.assertRaises(ValueError):
get_scheduled_task("UNKNOWN_JOBTYPE", task.id)
diff --git a/scheduler/tests/test_job_decorator.py b/scheduler/tests/test_job_decorator.py
index 7b78554..85a64b9 100644
--- a/scheduler/tests/test_job_decorator.py
+++ b/scheduler/tests/test_job_decorator.py
@@ -4,6 +4,7 @@
from scheduler import job, settings
from . import test_settings # noqa
+from ..decorators import JOB_METHODS_LIST
from ..queues import get_queue, QueueNotFoundError
@@ -34,6 +35,9 @@ class JobDecoratorTest(TestCase):
def setUp(self) -> None:
get_queue("default").connection.flushall()
+ def test_all_job_methods_registered(self):
+ self.assertEqual(1, len(JOB_METHODS_LIST))
+
def test_job_decorator_no_params(self):
test_job.delay()
config = settings.SCHEDULER_CONFIG
@@ -71,7 +75,6 @@ def _assert_job_with_func_and_props(self, queue_name, expected_func, expected_re
def test_job_decorator_bad_queue(self):
with self.assertRaises(QueueNotFoundError):
-
@job("bad-queue")
def test_job_bad_queue():
time.sleep(1)
diff --git a/scheduler/tests/test_mgmt_cmds.py b/scheduler/tests/test_mgmt_cmds.py
deleted file mode 100644
index 6257935..0000000
--- a/scheduler/tests/test_mgmt_cmds.py
+++ /dev/null
@@ -1,334 +0,0 @@
-import json
-import os
-import tempfile
-from unittest import mock
-
-import yaml
-from django.core.management import call_command
-from django.test import TestCase
-
-from scheduler.models import ScheduledTask, RepeatableTask
-from scheduler.queues import get_queue
-from scheduler.tests.jobs import failing_job, test_job
-from scheduler.tests.testtools import task_factory
-from . import test_settings # noqa
-from .test_views import BaseTestCase
-from ..tools import create_worker
-
-
-class RqworkerTestCase(TestCase):
-
- def test_rqworker__no_queues_params(self):
- queue = get_queue("default")
-
- # enqueue some jobs that will fail
- jobs = []
- job_ids = []
- for _ in range(0, 3):
- job = queue.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
-
- # Create a worker to execute these jobs
- call_command("rqworker", fork_job_execution=False, burst=True)
-
- # check if all jobs are really failed
- for job in jobs:
- self.assertTrue(job.is_failed)
-
- def test_rqworker__job_class_param__green(self):
- queue = get_queue("default")
-
- # enqueue some jobs that will fail
- jobs = []
- job_ids = []
- for _ in range(0, 3):
- job = queue.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
-
- # Create a worker to execute these jobs
- call_command(
- "rqworker", "--job-class", "scheduler.rq_classes.JobExecution", fork_job_execution=False, burst=True
- )
-
- # check if all jobs are really failed
- for job in jobs:
- self.assertTrue(job.is_failed)
-
- def test_rqworker__bad_job_class__fail(self):
- queue = get_queue("default")
-
- # enqueue some jobs that will fail
- jobs = []
- job_ids = []
- for _ in range(0, 3):
- job = queue.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
-
- # Create a worker to execute these jobs
- with self.assertRaises(ImportError):
- call_command("rqworker", "--job-class", "rq.badclass", fork_job_execution=False, burst=True)
-
- def test_rqworker__run_jobs(self):
- queue = get_queue("default")
-
- # enqueue some jobs that will fail
- jobs = []
- job_ids = []
- for _ in range(0, 3):
- job = queue.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
-
- # Create a worker to execute these jobs
- call_command("rqworker", "default", fork_job_execution=False, burst=True)
-
- # check if all jobs are really failed
- for job in jobs:
- self.assertTrue(job.is_failed)
-
- def test_rqworker__worker_with_two_queues(self):
- queue = get_queue("default")
- queue2 = get_queue("django_tasks_scheduler_test")
-
- # enqueue some jobs that will fail
- jobs = []
- job_ids = []
- for _ in range(0, 3):
- job = queue.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
- job = queue2.enqueue(failing_job)
- jobs.append(job)
- job_ids.append(job.id)
-
- # Create a worker to execute these jobs
- call_command("rqworker", "default", "django_tasks_scheduler_test", fork_job_execution=False, burst=True)
-
- # check if all jobs are really failed
- for job in jobs:
- self.assertTrue(job.is_failed)
-
- def test_rqworker__worker_with_one_queue__does_not_perform_other_queue_job(self):
- queue = get_queue("default")
- queue2 = get_queue("django_tasks_scheduler_test")
-
- job = queue.enqueue(failing_job)
- other_job = queue2.enqueue(failing_job)
-
- # Create a worker to execute these jobs
- call_command("rqworker", "default", fork_job_execution=False, burst=True)
- # assert
- self.assertTrue(job.is_failed)
- self.assertTrue(other_job.is_queued)
-
-
-class RqstatsTest(TestCase):
- def test_rqstats__does_not_fail(self):
- call_command("rqstats", "-j")
- call_command("rqstats", "-y")
- call_command("rqstats")
-
-
-class DeleteFailedExecutionsTest(BaseTestCase):
- def test_delete_failed_executions__delete_jobs(self):
- queue = get_queue("default")
- call_command("delete_failed_executions", queue="default")
- queue.enqueue(failing_job)
- worker = create_worker("default")
- worker.work(burst=True)
- self.assertEqual(1, len(queue.failed_job_registry))
- call_command("delete_failed_executions", queue="default")
- self.assertEqual(0, len(queue.failed_job_registry))
-
-
-class RunJobTest(TestCase):
- def test_run_job__should_schedule_job(self):
- queue = get_queue("default")
- queue.empty()
- func_name = f"{test_job.__module__}.{test_job.__name__}"
- # act
- call_command("run_job", func_name, queue="default")
- # assert
- job_list = queue.get_jobs()
- self.assertEqual(1, len(job_list))
- self.assertEqual(func_name + "()", job_list[0].get_call_string())
-
-
-class ExportTest(TestCase):
- def setUp(self) -> None:
- self.tmpfile = tempfile.NamedTemporaryFile()
-
- def tearDown(self) -> None:
- os.remove(self.tmpfile.name)
-
- def test_export__should_export_job(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True))
- jobs.append(task_factory(RepeatableTask, enabled=True))
-
- # act
- call_command("export", filename=self.tmpfile.name)
- # assert
- result = json.load(self.tmpfile)
- self.assertEqual(len(jobs), len(result))
- self.assertEqual(result[0], jobs[0].to_dict())
- self.assertEqual(result[1], jobs[1].to_dict())
-
- def test_export__should_export_enabled_jobs_only(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True))
- jobs.append(task_factory(RepeatableTask, enabled=False))
-
- # act
- call_command("export", filename=self.tmpfile.name, enabled=True)
- # assert
- result = json.load(self.tmpfile)
- self.assertEqual(len(jobs) - 1, len(result))
- self.assertEqual(result[0], jobs[0].to_dict())
-
- def test_export__should_export_job_yaml_without_yaml_lib(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True))
- jobs.append(task_factory(RepeatableTask, enabled=True))
-
- # act
- with mock.patch.dict("sys.modules", {"yaml": None}):
- with self.assertRaises(SystemExit) as cm:
- call_command("export", filename=self.tmpfile.name, format="yaml")
- self.assertEqual(cm.exception.code, 1)
-
- def test_export__should_export_job_yaml_green(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True))
- jobs.append(task_factory(RepeatableTask, enabled=True))
-
- # act
- call_command("export", filename=self.tmpfile.name, format="yaml")
- # assert
- result = yaml.load(self.tmpfile, yaml.SafeLoader)
- self.assertEqual(len(jobs), len(result))
- self.assertEqual(result[0], jobs[0].to_dict())
- self.assertEqual(result[1], jobs[1].to_dict())
-
-
-class ImportTest(TestCase):
- def setUp(self) -> None:
- self.tmpfile = tempfile.NamedTemporaryFile(mode="w")
-
- def tearDown(self) -> None:
- os.remove(self.tmpfile.name)
-
- def test_import__should_schedule_job(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True, instance_only=True))
- jobs.append(task_factory(RepeatableTask, enabled=True, instance_only=True))
- res = json.dumps([j.to_dict() for j in jobs])
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- call_command("import", filename=self.tmpfile.name)
- # assert
- self.assertEqual(1, ScheduledTask.objects.count())
- db_job = ScheduledTask.objects.first()
- attrs = ["name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
-
- def test_import__should_schedule_job_yaml(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True, instance_only=True))
- jobs.append(task_factory(RepeatableTask, enabled=True, instance_only=True))
- res = yaml.dump([j.to_dict() for j in jobs], default_flow_style=False)
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- call_command("import", filename=self.tmpfile.name, format="yaml")
- # assert
- self.assertEqual(1, ScheduledTask.objects.count())
- db_job = ScheduledTask.objects.first()
- attrs = ["name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
-
- def test_import__should_schedule_job_yaml_without_yaml_lib(self):
- jobs = list()
- jobs.append(task_factory(ScheduledTask, enabled=True, instance_only=True))
- jobs.append(task_factory(RepeatableTask, enabled=True, instance_only=True))
- res = yaml.dump([j.to_dict() for j in jobs], default_flow_style=False)
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- with mock.patch.dict("sys.modules", {"yaml": None}):
- with self.assertRaises(SystemExit) as cm:
- call_command("import", filename=self.tmpfile.name, format="yaml")
- self.assertEqual(cm.exception.code, 1)
-
- def test_import__should_schedule_job_reset(self):
- jobs = list()
- task_factory(ScheduledTask, enabled=True)
- task_factory(ScheduledTask, enabled=True)
- jobs.append(task_factory(ScheduledTask, enabled=True))
- jobs.append(task_factory(RepeatableTask, enabled=True, instance_only=True))
- res = json.dumps([j.to_dict() for j in jobs])
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- call_command(
- "import",
- filename=self.tmpfile.name,
- reset=True,
- )
- # assert
- self.assertEqual(1, ScheduledTask.objects.count())
- db_job = ScheduledTask.objects.first()
- attrs = ["name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
- self.assertEqual(1, RepeatableTask.objects.count())
- db_job = RepeatableTask.objects.first()
- attrs = ["name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[1], attr), getattr(db_job, attr))
-
- def test_import__should_schedule_job_update_existing(self):
- jobs = list()
- task_factory(ScheduledTask, enabled=True)
- jobs.append(task_factory(ScheduledTask, enabled=True))
- res = json.dumps([j.to_dict() for j in jobs])
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- call_command(
- "import",
- filename=self.tmpfile.name,
- update=True,
- )
- # assert
- self.assertEqual(2, ScheduledTask.objects.count())
- db_job = ScheduledTask.objects.get(name=jobs[0].name)
- self.assertNotEqual(jobs[0].id, db_job.id)
- attrs = ["name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
-
- def test_import__should_schedule_job_without_update_existing(self):
- jobs = list()
- task_factory(ScheduledTask, enabled=True)
- jobs.append(task_factory(ScheduledTask, enabled=True))
- res = json.dumps([j.to_dict() for j in jobs])
- self.tmpfile.write(res)
- self.tmpfile.flush()
- # act
- call_command(
- "import",
- filename=self.tmpfile.name,
- )
- # assert
- self.assertEqual(2, ScheduledTask.objects.count())
- db_job = ScheduledTask.objects.get(name=jobs[0].name)
- attrs = ["id", "name", "queue", "callable", "enabled", "timeout"]
- for attr in attrs:
- self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
diff --git a/scheduler/tests/test_mgmt_commands/__init__.py b/scheduler/tests/test_mgmt_commands/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scheduler/tests/test_mgmt_commands/test_delete_failed_executions.py b/scheduler/tests/test_mgmt_commands/test_delete_failed_executions.py
new file mode 100644
index 0000000..74a53df
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_delete_failed_executions.py
@@ -0,0 +1,19 @@
+from django.core.management import call_command
+
+from scheduler.queues import get_queue
+from scheduler.tests.jobs import failing_job
+from scheduler.tests.test_views import BaseTestCase
+from scheduler.tools import create_worker
+from scheduler.tests import test_settings # noqa
+
+
+class DeleteFailedExecutionsTest(BaseTestCase):
+ def test_delete_failed_executions__delete_jobs(self):
+ queue = get_queue("default")
+ call_command("delete_failed_executions", queue="default")
+ queue.enqueue(failing_job)
+ worker = create_worker("default")
+ worker.work(burst=True)
+ self.assertEqual(1, len(queue.failed_job_registry))
+ call_command("delete_failed_executions", queue="default")
+ self.assertEqual(0, len(queue.failed_job_registry))
diff --git a/scheduler/tests/test_mgmt_commands/test_export.py b/scheduler/tests/test_mgmt_commands/test_export.py
new file mode 100644
index 0000000..6ad1878
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_export.py
@@ -0,0 +1,73 @@
+import json
+import os
+import tempfile
+from unittest import mock
+
+import yaml
+from django.core.management import call_command
+from django.test import TestCase
+
+from scheduler.tests import test_settings # noqa
+from scheduler.tests.testtools import task_factory
+from scheduler.tools import TaskType
+
+
+class ExportTest(TestCase):
+ def setUp(self) -> None:
+ super().setUp()
+ self.tmpfile = tempfile.NamedTemporaryFile()
+
+ def tearDown(self) -> None:
+ super().tearDown()
+ os.remove(self.tmpfile.name)
+
+ def test_export__should_export_job(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True))
+
+ # act
+ call_command("export", filename=self.tmpfile.name)
+ # assert
+ result = json.load(self.tmpfile)
+ self.assertEqual(len(jobs), len(result))
+ self.assertEqual(result[0], jobs[0].to_dict())
+ self.assertEqual(result[1], jobs[1].to_dict())
+
+ def test_export__should_export_enabled_jobs_only(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=False))
+
+ # act
+ call_command("export", filename=self.tmpfile.name, enabled=True)
+ # assert
+ result = json.load(self.tmpfile)
+ self.assertEqual(len(jobs) - 1, len(result))
+ self.assertEqual(result[0], jobs[0].to_dict())
+
+ def test_export__should_export_job_yaml_without_yaml_lib(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True))
+
+ # act
+ with mock.patch.dict("sys.modules", {"yaml": None}):
+ with self.assertRaises(SystemExit) as cm:
+ call_command("export", filename=self.tmpfile.name, format="yaml")
+ self.assertEqual(cm.exception.code, 1)
+
+ def test_export__should_export_job_yaml_green(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True))
+ jobs.append(task_factory(TaskType.CRON, enabled=True))
+
+ # act
+ call_command("export", filename=self.tmpfile.name, format="yaml")
+ # assert
+ result = yaml.load(self.tmpfile, yaml.SafeLoader)
+ self.assertEqual(len(jobs), len(result))
+ self.assertEqual(result[0], jobs[0].to_dict())
+ self.assertEqual(result[1], jobs[1].to_dict())
+ self.assertEqual(result[2], jobs[2].to_dict())
diff --git a/scheduler/tests/test_mgmt_commands/test_import.py b/scheduler/tests/test_mgmt_commands/test_import.py
new file mode 100644
index 0000000..c856e28
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_import.py
@@ -0,0 +1,134 @@
+import json
+import os
+import tempfile
+from unittest import mock
+
+import yaml
+from django.core.management import call_command
+from django.test import TestCase
+
+from scheduler.models import Task
+from scheduler.tests.testtools import task_factory
+from scheduler.tools import TaskType
+from scheduler.tests import test_settings # noqa
+
+
+class ImportTest(TestCase):
+ def setUp(self) -> None:
+ self.tmpfile = tempfile.NamedTemporaryFile(mode="w")
+
+ def tearDown(self) -> None:
+ os.remove(self.tmpfile.name)
+
+ def test_import__should_schedule_job(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True, instance_only=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True, instance_only=True))
+ res = json.dumps([j.to_dict() for j in jobs])
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ call_command("import", filename=self.tmpfile.name)
+ # assert
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.ONCE).count())
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.REPEATABLE).count())
+ db_job = Task.objects.filter(task_type=TaskType.ONCE).first()
+ attrs = ["name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
+
+ def test_import__should_schedule_job_yaml(self):
+ tasks = list()
+ tasks.append(task_factory(TaskType.ONCE, enabled=True, instance_only=True))
+ tasks.append(task_factory(TaskType.REPEATABLE, enabled=True, instance_only=True))
+ res = yaml.dump([j.to_dict() for j in tasks], default_flow_style=False)
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ call_command("import", filename=self.tmpfile.name, format="yaml")
+ # assert
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.ONCE).count())
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.REPEATABLE).count())
+ db_job = Task.objects.filter(task_type=TaskType.ONCE).first()
+ attrs = ["name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(tasks[0], attr), getattr(db_job, attr))
+
+ def test_import__should_schedule_job_yaml_without_yaml_lib(self):
+ jobs = list()
+ jobs.append(task_factory(TaskType.ONCE, enabled=True, instance_only=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True, instance_only=True))
+ res = yaml.dump([j.to_dict() for j in jobs], default_flow_style=False)
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ with mock.patch.dict("sys.modules", {"yaml": None}):
+ with self.assertRaises(SystemExit) as cm:
+ call_command("import", filename=self.tmpfile.name, format="yaml")
+ self.assertEqual(cm.exception.code, 1)
+
+ def test_import__should_schedule_job_reset(self):
+ jobs = list()
+ task_factory(TaskType.ONCE, enabled=True)
+ task_factory(TaskType.ONCE, enabled=True)
+ jobs.append(task_factory(TaskType.ONCE, enabled=True))
+ jobs.append(task_factory(TaskType.REPEATABLE, enabled=True, instance_only=True))
+ res = json.dumps([j.to_dict() for j in jobs])
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ call_command(
+ "import",
+ filename=self.tmpfile.name,
+ reset=True,
+ )
+ # assert
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.ONCE).count())
+ db_job = Task.objects.filter(task_type=TaskType.ONCE).first()
+ attrs = ["name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(jobs[0], attr), getattr(db_job, attr))
+ self.assertEqual(1, Task.objects.filter(task_type=TaskType.REPEATABLE).count())
+ db_job = Task.objects.filter(task_type=TaskType.REPEATABLE).first()
+ attrs = ["name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(jobs[1], attr), getattr(db_job, attr))
+
+ def test_import__should_schedule_job_update_existing(self):
+ tasks = list()
+ tasks.append(task_factory(TaskType.ONCE, enabled=True))
+ tasks.append(task_factory(TaskType.ONCE, enabled=True))
+ res = json.dumps([j.to_dict() for j in tasks])
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ call_command(
+ "import",
+ filename=self.tmpfile.name,
+ update=True,
+ )
+ # assert
+ self.assertEqual(2, Task.objects.filter(task_type=TaskType.ONCE).count())
+ db_job = Task.objects.filter(task_type=TaskType.ONCE).get(name=tasks[0].name)
+ attrs = ["name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(tasks[0], attr), getattr(db_job, attr))
+
+ def test_import__should_schedule_job_without_update_existing(self):
+ tasks = list()
+ tasks.append(task_factory(TaskType.ONCE, enabled=True))
+ tasks.append(task_factory(TaskType.ONCE, enabled=True))
+ res = json.dumps([j.to_dict() for j in tasks])
+ self.tmpfile.write(res)
+ self.tmpfile.flush()
+ # act
+ call_command(
+ "import",
+ filename=self.tmpfile.name,
+ )
+ # assert
+ self.assertEqual(2, Task.objects.filter(task_type=TaskType.ONCE).count())
+ db_job = Task.objects.get(name=tasks[0].name)
+ attrs = ["id", "name", "queue", "callable", "enabled", "timeout"]
+ for attr in attrs:
+ self.assertEqual(getattr(tasks[0], attr), getattr(db_job, attr))
diff --git a/scheduler/tests/test_mgmt_commands/test_rq_stats.py b/scheduler/tests/test_mgmt_commands/test_rq_stats.py
new file mode 100644
index 0000000..0daf641
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_rq_stats.py
@@ -0,0 +1,11 @@
+from django.core.management import call_command
+from django.test import TestCase
+
+from scheduler.tests import test_settings # noqa
+
+
+class RqstatsTest(TestCase):
+ def test_rqstats__does_not_fail(self):
+ call_command("rqstats", "-j")
+ call_command("rqstats", "-y")
+ call_command("rqstats")
diff --git a/scheduler/tests/test_mgmt_commands/test_rq_worker.py b/scheduler/tests/test_mgmt_commands/test_rq_worker.py
new file mode 100644
index 0000000..c4e4e49
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_rq_worker.py
@@ -0,0 +1,115 @@
+from django.core.management import call_command
+from django.test import TestCase
+
+from scheduler.queues import get_queue
+from scheduler.tests.jobs import failing_job
+from scheduler.tests import test_settings # noqa
+
+
+class RqworkerTestCase(TestCase):
+
+ def test_rqworker__no_queues_params(self):
+ queue = get_queue("default")
+
+ # enqueue some jobs that will fail
+ jobs = []
+ job_ids = []
+ for _ in range(0, 3):
+ job = queue.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+
+ # Create a worker to execute these jobs
+ call_command("rqworker", fork_job_execution=False, burst=True)
+
+ # check if all jobs are really failed
+ for job in jobs:
+ self.assertTrue(job.is_failed)
+
+ def test_rqworker__job_class_param__green(self):
+ queue = get_queue("default")
+
+ # enqueue some jobs that will fail
+ jobs = []
+ job_ids = []
+ for _ in range(0, 3):
+ job = queue.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+
+ # Create a worker to execute these jobs
+ call_command(
+ "rqworker", "--job-class", "scheduler.rq_classes.JobExecution", fork_job_execution=False, burst=True
+ )
+
+ # check if all jobs are really failed
+ for job in jobs:
+ self.assertTrue(job.is_failed)
+
+ def test_rqworker__bad_job_class__fail(self):
+ queue = get_queue("default")
+
+ # enqueue some jobs that will fail
+ jobs = []
+ job_ids = []
+ for _ in range(0, 3):
+ job = queue.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+
+ # Create a worker to execute these jobs
+ with self.assertRaises(ImportError):
+ call_command("rqworker", "--job-class", "rq.badclass", fork_job_execution=False, burst=True)
+
+ def test_rqworker__run_jobs(self):
+ queue = get_queue("default")
+
+ # enqueue some jobs that will fail
+ jobs = []
+ job_ids = []
+ for _ in range(0, 3):
+ job = queue.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+
+ # Create a worker to execute these jobs
+ call_command("rqworker", "default", fork_job_execution=False, burst=True)
+
+ # check if all jobs are really failed
+ for job in jobs:
+ self.assertTrue(job.is_failed)
+
+ def test_rqworker__worker_with_two_queues(self):
+ queue = get_queue("default")
+ queue2 = get_queue("django_tasks_scheduler_test")
+
+ # enqueue some jobs that will fail
+ jobs = []
+ job_ids = []
+ for _ in range(0, 3):
+ job = queue.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+ job = queue2.enqueue(failing_job)
+ jobs.append(job)
+ job_ids.append(job.id)
+
+ # Create a worker to execute these jobs
+ call_command("rqworker", "default", "django_tasks_scheduler_test", fork_job_execution=False, burst=True)
+
+ # check if all jobs are really failed
+ for job in jobs:
+ self.assertTrue(job.is_failed)
+
+ def test_rqworker__worker_with_one_queue__does_not_perform_other_queue_job(self):
+ queue = get_queue("default")
+ queue2 = get_queue("django_tasks_scheduler_test")
+
+ job = queue.enqueue(failing_job)
+ other_job = queue2.enqueue(failing_job)
+
+ # Create a worker to execute these jobs
+ call_command("rqworker", "default", fork_job_execution=False, burst=True)
+ # assert
+ self.assertTrue(job.is_failed)
+ self.assertTrue(other_job.is_queued)
diff --git a/scheduler/tests/test_mgmt_commands/test_run_job.py b/scheduler/tests/test_mgmt_commands/test_run_job.py
new file mode 100644
index 0000000..4efe24d
--- /dev/null
+++ b/scheduler/tests/test_mgmt_commands/test_run_job.py
@@ -0,0 +1,19 @@
+from django.core.management import call_command
+from django.test import TestCase
+
+from scheduler.queues import get_queue
+from scheduler.tests.jobs import test_job
+from scheduler.tests import test_settings # noqa
+
+
+class RunJobTest(TestCase):
+ def test_run_job__should_schedule_job(self):
+ queue = get_queue("default")
+ queue.empty()
+ func_name = f"{test_job.__module__}.{test_job.__name__}"
+ # act
+ call_command("run_job", func_name, queue="default")
+ # assert
+ job_list = queue.get_jobs()
+ self.assertEqual(1, len(job_list))
+ self.assertEqual(func_name + "()", job_list[0].get_call_string())
diff --git a/scheduler/tests/test_old_models/__init__.py b/scheduler/tests/test_old_models/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scheduler/tests/test_cron_task.py b/scheduler/tests/test_old_models/test_cron_task.py
similarity index 87%
rename from scheduler/tests/test_cron_task.py
rename to scheduler/tests/test_old_models/test_cron_task.py
index a64f9b7..33743ec 100644
--- a/scheduler/tests/test_cron_task.py
+++ b/scheduler/tests/test_old_models/test_cron_task.py
@@ -3,23 +3,23 @@
from scheduler import settings
from scheduler.models import CronTask
from scheduler.tools import create_worker
-from .test_models import BaseTestCases
-from .testtools import task_factory
-from ..queues import get_queue
+from scheduler.tests.test_old_models.test_old_models import BaseTestCases
+from scheduler.tests.testtools import old_task_factory
+from scheduler.queues import get_queue
class TestCronTask(BaseTestCases.TestBaseTask):
TaskModelClass = CronTask
def test_clean(self):
- task = task_factory(CronTask)
+ task = old_task_factory(CronTask)
task.cron_string = "* * * * *"
task.queue = list(settings.QUEUES)[0]
task.callable = "scheduler.tests.jobs.test_job"
self.assertIsNone(task.clean())
def test_clean_cron_string_invalid(self):
- task = task_factory(CronTask)
+ task = old_task_factory(CronTask)
task.cron_string = "not-a-cron-string"
task.queue = list(settings.QUEUES)[0]
task.callable = "scheduler.tests.jobs.test_job"
@@ -27,7 +27,7 @@ def test_clean_cron_string_invalid(self):
task.clean_cron_string()
def test_check_rescheduled_after_execution(self):
- task = task_factory(
+ task = old_task_factory(
CronTask,
)
queue = task.rqueue
@@ -43,7 +43,7 @@ def test_check_rescheduled_after_execution(self):
self.assertNotEqual(task.job_id, first_run_id)
def test_check_rescheduled_after_failed_execution(self):
- task = task_factory(
+ task = old_task_factory(
CronTask,
callable_name="scheduler.tests.jobs.scheduler.tests.jobs.test_job",
)
@@ -63,7 +63,7 @@ def test_cron_task_enqueuing_jobs(self):
queue = get_queue()
prev_queued = len(queue.scheduled_job_registry)
prev_finished = len(queue.finished_job_registry)
- task = task_factory(CronTask, callable_name="scheduler.tests.jobs.enqueue_jobs")
+ task = old_task_factory(CronTask, callable_name="scheduler.tests.jobs.enqueue_jobs")
self.assertEqual(prev_queued + 1, len(queue.scheduled_job_registry))
first_run_id = task.job_id
entry = queue.fetch_job(first_run_id)
diff --git a/scheduler/tests/test_old_models/test_old_models.py b/scheduler/tests/test_old_models/test_old_models.py
new file mode 100644
index 0000000..e76764f
--- /dev/null
+++ b/scheduler/tests/test_old_models/test_old_models.py
@@ -0,0 +1,544 @@
+import zoneinfo
+from datetime import datetime, timedelta
+
+from django.contrib.messages import get_messages
+from django.core.exceptions import ValidationError
+from django.test import override_settings
+from django.urls import reverse
+from django.utils import timezone
+from freezegun import freeze_time
+
+from scheduler import settings
+from scheduler.models import BaseTask, TaskArg, TaskKwarg, ScheduledTask
+from scheduler.queues import get_queue
+from scheduler.tests import jobs
+from scheduler.tests.testtools import (
+ old_task_factory, taskarg_factory, _get_task_job_execution_from_registry, SchedulerBaseCase, _get_executions)
+from scheduler.tools import run_task, create_worker
+
+
+def assert_response_has_msg(response, message):
+ messages = [m.message for m in get_messages(response.wsgi_request)]
+ assert message in messages, f'expected "{message}" in {messages}'
+
+
+def assert_has_execution_with_status(task, status):
+ job_list = _get_executions(task)
+ job_list = [(j.id, j.get_status()) for j in job_list]
+ for job in job_list:
+ if job[1] == status:
+ return
+ raise AssertionError(f"{task} does not have an execution with status {status}: {job_list}")
+
+
+class BaseTestCases:
+ class TestBaseTask(SchedulerBaseCase):
+ TaskModelClass = BaseTask
+
+ def test_callable_func(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.callable = "scheduler.tests.jobs.test_job"
+ func = task.callable_func()
+ self.assertEqual(jobs.test_job, func)
+
+ def test_callable_func_not_callable(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(TypeError):
+ task.callable_func()
+
+ def test_clean_callable(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(task.clean_callable())
+
+ def test_clean_callable_invalid(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(ValidationError):
+ task.clean_callable()
+
+ def test_clean_queue(self):
+ for queue in settings.QUEUES.keys():
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = queue
+ self.assertIsNone(task.clean_queue())
+
+ def test_clean_queue_invalid(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = "xxxxxx"
+ task.callable = "scheduler.tests.jobs.test_job"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+        # The next 2 tests check that the validations above are included in the job.clean() function
+ def test_clean_base(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(task.clean())
+
+ def test_clean_invalid_callable(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+ def test_clean_invalid_queue(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = "xxxxxx"
+ task.callable = "scheduler.tests.jobs.test_job"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+ def test_is_schedulable_already_scheduled(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ task.schedule()
+ self.assertTrue(task.is_scheduled())
+
+ def test_is_schedulable_disabled(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.enabled = False
+ self.assertFalse(task.enabled)
+
+ def test_schedule(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertTrue(task.is_scheduled())
+ self.assertIsNotNone(task.job_id)
+
+ def test_unschedulable(self):
+ task = old_task_factory(self.TaskModelClass, enabled=False)
+ self.assertFalse(task.is_scheduled())
+ self.assertIsNone(task.job_id)
+
+ def test_unschedule(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertTrue(task.unschedule())
+ self.assertIsNone(task.job_id)
+
+ def test_unschedule_not_scheduled(self):
+ task = old_task_factory(self.TaskModelClass, enabled=False)
+ self.assertTrue(task.unschedule())
+ self.assertIsNone(task.job_id)
+
+ def test_save_enabled(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertIsNotNone(task.job_id)
+
+ def test_save_disabled(self):
+ task = old_task_factory(self.TaskModelClass, enabled=False)
+ task.save()
+ self.assertIsNone(task.job_id)
+
+ def test_save_and_schedule(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+
+ def test_schedule2(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.queue = list(settings.QUEUES)[0]
+ task.enabled = False
+ task.scheduled_time = timezone.now() + timedelta(minutes=1)
+ self.assertFalse(task.schedule())
+
+ def test_delete_and_unschedule(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ task.delete()
+ self.assertFalse(task.is_scheduled())
+
+ def test_job_create(self):
+ prev_count = self.TaskModelClass.objects.count()
+ old_task_factory(self.TaskModelClass)
+ self.assertEqual(self.TaskModelClass.objects.count(), prev_count + 1)
+
+ def test_str(self):
+ name = "test"
+ task = old_task_factory(self.TaskModelClass, name=name)
+ self.assertEqual(f"{self.TaskModelClass.__name__}[{name}={task.callable}()]", str(task))
+
+ def test_callable_passthrough(self):
+ task = old_task_factory(self.TaskModelClass)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.func, run_task)
+ job_model, job_id = entry.args
+ self.assertEqual(job_model, self.TaskModelClass.__name__)
+ self.assertEqual(job_id, task.id)
+
+ def test_timeout_passthrough(self):
+ task = old_task_factory(self.TaskModelClass, timeout=500)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.timeout, 500)
+
+ def test_at_front_passthrough(self):
+ task = old_task_factory(self.TaskModelClass, at_front=True)
+ queue = task.rqueue
+ jobs_to_schedule = queue.scheduled_job_registry.get_job_ids()
+ self.assertIn(task.job_id, jobs_to_schedule)
+
+ def test_callable_result(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), 2)
+
+ def test_callable_empty_args_and_kwargs(self):
+ task = old_task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), "test_args_kwargs()")
+
+ def test_delete_args(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ arg = taskarg_factory(TaskArg, val="one", content_object=task)
+ self.assertEqual(1, task.callable_args.count())
+ arg.delete()
+ self.assertEqual(0, task.callable_args.count())
+
+ def test_delete_kwargs(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ kwarg = taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task)
+ self.assertEqual(1, task.callable_kwargs.count())
+ kwarg.delete()
+ self.assertEqual(0, task.callable_kwargs.count())
+
+ def test_parse_args(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ date = timezone.now()
+ taskarg_factory(TaskArg, val="one", content_object=task)
+ taskarg_factory(TaskArg, arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=True, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=False, content_object=task)
+ taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task)
+ self.assertEqual(task.parse_args(), ["one", 2, True, False, date])
+
+ def test_parse_kwargs(self):
+ job = old_task_factory(
+ self.TaskModelClass,
+ )
+ date = timezone.now()
+ taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=job)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=job)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=True, content_object=job)
+ taskarg_factory(TaskKwarg, key="key4", arg_type="datetime", val=date, content_object=job)
+ kwargs = job.parse_kwargs()
+ self.assertEqual(kwargs, dict(key1="one", key2=2, key3=True, key4=date))
+
+ def test_callable_args_and_kwargs(self):
+ task = old_task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
+ date = timezone.now()
+ taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskKwarg, key="key1", arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=False, content_object=task)
+ task.save()
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), "test_args_kwargs('one', key1=2, key2={}, key3=False)".format(date))
+
+ def test_function_string(self):
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ date = timezone.now()
+ taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskArg, arg_type="int", val="1", content_object=task)
+ taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=True, content_object=task)
+ taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskKwarg, key="key4", arg_type="bool", val=False, content_object=task)
+ self.assertEqual(
+ task.function_string(),
+ f"scheduler.tests.jobs.test_job('one', 1, {repr(date)}, True, "
+ f"key1='one', key2=2, key3={repr(date)}, key4=False)",
+ )
+
+ def test_admin_list_view(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ job = old_task_factory(
+ self.TaskModelClass,
+ )
+ model = job._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_list_view_delete_model(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(
+ url,
+ data={
+ "action": "delete_model",
+ "_selected_action": [
+ task.pk,
+ ],
+ },
+ )
+ # assert
+ self.assertEqual(302, res.status_code)
+
+ def test_admin_run_job_now_enqueues_job_at(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(
+ url,
+ data={
+ "action": "enqueue_job_now",
+ "_selected_action": [
+ task.pk,
+ ],
+ },
+ )
+ # assert
+ self.assertEqual(302, res.status_code)
+ task.refresh_from_db()
+ queue = get_queue(task.queue)
+ self.assertIn(task.job_id, queue.get_job_ids())
+
+ def test_admin_change_view(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass)
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_change",
+ args=[
+ task.pk,
+ ],
+ )
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_change_view__bad_redis_connection(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass, queue="test2", instance_only=True)
+ task.save(schedule_job=False)
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_change",
+ args=[
+ task.pk,
+ ],
+ )
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_enqueue_job_now(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass)
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ data = {
+ "action": "enqueue_job_now",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+
+ # assert part 1
+ self.assertEqual(200, res.status_code)
+ entry = _get_task_job_execution_from_registry(task)
+ task_model, scheduled_task_id = entry.args
+ self.assertEqual(task_model, task.task_type)
+ self.assertEqual(scheduled_task_id, task.id)
+ self.assertEqual("scheduled", entry.get_status())
+ assert_has_execution_with_status(task, "queued")
+
+ # act 2
+ worker = create_worker(
+ "default",
+ fork_job_execution=False,
+ )
+ worker.work(burst=True)
+
+ # assert 2
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(task_model, task.task_type)
+ self.assertEqual(scheduled_task_id, task.id)
+ assert_has_execution_with_status(task, "finished")
+
+ def test_admin_enable_job(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass, enabled=False)
+ self.assertIsNone(task.job_id)
+ self.assertFalse(task.is_scheduled())
+ data = {
+ "action": "enable_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ task.refresh_from_db()
+ self.assertTrue(task.enabled)
+ self.assertTrue(task.is_scheduled())
+ assert_response_has_msg(res, "1 task was successfully enabled and scheduled.")
+
+ def test_admin_disable_job(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass, enabled=True)
+ task.save()
+ data = {
+ "action": "disable_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ self.assertTrue(task.is_scheduled())
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ task.refresh_from_db()
+ self.assertFalse(task.is_scheduled())
+ self.assertFalse(task.enabled)
+ assert_response_has_msg(res, "1 task was successfully disabled and unscheduled.")
+
+ def test_admin_single_delete(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ prev_count = self.TaskModelClass.objects.count()
+ task = old_task_factory(
+ self.TaskModelClass,
+ )
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ prev = len(_get_executions(task))
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_delete",
+ args=[
+ task.pk,
+ ],
+ )
+ data = {
+ "post": "yes",
+ }
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ self.assertEqual(prev_count, self.TaskModelClass.objects.count())
+ self.assertEqual(prev - 1, len(_get_executions(task)))
+
+ def test_admin_delete_selected(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = old_task_factory(self.TaskModelClass, enabled=True)
+ task.save()
+ queue = get_queue(task.queue)
+ scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
+ job_id = task.job_id
+ self.assertIn(job_id, scheduled_jobs)
+ data = {
+ "action": "delete_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ "post": "yes",
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ assert_response_has_msg(res, f"Successfully deleted 1 {self.TaskModelClass._meta.verbose_name}.")
+ self.assertIsNone(self.TaskModelClass.objects.filter(id=task.id).first())
+ scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
+ self.assertNotIn(job_id, scheduled_jobs)
+
+ class TestSchedulableTask(TestBaseTask):
+        # Currently covers ScheduledTask and RepeatableTask
+ TaskModelClass = ScheduledTask
+
+ @freeze_time("2016-12-25")
+ @override_settings(USE_TZ=False)
+ def test_schedule_time_no_tz(self):
+ task = old_task_factory(self.TaskModelClass)
+ task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=None)
+ self.assertEqual("2016-12-25T08:00:00", task._schedule_time().isoformat())
+
+ @freeze_time("2016-12-25")
+ @override_settings(USE_TZ=True)
+ def test_schedule_time_with_tz(self):
+ task = old_task_factory(self.TaskModelClass)
+ est = zoneinfo.ZoneInfo("US/Eastern")
+ task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=est)
+ self.assertEqual("2016-12-25T13:00:00+00:00", task._schedule_time().isoformat())
+
+ def test_result_ttl_passthrough(self):
+ job = old_task_factory(self.TaskModelClass, result_ttl=500)
+ entry = _get_task_job_execution_from_registry(job)
+ self.assertEqual(entry.result_ttl, 500)
+
+
+class TestScheduledTask(BaseTestCases.TestSchedulableTask):
+ TaskModelClass = ScheduledTask
+
+ def test_clean(self):
+ job = old_task_factory(self.TaskModelClass)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(job.clean())
+
+ def test_unschedulable_old_job(self):
+ job = old_task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1))
+ self.assertFalse(job.is_scheduled())
diff --git a/scheduler/tests/test_repeatable_task.py b/scheduler/tests/test_old_models/test_old_repeatable_task.py
similarity index 74%
rename from scheduler/tests/test_repeatable_task.py
rename to scheduler/tests/test_old_models/test_old_repeatable_task.py
index c55c0c0..6005cd3 100644
--- a/scheduler/tests/test_repeatable_task.py
+++ b/scheduler/tests/test_old_models/test_old_repeatable_task.py
@@ -6,24 +6,24 @@
from scheduler import settings
from scheduler.models import RepeatableTask
-from scheduler.tests.test_models import BaseTestCases
-from .testtools import task_factory, _get_job_from_scheduled_registry
+from scheduler.tests.test_old_models.test_old_task_model import BaseTestCases
+from scheduler.tests.testtools import old_task_factory, _get_task_job_execution_from_registry
-class TestRepeatableTask(BaseTestCases.TestSchedulableJob):
+class TestRepeatableTask(BaseTestCases.TestSchedulableTask):
TaskModelClass = RepeatableTask
def test_unschedulable_old_job(self):
- job = task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1), repeat=0)
+ job = old_task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1), repeat=0)
self.assertFalse(job.is_scheduled())
def test_schedulable_old_job_repeat_none(self):
# If repeat is None, the job should be scheduled
- job = task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1), repeat=None)
+ job = old_task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1), repeat=None)
self.assertTrue(job.is_scheduled())
def test_clean(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 1
@@ -31,7 +31,7 @@ def test_clean(self):
self.assertIsNone(job.clean())
def test_clean_seconds(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 60
@@ -45,7 +45,7 @@ def test_clean_seconds(self):
}
)
def test_clean_too_frequent(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 2 # Smaller than 10
@@ -55,7 +55,7 @@ def test_clean_too_frequent(self):
job.clean_interval_unit()
def test_clean_not_multiple(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 121
@@ -64,7 +64,7 @@ def test_clean_not_multiple(self):
job.clean_interval_unit()
def test_clean_short_result_ttl(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 1
@@ -76,7 +76,7 @@ def test_clean_short_result_ttl(self):
job.clean_result_ttl()
def test_clean_indefinite_result_ttl(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 1
@@ -85,7 +85,7 @@ def test_clean_indefinite_result_ttl(self):
job.clean_result_ttl()
def test_clean_undefined_result_ttl(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
job.interval = 1
@@ -93,19 +93,19 @@ def test_clean_undefined_result_ttl(self):
job.clean_result_ttl()
def test_interval_seconds_weeks(self):
- job = task_factory(self.TaskModelClass, interval=2, interval_unit="weeks")
+ job = old_task_factory(self.TaskModelClass, interval=2, interval_unit="weeks")
self.assertEqual(1209600.0, job.interval_seconds())
def test_interval_seconds_days(self):
- job = task_factory(self.TaskModelClass, interval=2, interval_unit="days")
+ job = old_task_factory(self.TaskModelClass, interval=2, interval_unit="days")
self.assertEqual(172800.0, job.interval_seconds())
def test_interval_seconds_hours(self):
- job = task_factory(self.TaskModelClass, interval=2, interval_unit="hours")
+ job = old_task_factory(self.TaskModelClass, interval=2, interval_unit="hours")
self.assertEqual(7200.0, job.interval_seconds())
def test_interval_seconds_minutes(self):
- job = task_factory(self.TaskModelClass, interval=15, interval_unit="minutes")
+ job = old_task_factory(self.TaskModelClass, interval=15, interval_unit="minutes")
self.assertEqual(900.0, job.interval_seconds())
def test_interval_seconds_seconds(self):
@@ -113,42 +113,44 @@ def test_interval_seconds_seconds(self):
self.assertEqual(15.0, job.interval_seconds())
def test_interval_display(self):
- job = task_factory(self.TaskModelClass, interval=15, interval_unit="minutes")
+ job = old_task_factory(self.TaskModelClass, interval=15, interval_unit="minutes")
self.assertEqual(job.interval_display(), "15 minutes")
def test_result_interval(self):
- job = task_factory(
+ job = old_task_factory(
self.TaskModelClass,
)
- entry = _get_job_from_scheduled_registry(job)
+ entry = _get_task_job_execution_from_registry(job)
self.assertEqual(entry.meta["interval"], 3600)
def test_repeat(self):
- job = task_factory(self.TaskModelClass, repeat=10)
- entry = _get_job_from_scheduled_registry(job)
+ job = old_task_factory(self.TaskModelClass, repeat=10)
+ entry = _get_task_job_execution_from_registry(job)
self.assertEqual(entry.meta["repeat"], 10)
def test_repeat_old_job_exhausted(self):
base_time = timezone.now()
- job = task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(hours=10), repeat=10)
+ job = old_task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(hours=10), repeat=10)
self.assertEqual(job.is_scheduled(), False)
def test_repeat_old_job_last_iter(self):
base_time = timezone.now()
- job = task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(hours=9, minutes=30), repeat=10)
+ job = old_task_factory(
+ self.TaskModelClass, scheduled_time=base_time - timedelta(hours=9, minutes=30), repeat=10,
+ )
self.assertEqual(job.repeat, 0)
self.assertEqual(job.is_scheduled(), True)
def test_repeat_old_job_remaining(self):
base_time = timezone.now()
- job = task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(minutes=30), repeat=5)
+ job = old_task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(minutes=30), repeat=5)
self.assertEqual(job.repeat, 4)
self.assertEqual(job.scheduled_time, base_time + timedelta(minutes=30))
self.assertEqual(job.is_scheduled(), True)
def test_repeat_none_interval_2_min(self):
base_time = timezone.now()
- job = task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(minutes=29), repeat=None)
+ job = old_task_factory(self.TaskModelClass, scheduled_time=base_time - timedelta(minutes=29), repeat=None)
job.interval = 120
job.interval_unit = "seconds"
job.schedule()
@@ -156,7 +158,7 @@ def test_repeat_none_interval_2_min(self):
self.assertTrue(job.is_scheduled())
def test_check_rescheduled_after_execution(self):
- task = task_factory(self.TaskModelClass, scheduled_time=timezone.now() + timedelta(seconds=1), repeat=10)
+ task = old_task_factory(self.TaskModelClass, scheduled_time=timezone.now() + timedelta(seconds=1), repeat=10)
queue = task.rqueue
first_run_id = task.job_id
entry = queue.fetch_job(first_run_id)
@@ -170,7 +172,7 @@ def test_check_rescheduled_after_execution(self):
self.assertNotEqual(task.job_id, first_run_id)
def test_check_rescheduled_after_execution_failed_job(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
callable_name="scheduler.tests.jobs.failing_job",
scheduled_time=timezone.now() + timedelta(seconds=1),
@@ -189,7 +191,7 @@ def test_check_rescheduled_after_execution_failed_job(self):
self.assertNotEqual(task.job_id, first_run_id)
def test_check_not_rescheduled_after_last_repeat(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
scheduled_time=timezone.now() + timedelta(seconds=1),
repeat=1,
diff --git a/scheduler/tests/test_models.py b/scheduler/tests/test_old_models/test_old_task_model.py
similarity index 82%
rename from scheduler/tests/test_models.py
rename to scheduler/tests/test_old_models/test_old_task_model.py
index 2a1873d..c8c9afe 100644
--- a/scheduler/tests/test_models.py
+++ b/scheduler/tests/test_old_models/test_old_task_model.py
@@ -11,15 +11,10 @@
from scheduler import settings
from scheduler.models import BaseTask, TaskArg, TaskKwarg, ScheduledTask
from scheduler.tools import run_task, create_worker
-from . import jobs
-from .testtools import (
- task_factory,
- taskarg_factory,
- _get_job_from_scheduled_registry,
- SchedulerBaseCase,
- _get_executions,
-)
-from ..queues import get_queue
+from scheduler.tests import jobs
+from scheduler.tests.testtools import old_task_factory, taskarg_factory, _get_task_job_execution_from_registry, \
+ SchedulerBaseCase, _get_executions
+from scheduler.queues import get_queue
def assert_response_has_msg(response, message):
@@ -41,36 +36,36 @@ class TestBaseTask(SchedulerBaseCase):
TaskModelClass = BaseTask
def test_callable_func(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.callable = "scheduler.tests.jobs.test_job"
func = task.callable_func()
self.assertEqual(jobs.test_job, func)
def test_callable_func_not_callable(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.callable = "scheduler.tests.jobs.test_non_callable"
with self.assertRaises(TypeError):
task.callable_func()
def test_clean_callable(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.callable = "scheduler.tests.jobs.test_job"
self.assertIsNone(task.clean_callable())
def test_clean_callable_invalid(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.callable = "scheduler.tests.jobs.test_non_callable"
with self.assertRaises(ValidationError):
task.clean_callable()
def test_clean_queue(self):
for queue in settings.QUEUES.keys():
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = queue
self.assertIsNone(task.clean_queue())
def test_clean_queue_invalid(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = "xxxxxx"
task.callable = "scheduler.tests.jobs.test_job"
with self.assertRaises(ValidationError):
@@ -78,88 +73,88 @@ def test_clean_queue_invalid(self):
# next 2 check the above are included in job.clean() function
def test_clean_base(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = list(settings.QUEUES)[0]
task.callable = "scheduler.tests.jobs.test_job"
self.assertIsNone(task.clean())
def test_clean_invalid_callable(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = list(settings.QUEUES)[0]
task.callable = "scheduler.tests.jobs.test_non_callable"
with self.assertRaises(ValidationError):
task.clean()
def test_clean_invalid_queue(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = "xxxxxx"
task.callable = "scheduler.tests.jobs.test_job"
with self.assertRaises(ValidationError):
task.clean()
def test_is_schedulable_already_scheduled(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
task.schedule()
self.assertTrue(task.is_scheduled())
def test_is_schedulable_disabled(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.enabled = False
self.assertFalse(task.enabled)
def test_schedule(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertTrue(task.is_scheduled())
self.assertIsNotNone(task.job_id)
def test_unschedulable(self):
- task = task_factory(self.TaskModelClass, enabled=False)
+ task = old_task_factory(self.TaskModelClass, enabled=False)
self.assertFalse(task.is_scheduled())
self.assertIsNone(task.job_id)
def test_unschedule(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertTrue(task.unschedule())
self.assertIsNone(task.job_id)
def test_unschedule_not_scheduled(self):
- task = task_factory(self.TaskModelClass, enabled=False)
+ task = old_task_factory(self.TaskModelClass, enabled=False)
self.assertTrue(task.unschedule())
self.assertIsNone(task.job_id)
def test_save_enabled(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertIsNotNone(task.job_id)
def test_save_disabled(self):
- task = task_factory(self.TaskModelClass, enabled=False)
+ task = old_task_factory(self.TaskModelClass, enabled=False)
task.save()
self.assertIsNone(task.job_id)
def test_save_and_schedule(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertIsNotNone(task.job_id)
self.assertTrue(task.is_scheduled())
def test_schedule2(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.queue = list(settings.QUEUES)[0]
task.enabled = False
task.scheduled_time = timezone.now() + timedelta(minutes=1)
self.assertFalse(task.schedule())
def test_delete_and_unschedule(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertIsNotNone(task.job_id)
@@ -169,47 +164,47 @@ def test_delete_and_unschedule(self):
def test_job_create(self):
prev_count = self.TaskModelClass.objects.count()
- task_factory(self.TaskModelClass)
+ old_task_factory(self.TaskModelClass)
self.assertEqual(self.TaskModelClass.objects.count(), prev_count + 1)
def test_str(self):
name = "test"
- task = task_factory(self.TaskModelClass, name=name)
+ task = old_task_factory(self.TaskModelClass, name=name)
self.assertEqual(f"{self.TaskModelClass.__name__}[{name}={task.callable}()]", str(task))
def test_callable_passthrough(self):
- task = task_factory(self.TaskModelClass)
- entry = _get_job_from_scheduled_registry(task)
+ task = old_task_factory(self.TaskModelClass)
+ entry = _get_task_job_execution_from_registry(task)
self.assertEqual(entry.func, run_task)
job_model, job_id = entry.args
self.assertEqual(job_model, self.TaskModelClass.__name__)
self.assertEqual(job_id, task.id)
def test_timeout_passthrough(self):
- task = task_factory(self.TaskModelClass, timeout=500)
- entry = _get_job_from_scheduled_registry(task)
+ task = old_task_factory(self.TaskModelClass, timeout=500)
+ entry = _get_task_job_execution_from_registry(task)
self.assertEqual(entry.timeout, 500)
def test_at_front_passthrough(self):
- task = task_factory(self.TaskModelClass, at_front=True)
+ task = old_task_factory(self.TaskModelClass, at_front=True)
queue = task.rqueue
jobs_to_schedule = queue.scheduled_job_registry.get_job_ids()
self.assertIn(task.job_id, jobs_to_schedule)
def test_callable_result(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
- entry = _get_job_from_scheduled_registry(task)
+ entry = _get_task_job_execution_from_registry(task)
self.assertEqual(entry.perform(), 2)
def test_callable_empty_args_and_kwargs(self):
- task = task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
- entry = _get_job_from_scheduled_registry(task)
+ task = old_task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
+ entry = _get_task_job_execution_from_registry(task)
self.assertEqual(entry.perform(), "test_args_kwargs()")
def test_delete_args(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
arg = taskarg_factory(TaskArg, val="one", content_object=task)
@@ -218,7 +213,7 @@ def test_delete_args(self):
self.assertEqual(0, task.callable_args.count())
def test_delete_kwargs(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
kwarg = taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task)
@@ -227,7 +222,7 @@ def test_delete_kwargs(self):
self.assertEqual(0, task.callable_kwargs.count())
def test_parse_args(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
date = timezone.now()
@@ -239,7 +234,7 @@ def test_parse_args(self):
self.assertEqual(task.parse_args(), ["one", 2, True, False, date])
def test_parse_kwargs(self):
- job = task_factory(
+ job = old_task_factory(
self.TaskModelClass,
)
date = timezone.now()
@@ -251,18 +246,18 @@ def test_parse_kwargs(self):
self.assertEqual(kwargs, dict(key1="one", key2=2, key3=True, key4=date))
def test_callable_args_and_kwargs(self):
- task = task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
+ task = old_task_factory(self.TaskModelClass, callable="scheduler.tests.jobs.test_args_kwargs")
date = timezone.now()
taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task)
taskarg_factory(TaskKwarg, key="key1", arg_type="int", val=2, content_object=task)
taskarg_factory(TaskKwarg, key="key2", arg_type="datetime", val=date, content_object=task)
taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=False, content_object=task)
task.save()
- entry = _get_job_from_scheduled_registry(task)
+ entry = _get_task_job_execution_from_registry(task)
self.assertEqual(entry.perform(), "test_args_kwargs('one', key1=2, key2={}, key3=False)".format(date))
def test_function_string(self):
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
date = timezone.now()
@@ -283,7 +278,7 @@ def test_function_string(self):
def test_admin_list_view(self):
# arrange
self.client.login(username="admin", password="admin")
- job = task_factory(
+ job = old_task_factory(
self.TaskModelClass,
)
model = job._meta.model.__name__.lower()
@@ -296,7 +291,7 @@ def test_admin_list_view(self):
def test_admin_list_view_delete_model(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
model = task._meta.model.__name__.lower()
@@ -317,7 +312,7 @@ def test_admin_list_view_delete_model(self):
def test_admin_run_job_now_enqueues_job_at(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
model = task._meta.model.__name__.lower()
@@ -341,7 +336,7 @@ def test_admin_run_job_now_enqueues_job_at(self):
def test_admin_change_view(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
model = task._meta.model.__name__.lower()
@@ -359,7 +354,7 @@ def test_admin_change_view(self):
def test_admin_change_view__bad_redis_connection(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(self.TaskModelClass, queue="test2", instance_only=True)
+ task = old_task_factory(self.TaskModelClass, queue="test2", instance_only=True)
task.save(schedule_job=False)
model = task._meta.model.__name__.lower()
url = reverse(
@@ -376,9 +371,7 @@ def test_admin_change_view__bad_redis_connection(self):
def test_admin_enqueue_job_now(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(
- self.TaskModelClass,
- )
+ task = old_task_factory(self.TaskModelClass)
self.assertIsNotNone(task.job_id)
self.assertTrue(task.is_scheduled())
data = {
@@ -394,9 +387,9 @@ def test_admin_enqueue_job_now(self):
# assert part 1
self.assertEqual(200, res.status_code)
- entry = _get_job_from_scheduled_registry(task)
+ entry = _get_task_job_execution_from_registry(task)
task_model, scheduled_task_id = entry.args
- self.assertEqual(task_model, task.TASK_TYPE)
+ self.assertEqual(task_model, task.task_type)
self.assertEqual(scheduled_task_id, task.id)
self.assertEqual("scheduled", entry.get_status())
assert_has_execution_with_status(task, "queued")
@@ -409,15 +402,15 @@ def test_admin_enqueue_job_now(self):
worker.work(burst=True)
# assert 2
- entry = _get_job_from_scheduled_registry(task)
- self.assertEqual(task_model, task.TASK_TYPE)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(task_model, task.task_type)
self.assertEqual(scheduled_task_id, task.id)
assert_has_execution_with_status(task, "finished")
def test_admin_enable_job(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(self.TaskModelClass, enabled=False)
+ task = old_task_factory(self.TaskModelClass, enabled=False)
self.assertIsNone(task.job_id)
self.assertFalse(task.is_scheduled())
data = {
@@ -435,12 +428,12 @@ def test_admin_enable_job(self):
task.refresh_from_db()
self.assertTrue(task.enabled)
self.assertTrue(task.is_scheduled())
- assert_response_has_msg(res, "1 job was successfully enabled and scheduled.")
+ assert_response_has_msg(res, "1 task was successfully enabled and scheduled.")
def test_admin_disable_job(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(self.TaskModelClass, enabled=True)
+ task = old_task_factory(self.TaskModelClass, enabled=True)
task.save()
data = {
"action": "disable_selected",
@@ -458,13 +451,13 @@ def test_admin_disable_job(self):
task.refresh_from_db()
self.assertFalse(task.is_scheduled())
self.assertFalse(task.enabled)
- assert_response_has_msg(res, "1 job was successfully disabled and unscheduled.")
+ assert_response_has_msg(res, "1 task was successfully disabled and unscheduled.")
def test_admin_single_delete(self):
# arrange
self.client.login(username="admin", password="admin")
prev_count = self.TaskModelClass.objects.count()
- task = task_factory(
+ task = old_task_factory(
self.TaskModelClass,
)
self.assertIsNotNone(task.job_id)
@@ -490,7 +483,7 @@ def test_admin_single_delete(self):
def test_admin_delete_selected(self):
# arrange
self.client.login(username="admin", password="admin")
- task = task_factory(self.TaskModelClass, enabled=True)
+ task = old_task_factory(self.TaskModelClass, enabled=True)
task.save()
queue = get_queue(task.queue)
scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
@@ -514,40 +507,40 @@ def test_admin_delete_selected(self):
scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
self.assertNotIn(job_id, scheduled_jobs)
- class TestSchedulableJob(TestBaseTask):
+ class TestSchedulableTask(TestBaseTask):
# Currently ScheduledJob and RepeatableJob
TaskModelClass = ScheduledTask
@freeze_time("2016-12-25")
@override_settings(USE_TZ=False)
def test_schedule_time_no_tz(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=None)
self.assertEqual("2016-12-25T08:00:00", task._schedule_time().isoformat())
@freeze_time("2016-12-25")
@override_settings(USE_TZ=True)
def test_schedule_time_with_tz(self):
- task = task_factory(self.TaskModelClass)
+ task = old_task_factory(self.TaskModelClass)
est = zoneinfo.ZoneInfo("US/Eastern")
task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=est)
self.assertEqual("2016-12-25T13:00:00+00:00", task._schedule_time().isoformat())
def test_result_ttl_passthrough(self):
- job = task_factory(self.TaskModelClass, result_ttl=500)
- entry = _get_job_from_scheduled_registry(job)
+ job = old_task_factory(self.TaskModelClass, result_ttl=500)
+ entry = _get_task_job_execution_from_registry(job)
self.assertEqual(entry.result_ttl, 500)
-class TestScheduledJob(BaseTestCases.TestSchedulableJob):
+class TestScheduledJob(BaseTestCases.TestSchedulableTask):
TaskModelClass = ScheduledTask
def test_clean(self):
- job = task_factory(self.TaskModelClass)
+ job = old_task_factory(self.TaskModelClass)
job.queue = list(settings.QUEUES)[0]
job.callable = "scheduler.tests.jobs.test_job"
self.assertIsNone(job.clean())
def test_unschedulable_old_job(self):
- job = task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1))
+ job = old_task_factory(self.TaskModelClass, scheduled_time=timezone.now() - timedelta(hours=1))
self.assertFalse(job.is_scheduled())
diff --git a/scheduler/tests/test_task_types/__init__.py b/scheduler/tests/test_task_types/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scheduler/tests/test_task_types/test_cron_task.py b/scheduler/tests/test_task_types/test_cron_task.py
new file mode 100644
index 0000000..a7d2a7a
--- /dev/null
+++ b/scheduler/tests/test_task_types/test_cron_task.py
@@ -0,0 +1,78 @@
+from django.core.exceptions import ValidationError
+
+from scheduler import settings
+from scheduler.queues import get_queue
+from scheduler.tests.test_task_types.test_task_model import BaseTestCases
+from scheduler.tests.testtools import task_factory
+from scheduler.tools import create_worker, TaskType
+
+
+class TestCronTask(BaseTestCases.TestBaseTask):
+ task_type = TaskType.CRON
+
+ def test_clean(self):
+ task = task_factory(self.task_type)
+ task.cron_string = "* * * * *"
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(task.clean())
+
+ def test_clean_cron_string_invalid(self):
+ task = task_factory(self.task_type)
+ task.cron_string = "not-a-cron-string"
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_job"
+ with self.assertRaises(ValidationError):
+ task.clean_cron_string()
+
+ def test_check_rescheduled_after_execution(self):
+ task = task_factory(self.task_type)
+ queue = task.rqueue
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ task.refresh_from_db()
+ self.assertEqual(task.failed_runs, 0)
+ self.assertIsNone(task.last_failed_run)
+ self.assertEqual(task.successful_runs, 1)
+ self.assertIsNotNone(task.last_successful_run)
+ self.assertTrue(task.is_scheduled())
+ self.assertNotEqual(task.job_id, first_run_id)
+
+ def test_check_rescheduled_after_failed_execution(self):
+ task = task_factory(
+ self.task_type,
+ callable_name="scheduler.tests.jobs.failing_job",
+ )
+ queue = task.rqueue
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ task.refresh_from_db()
+ self.assertEqual(task.failed_runs, 1)
+ self.assertIsNotNone(task.last_failed_run)
+ self.assertEqual(task.successful_runs, 0)
+ self.assertIsNone(task.last_successful_run)
+ self.assertTrue(task.is_scheduled())
+ self.assertNotEqual(task.job_id, first_run_id)
+
+ def test_cron_task_enqueuing_jobs(self):
+ queue = get_queue()
+ prev_queued = len(queue.scheduled_job_registry)
+ prev_finished = len(queue.finished_job_registry)
+ task = task_factory(self.task_type, callable_name="scheduler.tests.jobs.enqueue_jobs")
+ self.assertEqual(prev_queued + 1, len(queue.scheduled_job_registry))
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ self.assertEqual(20, len(queue))
+ self.assertEqual(prev_finished + 1, len(queue.finished_job_registry))
+ worker = create_worker(
+ "default",
+ fork_job_execution=False,
+ )
+ worker.work(burst=True)
+ self.assertEqual(prev_finished + 21, len(queue.finished_job_registry))
+ worker.refresh()
+ self.assertEqual(20, worker.successful_job_count)
+ self.assertEqual(0, worker.failed_job_count)
diff --git a/scheduler/tests/test_task_types/test_once_task.py b/scheduler/tests/test_task_types/test_once_task.py
new file mode 100644
index 0000000..f9b686c
--- /dev/null
+++ b/scheduler/tests/test_task_types/test_once_task.py
@@ -0,0 +1,22 @@
+from datetime import timedelta
+
+from django.utils import timezone
+
+from scheduler import settings
+from scheduler.models.task import TaskType
+from scheduler.tests.test_task_types.test_task_model import BaseTestCases
+from scheduler.tests.testtools import task_factory
+
+
+class TestScheduledTask(BaseTestCases.TestSchedulableTask):
+ task_type = TaskType.ONCE
+
+ def test_clean(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(job.clean())
+
+ def test_unschedulable_old_job(self):
+ job = task_factory(self.task_type, scheduled_time=timezone.now() - timedelta(hours=1))
+ self.assertFalse(job.is_scheduled())
diff --git a/scheduler/tests/test_task_types/test_repeatable_task.py b/scheduler/tests/test_task_types/test_repeatable_task.py
new file mode 100644
index 0000000..4dd9aec
--- /dev/null
+++ b/scheduler/tests/test_task_types/test_repeatable_task.py
@@ -0,0 +1,203 @@
+from datetime import timedelta
+
+from django.core.exceptions import ValidationError
+from django.test import override_settings
+from django.utils import timezone
+
+from scheduler import settings
+from scheduler.models import RepeatableTask
+from scheduler.tests.testtools import task_factory, _get_task_job_execution_from_registry
+from scheduler.tools import TaskType
+from scheduler.tests.test_task_types.test_task_model import BaseTestCases
+
+
+class TestRepeatableTask(BaseTestCases.TestSchedulableTask):
+ task_type = TaskType.REPEATABLE
+
+ def test_unschedulable_old_job(self):
+ job = task_factory(self.task_type, scheduled_time=timezone.now() - timedelta(hours=1), repeat=0)
+ self.assertFalse(job.is_scheduled())
+
+ def test_schedulable_old_job_repeat_none(self):
+ # If repeat is None, the job should be scheduled
+ job = task_factory(self.task_type, scheduled_time=timezone.now() - timedelta(hours=1), repeat=None)
+ self.assertTrue(job.is_scheduled())
+
+ def test_clean(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 1
+ job.result_ttl = -1
+ self.assertIsNone(job.clean())
+
+ def test_clean_seconds(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 60
+ job.result_ttl = -1
+ job.interval_unit = "seconds"
+ self.assertIsNone(job.clean())
+
+ @override_settings(
+ SCHEDULER_CONFIG={
+ "SCHEDULER_INTERVAL": 10,
+ }
+ )
+ def test_clean_too_frequent(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 2 # Smaller than 10
+ job.result_ttl = -1
+ job.interval_unit = "seconds"
+ with self.assertRaises(ValidationError):
+ job.clean_interval_unit()
+
+ def test_clean_not_multiple(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 121
+ job.interval_unit = "seconds"
+ with self.assertRaises(ValidationError):
+ job.clean_interval_unit()
+
+ def test_clean_short_result_ttl(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 1
+ job.repeat = 1
+ job.result_ttl = 3599
+ job.interval_unit = "hours"
+ job.repeat = 42
+ with self.assertRaises(ValidationError):
+ job.clean_result_ttl()
+
+ def test_clean_indefinite_result_ttl(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 1
+ job.result_ttl = -1
+ job.interval_unit = "hours"
+ job.clean_result_ttl()
+
+ def test_clean_undefined_result_ttl(self):
+ job = task_factory(self.task_type)
+ job.queue = list(settings.QUEUES)[0]
+ job.callable = "scheduler.tests.jobs.test_job"
+ job.interval = 1
+ job.interval_unit = "hours"
+ job.clean_result_ttl()
+
+ def test_interval_seconds_weeks(self):
+ job = task_factory(self.task_type, interval=2, interval_unit="weeks")
+ self.assertEqual(1209600.0, job.interval_seconds())
+
+ def test_interval_seconds_days(self):
+ job = task_factory(self.task_type, interval=2, interval_unit="days")
+ self.assertEqual(172800.0, job.interval_seconds())
+
+ def test_interval_seconds_hours(self):
+ job = task_factory(self.task_type, interval=2, interval_unit="hours")
+ self.assertEqual(7200.0, job.interval_seconds())
+
+ def test_interval_seconds_minutes(self):
+ job = task_factory(self.task_type, interval=15, interval_unit="minutes")
+ self.assertEqual(900.0, job.interval_seconds())
+
+ def test_interval_seconds_seconds(self):
+ job = RepeatableTask(interval=15, interval_unit="seconds")
+ self.assertEqual(15.0, job.interval_seconds())
+
+ def test_result_interval(self):
+ job = task_factory(
+ self.task_type,
+ )
+ entry = _get_task_job_execution_from_registry(job)
+ self.assertEqual(entry.meta["interval"], 3600)
+
+ def test_repeat(self):
+ job = task_factory(self.task_type, repeat=10)
+ entry = _get_task_job_execution_from_registry(job)
+ self.assertEqual(entry.meta["repeat"], 10)
+
+ def test_repeat_old_job_exhausted(self):
+ base_time = timezone.now()
+ job = task_factory(self.task_type, scheduled_time=base_time - timedelta(hours=10), repeat=10)
+ self.assertEqual(job.is_scheduled(), False)
+
+ def test_repeat_old_job_last_iter(self):
+ base_time = timezone.now()
+ job = task_factory(self.task_type, scheduled_time=base_time - timedelta(hours=9, minutes=30), repeat=10)
+ self.assertEqual(job.repeat, 0)
+ self.assertEqual(job.is_scheduled(), True)
+
+ def test_repeat_old_job_remaining(self):
+ base_time = timezone.now()
+ job = task_factory(self.task_type, scheduled_time=base_time - timedelta(minutes=30), repeat=5)
+ self.assertEqual(job.repeat, 4)
+ self.assertEqual(job.scheduled_time, base_time + timedelta(minutes=30))
+ self.assertEqual(job.is_scheduled(), True)
+
+ def test_repeat_none_interval_2_min(self):
+ base_time = timezone.now()
+ job = task_factory(self.task_type, scheduled_time=base_time - timedelta(minutes=29), repeat=None)
+ job.interval = 120
+ job.interval_unit = "seconds"
+ job.schedule()
+ self.assertTrue(job.scheduled_time > base_time)
+ self.assertTrue(job.is_scheduled())
+
+ def test_check_rescheduled_after_execution(self):
+ task = task_factory(self.task_type, scheduled_time=timezone.now() + timedelta(seconds=1), repeat=10)
+ queue = task.rqueue
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ task.refresh_from_db()
+ self.assertEqual(task.failed_runs, 0)
+ self.assertIsNone(task.last_failed_run)
+ self.assertEqual(task.successful_runs, 1)
+ self.assertIsNotNone(task.last_successful_run)
+ self.assertTrue(task.is_scheduled())
+ self.assertNotEqual(task.job_id, first_run_id)
+
+ def test_check_rescheduled_after_execution_failed_job(self):
+ task = task_factory(
+ self.task_type,
+ callable_name="scheduler.tests.jobs.failing_job",
+ scheduled_time=timezone.now() + timedelta(seconds=1),
+ repeat=10,
+ )
+ queue = task.rqueue
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ task.refresh_from_db()
+ self.assertEqual(task.failed_runs, 1)
+ self.assertIsNotNone(task.last_failed_run)
+ self.assertEqual(task.successful_runs, 0)
+ self.assertIsNone(task.last_successful_run)
+ self.assertTrue(task.is_scheduled())
+ self.assertNotEqual(task.job_id, first_run_id)
+
+ def test_check_not_rescheduled_after_last_repeat(self):
+ task = task_factory(
+ self.task_type,
+ scheduled_time=timezone.now() + timedelta(seconds=1),
+ repeat=1,
+ )
+ queue = task.rqueue
+ first_run_id = task.job_id
+ entry = queue.fetch_job(first_run_id)
+ queue.run_sync(entry)
+ task.refresh_from_db()
+ self.assertEqual(task.failed_runs, 0)
+ self.assertIsNone(task.last_failed_run)
+ self.assertEqual(task.successful_runs, 1)
+ self.assertIsNotNone(task.last_successful_run)
+ self.assertNotEqual(task.job_id, first_run_id)
diff --git a/scheduler/tests/test_task_types/test_task_model.py b/scheduler/tests/test_task_types/test_task_model.py
new file mode 100644
index 0000000..02de659
--- /dev/null
+++ b/scheduler/tests/test_task_types/test_task_model.py
@@ -0,0 +1,508 @@
+import zoneinfo
+from datetime import datetime, timedelta
+
+from django.contrib.messages import get_messages
+from django.core.exceptions import ValidationError
+from django.test import override_settings
+from django.urls import reverse
+from django.utils import timezone
+from freezegun import freeze_time
+
+from scheduler import settings
+from scheduler.models import Task, TaskArg, TaskKwarg
+from scheduler.models.task import TaskType
+from scheduler.queues import get_queue
+from scheduler.tests import jobs
+from scheduler.tests.testtools import (
+ task_factory, taskarg_factory, _get_task_job_execution_from_registry,
+ SchedulerBaseCase, _get_executions, )
+from scheduler.tools import run_task, create_worker
+
+
+def assert_response_has_msg(response, message):
+ messages = [m.message for m in get_messages(response.wsgi_request)]
+ assert message in messages, f'expected "{message}" in {messages}'
+
+
+def assert_has_execution_with_status(task, status):
+ job_list = _get_executions(task)
+ job_list = [(j.id, j.get_status()) for j in job_list]
+ for job in job_list:
+ if job[1] == status:
+ return
+ raise AssertionError(f"{task} does not have an execution with status {status}: {job_list}")
+
+
+class BaseTestCases:
+ class TestBaseTask(SchedulerBaseCase):
+ task_type = None
+
+ def test_callable_func(self):
+ task = task_factory(self.task_type)
+ task.callable = "scheduler.tests.jobs.test_job"
+ func = task.callable_func()
+ self.assertEqual(jobs.test_job, func)
+
+ def test_callable_func_not_callable(self):
+ task = task_factory(self.task_type)
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(TypeError):
+ task.callable_func()
+
+ def test_clean_callable(self):
+ task = task_factory(self.task_type)
+ task.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(task.clean_callable())
+
+ def test_clean_callable_invalid(self):
+ task = task_factory(self.task_type)
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(ValidationError):
+ task.clean_callable()
+
+ def test_clean_queue(self):
+ for queue in settings.QUEUES.keys():
+ task = task_factory(self.task_type)
+ task.queue = queue
+ self.assertIsNone(task.clean_queue())
+
+ def test_clean_queue_invalid(self):
+ task = task_factory(self.task_type)
+ task.queue = "xxxxxx"
+ task.callable = "scheduler.tests.jobs.test_job"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+ # The next two tests verify that the checks above are invoked by task.clean()
+ def test_clean_base(self):
+ task = task_factory(self.task_type)
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_job"
+ self.assertIsNone(task.clean())
+
+ def test_clean_invalid_callable(self):
+ task = task_factory(self.task_type)
+ task.queue = list(settings.QUEUES)[0]
+ task.callable = "scheduler.tests.jobs.test_non_callable"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+ def test_clean_invalid_queue(self):
+ task = task_factory(self.task_type)
+ task.queue = "xxxxxx"
+ task.callable = "scheduler.tests.jobs.test_job"
+ with self.assertRaises(ValidationError):
+ task.clean()
+
+ def test_is_schedulable_already_scheduled(self):
+ task = task_factory(self.task_type)
+ task.schedule()
+ self.assertTrue(task.is_scheduled())
+
+ def test_is_schedulable_disabled(self):
+ task = task_factory(self.task_type)
+ task.enabled = False
+ self.assertFalse(task.enabled)
+
+ def test_schedule(self):
+ task = task_factory(
+ self.task_type,
+ )
+ self.assertTrue(task.is_scheduled())
+ self.assertIsNotNone(task.job_id)
+
+ def test_unschedulable(self):
+ task = task_factory(self.task_type, enabled=False)
+ self.assertFalse(task.is_scheduled())
+ self.assertIsNone(task.job_id)
+
+ def test_unschedule(self):
+ task = task_factory(self.task_type)
+ self.assertTrue(task.unschedule())
+ self.assertIsNone(task.job_id)
+
+ def test_unschedule_not_scheduled(self):
+ task = task_factory(self.task_type, enabled=False)
+ self.assertTrue(task.unschedule())
+ self.assertIsNone(task.job_id)
+
+ def test_save_enabled(self):
+ task = task_factory(self.task_type)
+ self.assertIsNotNone(task.job_id)
+
+ def test_save_disabled(self):
+ task = task_factory(self.task_type, enabled=False)
+ task.save()
+ self.assertIsNone(task.job_id)
+
+ def test_save_and_schedule(self):
+ task = task_factory(self.task_type)
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+
+ def test_schedule2(self):
+ task = task_factory(self.task_type)
+ task.queue = list(settings.QUEUES)[0]
+ task.enabled = False
+ task.scheduled_time = timezone.now() + timedelta(minutes=1)
+ self.assertFalse(task.schedule())
+
+ def test_delete_and_unschedule(self):
+ task = task_factory(self.task_type)
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ task.delete()
+ self.assertFalse(task.is_scheduled())
+
+ def test_job_create(self):
+ prev_count = Task.objects.filter(task_type=self.task_type).count()
+ task_factory(self.task_type)
+ self.assertEqual(Task.objects.filter(task_type=self.task_type).count(), prev_count + 1)
+
+ def test_str(self):
+ name = "test"
+ task = task_factory(self.task_type, name=name)
+ self.assertEqual(f"{self.task_type.value}[{name}={task.callable}()]", str(task))
+
+ def test_callable_passthrough(self):
+ task = task_factory(self.task_type)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.func, run_task)
+ job_model, job_id = entry.args
+ self.assertEqual(job_model, self.task_type.value)
+ self.assertEqual(job_id, task.id)
+
+ def test_timeout_passthrough(self):
+ task = task_factory(self.task_type, timeout=500)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.timeout, 500)
+
+ def test_at_front_passthrough(self):
+ task = task_factory(self.task_type, at_front=True)
+ queue = task.rqueue
+ jobs_to_schedule = queue.scheduled_job_registry.get_job_ids()
+ self.assertIn(task.job_id, jobs_to_schedule)
+
+ def test_callable_result(self):
+ task = task_factory(self.task_type)
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), 2)
+
+ def test_callable_empty_args_and_kwargs(self):
+ task = task_factory(self.task_type, callable="scheduler.tests.jobs.test_args_kwargs")
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), "test_args_kwargs()")
+
+ def test_delete_args(self):
+ task = task_factory(self.task_type)
+ arg = taskarg_factory(TaskArg, val="one", content_object=task)
+ self.assertEqual(1, task.callable_args.count())
+ arg.delete()
+ self.assertEqual(0, task.callable_args.count())
+
+ def test_delete_kwargs(self):
+ task = task_factory(self.task_type)
+ kwarg = taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task)
+ self.assertEqual(1, task.callable_kwargs.count())
+ kwarg.delete()
+ self.assertEqual(0, task.callable_kwargs.count())
+
+ def test_parse_args(self):
+ task = task_factory(self.task_type)
+ date = timezone.now()
+ taskarg_factory(TaskArg, val="one", content_object=task)
+ taskarg_factory(TaskArg, arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=True, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=False, content_object=task)
+ taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task)
+ self.assertEqual(task.parse_args(), ["one", 2, True, False, date])
+
+ def test_parse_kwargs(self):
+ job = task_factory(self.task_type)
+ date = timezone.now()
+ taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=job)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=job)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=True, content_object=job)
+ taskarg_factory(TaskKwarg, key="key4", arg_type="datetime", val=date, content_object=job)
+ kwargs = job.parse_kwargs()
+ self.assertEqual(kwargs, dict(key1="one", key2=2, key3=True, key4=date))
+
+ def test_callable_args_and_kwargs(self):
+ task = task_factory(self.task_type, callable="scheduler.tests.jobs.test_args_kwargs")
+ date = timezone.now()
+ taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskKwarg, key="key1", arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="bool", val=False, content_object=task)
+ task.save()
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(entry.perform(), "test_args_kwargs('one', key1=2, key2={}, key3=False)".format(date))
+
+ def test_function_string(self):
+ task = task_factory(self.task_type)
+ date = timezone.now()
+ taskarg_factory(TaskArg, arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskArg, arg_type="int", val="1", content_object=task)
+ taskarg_factory(TaskArg, arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskArg, arg_type="bool", val=True, content_object=task)
+ taskarg_factory(TaskKwarg, key="key1", arg_type="str", val="one", content_object=task)
+ taskarg_factory(TaskKwarg, key="key2", arg_type="int", val=2, content_object=task)
+ taskarg_factory(TaskKwarg, key="key3", arg_type="datetime", val=date, content_object=task)
+ taskarg_factory(TaskKwarg, key="key4", arg_type="bool", val=False, content_object=task)
+ self.assertEqual(
+ task.function_string(),
+ f"scheduler.tests.jobs.test_job('one', 1, {repr(date)}, True, "
+ f"key1='one', key2=2, key3={repr(date)}, key4=False)",
+ )
+
+ def test_admin_list_view(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ job = task_factory(self.task_type)
+ model = job._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_list_view_delete_model(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(
+ self.task_type,
+ )
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(
+ url,
+ data={
+ "action": "delete_model",
+ "_selected_action": [
+ task.pk,
+ ],
+ },
+ )
+ # assert
+ self.assertEqual(302, res.status_code)
+
+ def test_admin_run_job_now_enqueues_job_at(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type)
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(
+ url,
+ data={
+ "action": "enqueue_job_now",
+ "_selected_action": [
+ task.pk,
+ ],
+ },
+ )
+ # assert
+ self.assertEqual(302, res.status_code)
+ task.refresh_from_db()
+ queue = get_queue(task.queue)
+ self.assertIn(task.job_id, queue.get_job_ids())
+
+ def test_admin_change_view(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(
+ self.task_type,
+ )
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_change",
+ args=[
+ task.pk,
+ ],
+ )
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_change_view__bad_redis_connection(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type, queue="test2", instance_only=True)
+ task.save(schedule_job=False)
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_change",
+ args=[
+ task.pk,
+ ],
+ )
+ # act
+ res = self.client.get(url)
+ # assert
+ self.assertEqual(200, res.status_code)
+
+ def test_admin_enqueue_job_now(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type)
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ data = {
+ "action": "enqueue_job_now",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+
+ # assert part 1
+ self.assertEqual(200, res.status_code)
+ entry = _get_task_job_execution_from_registry(task)
+ task_model, scheduled_task_id = entry.args
+ self.assertEqual(task_model, task.task_type)
+ self.assertEqual(scheduled_task_id, task.id)
+ self.assertEqual("scheduled", entry.get_status())
+ assert_has_execution_with_status(task, "queued")
+
+ # act 2
+ worker = create_worker(
+ "default",
+ fork_job_execution=False,
+ )
+ worker.work(burst=True)
+
+ # assert 2
+ entry = _get_task_job_execution_from_registry(task)
+ self.assertEqual(task_model, task.task_type)
+ self.assertEqual(scheduled_task_id, task.id)
+ assert_has_execution_with_status(task, "finished")
+
+ def test_admin_enable_job(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type, enabled=False)
+ self.assertIsNone(task.job_id)
+ self.assertFalse(task.is_scheduled())
+ data = {
+ "action": "enable_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ task.refresh_from_db()
+ self.assertTrue(task.enabled)
+ self.assertTrue(task.is_scheduled())
+ assert_response_has_msg(res, "1 task was successfully enabled and scheduled.")
+
+ def test_admin_disable_job(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type, enabled=True)
+ task.save()
+ data = {
+ "action": "disable_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ self.assertTrue(task.is_scheduled())
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ task.refresh_from_db()
+ self.assertFalse(task.is_scheduled())
+ self.assertFalse(task.enabled)
+ assert_response_has_msg(res, "1 task was successfully disabled and unscheduled.")
+
+ def test_admin_single_delete(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ prev_count = Task.objects.filter(task_type=self.task_type).count()
+ task = task_factory(
+ self.task_type,
+ )
+ self.assertIsNotNone(task.job_id)
+ self.assertTrue(task.is_scheduled())
+ prev = len(_get_executions(task))
+ model = task._meta.model.__name__.lower()
+ url = reverse(
+ f"admin:scheduler_{model}_delete",
+ args=[
+ task.pk,
+ ],
+ )
+ data = {
+ "post": "yes",
+ }
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ self.assertEqual(prev_count, Task.objects.filter(task_type=self.task_type).count())
+ self.assertEqual(prev - 1, len(_get_executions(task)))
+
+ def test_admin_delete_selected(self):
+ # arrange
+ self.client.login(username="admin", password="admin")
+ task = task_factory(self.task_type, enabled=True)
+ task.save()
+ queue = get_queue(task.queue)
+ scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
+ job_id = task.job_id
+ self.assertIn(job_id, scheduled_jobs)
+ data = {
+ "action": "delete_selected",
+ "_selected_action": [
+ task.id,
+ ],
+ "post": "yes",
+ }
+ model = task._meta.model.__name__.lower()
+ url = reverse(f"admin:scheduler_{model}_changelist")
+ # act
+ res = self.client.post(url, data=data, follow=True)
+ # assert
+ self.assertEqual(200, res.status_code)
+ assert_response_has_msg(res, "Successfully deleted 1 task.")
+ self.assertIsNone(Task.objects.filter(task_type=self.task_type).filter(id=task.id).first())
+ scheduled_jobs = queue.scheduled_job_registry.get_job_ids()
+ self.assertNotIn(job_id, scheduled_jobs)
+
+ class TestSchedulableTask(TestBaseTask):
+ # Currently ScheduledJob and RepeatableJob
+ task_type = TaskType.ONCE
+
+ @freeze_time("2016-12-25")
+ @override_settings(USE_TZ=False)
+ def test_schedule_time_no_tz(self):
+ task = task_factory(self.task_type)
+ task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=None)
+ self.assertEqual("2016-12-25T08:00:00", task._schedule_time().isoformat())
+
+ @freeze_time("2016-12-25")
+ @override_settings(USE_TZ=True)
+ def test_schedule_time_with_tz(self):
+ task = task_factory(self.task_type)
+ est = zoneinfo.ZoneInfo("US/Eastern")
+ task.scheduled_time = datetime(2016, 12, 25, 8, 0, 0, tzinfo=est)
+ self.assertEqual("2016-12-25T13:00:00+00:00", task._schedule_time().isoformat())
+
+ def test_result_ttl_passthrough(self):
+ job = task_factory(self.task_type, result_ttl=500)
+ entry = _get_task_job_execution_from_registry(job)
+ self.assertEqual(entry.result_ttl, 500)
diff --git a/scheduler/tests/test_views.py b/scheduler/tests/test_views.py
index afae1d0..5d6f227 100644
--- a/scheduler/tests/test_views.py
+++ b/scheduler/tests/test_views.py
@@ -8,12 +8,11 @@
from django.urls import reverse
from scheduler.queues import get_queue
-from scheduler.tools import create_worker
-from . import test_settings # noqa
-from .jobs import failing_job, long_job, test_job
-from .testtools import assert_message_in_response, task_factory, _get_job_from_scheduled_registry
-from ..models import ScheduledTask
-from ..rq_classes import JobExecution, ExecutionStatus
+from scheduler.rq_classes import JobExecution, ExecutionStatus
+from scheduler.tests import test_settings # noqa
+from scheduler.tests.jobs import failing_job, long_job, test_job
+from scheduler.tests.testtools import assert_message_in_response, task_factory, _get_task_job_execution_from_registry
+from scheduler.tools import create_worker, TaskType
class BaseTestCase(TestCase):
@@ -358,8 +357,8 @@ def test_job_details(self):
def test_scheduled_job_details(self):
"""Job data is displayed properly"""
- scheduled_job = task_factory(ScheduledTask, enabled=True)
- job = _get_job_from_scheduled_registry(scheduled_job)
+ scheduled_job = task_factory(TaskType.ONCE, enabled=True)
+ job = _get_task_job_execution_from_registry(scheduled_job)
url = reverse(
"job_details",
diff --git a/scheduler/tests/testtools.py b/scheduler/tests/testtools.py
index 561327f..8548486 100644
--- a/scheduler/tests/testtools.py
+++ b/scheduler/tests/testtools.py
@@ -8,6 +8,7 @@
from scheduler import settings
from scheduler.models import CronTask, TaskKwarg, RepeatableTask, ScheduledTask, BaseTask
+from scheduler.models.task import TaskType, Task
from scheduler.queues import get_queue
@@ -26,7 +27,50 @@ def sequence_gen():
seq = sequence_gen()
-def task_factory(cls, callable_name: str = "scheduler.tests.jobs.test_job", instance_only=False, **kwargs):
+def task_factory(
+ task_type: TaskType, callable_name: str = "scheduler.tests.jobs.test_job", instance_only=False, **kwargs
+):
+ values = dict(
+ name="Scheduled Job %d" % next(seq),
+ job_id=None,
+ queue=list(settings.QUEUES.keys())[0],
+ callable=callable_name,
+ enabled=True,
+ timeout=None,
+ )
+ if task_type == TaskType.ONCE:
+ values.update(
+ dict(
+ result_ttl=None,
+ scheduled_time=timezone.now() + timedelta(days=1),
+ )
+ )
+ elif task_type == TaskType.REPEATABLE:
+ values.update(
+ dict(
+ result_ttl=None,
+ interval=1,
+ interval_unit="hours",
+ repeat=None,
+ scheduled_time=timezone.now() + timedelta(days=1),
+ )
+ )
+ elif task_type == TaskType.CRON:
+ values.update(
+ dict(
+ cron_string="0 0 * * *",
+ )
+ )
+ values.update(kwargs)
+ if instance_only:
+ instance = Task(task_type=task_type, **values)
+ else:
+ instance = Task.objects.create(task_type=task_type, **values)
+ return instance
+
+
+# TODO remove
+def old_task_factory(cls, callable_name: str = "scheduler.tests.jobs.test_job", instance_only=False, **kwargs):
values = dict(
name="Scheduled Job %d" % next(seq),
job_id=None,
@@ -69,7 +113,7 @@ def task_factory(cls, callable_name: str = "scheduler.tests.jobs.test_job", inst
def taskarg_factory(cls, **kwargs):
content_object = kwargs.pop("content_object", None)
if content_object is None:
- content_object = task_factory(ScheduledTask)
+ content_object = old_task_factory(ScheduledTask)
values = dict(
arg_type="str",
val="",
@@ -84,7 +128,7 @@ def taskarg_factory(cls, **kwargs):
return instance
-def _get_job_from_scheduled_registry(django_task: BaseTask):
+def _get_task_job_execution_from_registry(django_task: BaseTask):
jobs_to_schedule = django_task.rqueue.scheduled_job_registry.get_job_ids()
entry = next(i for i in jobs_to_schedule if i == django_task.job_id)
return django_task.rqueue.fetch_job(entry)
@@ -113,7 +157,7 @@ def setUp(self) -> None:
queue.empty()
def tearDown(self) -> None:
- super(SchedulerBaseCase, self).setUp()
+ super(SchedulerBaseCase, self).tearDown()
queue = get_queue("default")
queue.empty()
diff --git a/scheduler/tools.py b/scheduler/tools.py
index 3f5eac9..c73aae8 100644
--- a/scheduler/tools.py
+++ b/scheduler/tools.py
@@ -1,17 +1,25 @@
import importlib
import os
-from typing import List, Any, Callable
+from typing import List, Any, Callable, Optional
import croniter
from django.apps import apps
+from django.db import models
from django.utils import timezone
from django.utils.module_loading import import_string
+from django.utils.translation import gettext_lazy as _
from scheduler.queues import get_queues, logger, get_queue
-from scheduler.rq_classes import DjangoWorker, MODEL_NAMES, JobExecution
+from scheduler.rq_classes import DjangoWorker, JobExecution, TASK_TYPES, MODEL_NAMES
from scheduler.settings import SCHEDULER_CONFIG, Broker
+class TaskType(models.TextChoices):
+ CRON = "CronTaskType", _("Cron Task")
+ REPEATABLE = "RepeatableTaskType", _("Repeatable Task")
+ ONCE = "OnceTaskType", _("Run once")
+
+
def callable_func(callable_str: str) -> Callable:
path = callable_str.split(".")
module = importlib.import_module(".".join(path[:-1]))
@@ -21,22 +29,35 @@ def callable_func(callable_str: str) -> Callable:
return func
-def get_next_cron_time(cron_string) -> timezone.datetime:
+def get_next_cron_time(cron_string: Optional[str]) -> Optional[timezone.datetime]:
"""Calculate the next scheduled time by creating a crontab object with a cron string"""
+ if cron_string is None:
+ return None
now = timezone.now()
itr = croniter.croniter(cron_string, now)
next_itr = itr.get_next(timezone.datetime)
return next_itr
-def get_scheduled_task(task_model: str, task_id: int) -> "BaseTask": # noqa: F821
- if task_model not in MODEL_NAMES:
- raise ValueError(f"Job Model {task_model} does not exist, choices are {MODEL_NAMES}")
- model = apps.get_model(app_label="scheduler", model_name=task_model)
- task = model.objects.filter(id=task_id).first()
- if task is None:
- raise ValueError(f"Job {task_model}:{task_id} does not exit")
- return task
+def get_scheduled_task(task_type_str: str, task_id: int) -> "BaseTask": # noqa: F821
+ # Try with new model names
+ model = apps.get_model(app_label="scheduler", model_name="Task")
+ if task_type_str in TASK_TYPES:
+ try:
+ task_type = TaskType(task_type_str)
+ task = model.objects.filter(task_type=task_type, id=task_id).first()
+ if task is None:
+ raise ValueError(f"Job {task_type}:{task_id} does not exist")
+ return task
+ except ValueError:
+ raise ValueError(f"Invalid task type {task_type_str}")
+ elif task_type_str in MODEL_NAMES:
+ model = apps.get_model(app_label="scheduler", model_name=task_type_str)
+ task = model.objects.filter(id=task_id).first()
+ if task is None:
+ raise ValueError(f"Job {task_type_str}:{task_id} does not exist")
+ return task
+ raise ValueError(f"Job Model {task_type_str} does not exist, choices are {TASK_TYPES}")
def run_task(task_model: str, task_id: int) -> Any:
@@ -65,7 +86,7 @@ def create_worker(*queue_names, **kwargs) -> DjangoWorker:
queues = get_queues(*queue_names)
existing_workers = DjangoWorker.all(connection=queues[0].connection)
existing_worker_names = set(map(lambda w: w.name, existing_workers))
- kwargs["fork_job_execution"] = SCHEDULER_CONFIG.BROKER != Broker.FAKEREDIS
+ kwargs.setdefault("fork_job_execution", SCHEDULER_CONFIG.BROKER != Broker.FAKEREDIS)
if kwargs.get("name", None) is None:
kwargs["name"] = _calc_worker_name(existing_worker_names)
diff --git a/testproject/testproject/settings.py b/testproject/testproject/settings.py
index 1ab283c..e076068 100644
--- a/testproject/testproject/settings.py
+++ b/testproject/testproject/settings.py
@@ -44,9 +44,9 @@
"default": {
"BACKEND": "django.core.cache.backends.redis.RedisCache",
"LOCATION": [
- "redis://127.0.0.1:6379", # leader
+ "redis://127.0.0.1:6379",
],
- "OPTIONS": {"connection_class": FakeConnection},
+ "BROKER": "fakeredis",
}
}
TEMPLATES = [
@@ -115,19 +115,19 @@
STATIC_URL = "/static/"
SCHEDULER_QUEUES = {
"default": {
- "URL": f"redis://localhost:${BROKER_PORT}/0",
+ "URL": f"redis://localhost:{BROKER_PORT}/0",
},
"low": {
- "URL": f"redis://localhost:${BROKER_PORT}/0",
+ "URL": f"redis://localhost:{BROKER_PORT}/0",
},
"high": {
- "URL": f"redis://localhost:${BROKER_PORT}/1",
+ "URL": f"redis://localhost:{BROKER_PORT}/1",
},
"medium": {
- "URL": f"redis://localhost:${BROKER_PORT}/1",
+ "URL": f"redis://localhost:{BROKER_PORT}/1",
},
"another": {
- "URL": f"redis://localhost:${BROKER_PORT}/1",
+ "URL": f"redis://localhost:{BROKER_PORT}/1",
},
}
DEFAULT_AUTO_FIELD = "django.db.models.BigAutoField"