diff --git a/.github/actions/test-coverage/action.yml b/.github/actions/test-coverage/action.yml index aed62bc..59f958e 100644 --- a/.github/actions/test-coverage/action.yml +++ b/.github/actions/test-coverage/action.yml @@ -17,26 +17,26 @@ outputs: runs: using: "composite" steps: - - name: Run Tests with coverage + - name: Run regular tests with coverage shell: bash run: | cd testproject - poetry run coverage run manage.py test scheduler + uv run coverage run manage.py test --exclude-tag multiprocess scheduler - name: Coverage report id: coverage_report shell: bash run: | mv testproject/.coverage . echo 'REPORT<> $GITHUB_ENV - poetry run coverage report >> $GITHUB_ENV + uv run coverage report >> $GITHUB_ENV echo 'EOF' >> $GITHUB_ENV - name: json report id: json-report shell: bash run: | - poetry run coverage json + uv run coverage json echo "COVERAGE=$(jq '.totals.percent_covered_display|tonumber' coverage.json)" >> $GITHUB_ENV - - uses: mshick/add-pr-comment@v2 + - uses: mshick/add-pr-comment@dd126dd8c253650d181ad9538d8b4fa218fc31e8 if: ${{ github.event_name == 'pull_request' }} with: message: | diff --git a/.github/workflows/publish-documentation.yml b/.github/workflows/publish-documentation.yml index 0768868..74f9cbb 100644 --- a/.github/workflows/publish-documentation.yml +++ b/.github/workflows/publish-documentation.yml @@ -17,10 +17,12 @@ jobs: url: https://pypi.org/p/fakeredis steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - name: Configure Git Credentials run: | git config user.name github-actions[bot] diff --git a/.github/workflows/publish.yml b/.github/workflows/publish.yml index c28ab73..108d77f 100644 --- a/.github/workflows/publish.yml +++ b/.github/workflows/publish.yml @@ -10,17 +10,19 @@ env: PYPI_TEST_URL: https://test.pypi.org/p/django-tasks-scheduler jobs: - build: name: Build distribution 📦 runs-on: ubuntu-latest 
- + permissions: + id-token: write # IMPORTANT: this permission is mandatory for trusted publishing steps: - uses: actions/checkout@v4 + with: + persist-credentials: false - name: Set up Python uses: actions/setup-python@v5 with: - python-version: "3.12" + python-version: "3.13" - name: Install pypa/build run: python3 -m pip install build --user @@ -51,7 +53,7 @@ jobs: name: python-package-distributions path: dist/ - name: Publish distribution 📦 to PyPI - uses: pypa/gh-action-pypi-publish@release/v1 + uses: pypa/gh-action-pypi-publish@v1.12.4 publish-to-testpypi: name: Publish Python 🐍 distribution 📦 to TestPyPI @@ -73,7 +75,7 @@ jobs: name: python-package-distributions path: dist/ - name: Publish distribution 📦 to TestPyPI - uses: pypa/gh-action-pypi-publish@release/v1.10 + uses: pypa/gh-action-pypi-publish@v1.12.4 with: repository-url: https://test.pypi.org/legacy/ skip-existing: true \ No newline at end of file diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml index 79d0b61..e416acb 100644 --- a/.github/workflows/test.yml +++ b/.github/workflows/test.yml @@ -13,36 +13,40 @@ jobs: ruff: runs-on: ubuntu-latest name: "ruff on code" + permissions: + contents: read steps: - uses: actions/checkout@v4 - - - name: "Setup Python, Poetry and Dependencies" - uses: dsoftwareinc/setup-python-poetry-action@v1 with: - python-version: "3.12" - poetry-version: "2.1.1" - + persist-credentials: false + - name: Install uv + uses: astral-sh/setup-uv@v6 + - uses: actions/setup-python@v5 + with: + cache-dependency-path: uv.lock + python-version: "3.13" - name: Run ruff shell: bash run: | - poetry run ruff check + uv run ruff check - test: + test-regular: needs: [ 'ruff' ] runs-on: ubuntu-latest - name: "Run tests ${{ matrix.python-version }}/${{ matrix.django-version }}/${{ matrix.broker }}" + name: "Tests py${{ matrix.python-version }}/dj${{ matrix.django-version }}/${{ matrix.broker }}" strategy: max-parallel: 6 matrix: - python-version: [ '3.10', '3.11', '3.12', 
'3.13' ] - django-version: [ '5.0.7', '5.1.7' ] + python-version: [ '3.11', '3.12', '3.13' ] + django-version: [ '5.1.8', '5.2' ] broker: [ 'redis', 'fakeredis', 'valkey' ] include: - - python-version: '3.12' - django-version: '5.1.7' + - python-version: '3.13' + django-version: '5.2' broker: 'redis' coverage: yes - + permissions: + pull-requests: write services: redis: image: redis:7.2.2 @@ -69,36 +73,35 @@ jobs: steps: - uses: actions/checkout@v4 - - - name: "Setup Python, Poetry and Dependencies" - uses: dsoftwareinc/setup-python-poetry-action@v1 with: + persist-credentials: false + - name: Install uv + uses: astral-sh/setup-uv@v6 + - uses: actions/setup-python@v5 + with: + cache-dependency-path: uv.lock python-version: "${{ matrix.python-version }}" - poetry-version: "2.1.1" - poetry-install-additional-args: "-E yaml" - name: Install django version shell: bash run: | - python -m pip --quiet install poetry - echo "$HOME/.poetry/bin" >> $GITHUB_PATH if [ ${{ matrix.broker == 'valkey' }} == true ]; then - additional_args="-E valkey" + additional_args="--extra valkey" fi - poetry install -E yaml $additional_args - poetry run pip install django==${{ matrix.django-version }} + uv sync --extra yaml $additional_args + uv pip install django==${{ matrix.django-version }} - name: Get version id: getVersion shell: bash run: | - VERSION=$(poetry version -s --no-ansi -n) + VERSION=$(uv version --short) echo "VERSION=$VERSION" >> $GITHUB_OUTPUT - name: Check for missing migrations run: | cd testproject - poetry run python manage.py makemigrations --check + uv run python manage.py makemigrations --check - name: Run Tests without coverage if: ${{ matrix.coverage != 'yes' }} @@ -110,7 +113,7 @@ jobs: else export BROKER_PORT=6379 fi - poetry run python manage.py test scheduler + uv run python manage.py test --exclude-tag multiprocess scheduler # Steps for coverage check - name: Run tests with coverage @@ -123,7 +126,7 @@ jobs: - name: Create coverage badge if: ${{ matrix.coverage 
== 'yes' && github.event_name == 'push' }} - uses: schneegans/dynamic-badges-action@v1.7.0 + uses: schneegans/dynamic-badges-action@7142847813c746736c986b42dec98541e49a2cea with: auth: ${{ secrets.GIST_SECRET }} gistID: b756396efb895f0e34558c980f1ca0c7 @@ -141,9 +144,9 @@ jobs: # write permission is required for auto-labeler # otherwise, read permission is required at least pull-requests: write - needs: test + needs: test-regular runs-on: ubuntu-latest steps: - - uses: release-drafter/release-drafter@v6 + - uses: release-drafter/release-drafter@b1476f6e6eb133afa41ed8589daba6dc69b4d3f5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} \ No newline at end of file diff --git a/.github/zizmor.yml b/.github/zizmor.yml new file mode 100644 index 0000000..c3a3542 --- /dev/null +++ b/.github/zizmor.yml @@ -0,0 +1,15 @@ +rules: + unpinned-images: + ignore: + - 'test.yml' + - 'test-dragonfly.yml' + unpinned-uses: + config: + policies: + actions/*: any + astral-sh/*: any + pypa/gh-action-pypi-publish: any + github-env: + ignore: + - 'action.yml:36:7' + - 'action.yml:28:7' \ No newline at end of file diff --git a/README.md b/README.md index 26673ae..f0ef295 100644 --- a/README.md +++ b/README.md @@ -4,7 +4,62 @@ Django Tasks Scheduler ![badge](https://img.shields.io/endpoint?url=https://gist.githubusercontent.com/cunla/b756396efb895f0e34558c980f1ca0c7/raw/django-tasks-scheduler-4.json) [![badge](https://img.shields.io/pypi/dm/django-tasks-scheduler)](https://pypi.org/project/django-tasks-scheduler/) -Documentation can be found in https://django-tasks-scheduler.readthedocs.io/en/latest/ +Documentation can be found in https://django-tasks-scheduler.readthedocs.io/ + +# Usage + +1. Update `settings.py` to include scheduler configuration: + +```python +import os +from typing import Dict +from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration + +INSTALLED_APPS = [ + # ... + 'scheduler', + # ... 
+] +SCHEDULER_CONFIG = SchedulerConfiguration( + EXECUTIONS_IN_PAGE=20, + SCHEDULER_INTERVAL=10, + BROKER=Broker.REDIS, + CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) + # Default values, can be overridden per task/job + DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results + DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information + DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information + DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job + # General configuration values + DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat + DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes. + DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. + SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks +) +SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { + 'default': QueueConfiguration(URL='redis://localhost:6379/0'), +} +``` + +2. Update `urls.py` to include scheduler urls: + +```python +from django.urls import path, include + +urlpatterns = [ + # ... + path('scheduler/', include('scheduler.urls')), +] +``` + +3. Run migrations: + +```bash +python manage.py migrate +``` + +4. Check out the admin views: + ![](./docs/media/admin-tasks-list.jpg) # Sponsor @@ -12,7 +12,7 @@ django-tasks-scheduler is developed for free. You can support this project by becoming a sponsor using [this link](https://github.com/sponsors/cunla). +# Contributing -# Contributing - -Interested in contributing, providing suggestions, or submitting bugs? See guidelines [at this link](.github/CONTRIBUTING.md). +Interested in contributing, providing suggestions, or submitting bugs? See +guidelines [at this link](.github/CONTRIBUTING.md). 
diff --git a/SECURITY.md b/SECURITY.md index 806683f..6b2bf97 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -2,9 +2,9 @@ ## Supported Versions -| Version | Supported | -|-------------|--------------------| -| 2023.latest | :white_check_mark: | +| Version | Supported | +|----------|--------------------| +| 4.latest | :white_check_mark: | ## Reporting a Vulnerability diff --git a/docs/changelog.md b/docs/changelog.md index 26dd172..9938783 100644 --- a/docs/changelog.md +++ b/docs/changelog.md @@ -1,5 +1,105 @@ # Changelog +## v4.0.5 🌈 + +### 🐛 Bug Fixes + +- fix:repeatable task without start date #276 +- fix:admin list of tasks showing local datetime #280 +- fix:wait for job child process using os.waitpid #281 + +### 🧰 Maintenance + +- refactor some tests + +## v4.0.4 🌈 + +### 🐛 Bug Fixes + +- Issue when `SCHEDULER_CONFIG` is a `dict` #273 +- Do not warn about _non_serializable_fields #274 + +### 🧰 Maintenance + +- Fix gha zizmor findings +- Update dependencies to latest versions + +## v4.0.3 🌈 + +### 🐛 Bug Fixes + +- Updated `scheduler_worker` management command argument to `--without-scheduler` since the worker has a scheduler by + default. + +## v4.0.2 🌈 + +### 🐛 Bug Fixes + +- Add type hint for `JOB_METHODS_LIST` +- Fix issue creating new `ONCE` task without a scheduled time #270 + +### 🧰 Maintenance + +- Update dependencies to latest versions +- Migrate to use `uv` instead of `poetry` for package management + +## v4.0.0 🌈 + +See breaking changes in 4.0.0 beta versions. + +### 🐛 Bug Fixes + +- Fix issue with non-primitive parameters for @job #249 + +## v4.0.0b3 🌈 + +Refactor the code to make it more organized and easier to maintain. This includes: + +- All types are under `types` instead of separated to `broker_types` and `settings_types`. +- Added `__all__` to `models`, and other packages. 
+ +## v4.0.0b2 🌈 + +### 🐛 Bug Fixes + +- Fix bug when `SCHEDULER_CONFIG` is `SchedulerConfiguration` + +## v4.0.0b1 🌈 + +### Breaking Changes + +This version is a full revamp of the package. The main changes are related to removing the RQ dependency. +Worker/Queue/Job are all implemented in the package itself. This change allows for more flexibility and control over +the tasks. + +Management commands: + +- `rqstats` => `scheduler_stats` +- `rqworker` => `scheduler_worker` + +Settings: + +- `SCHEDULER_CONFIG` is now a `SchedulerConfiguration` object to help IDE guide settings. +- `SCHEDULER_QUEUES` is now a list of `QueueConfiguration` objects to help IDE guide settings. +- Configuring queue to use `SSL`/`SSL_CERT_REQS`/`SOCKET_TIMEOUT` is now done using `CONNECTION_KWARGS` in + `QueueConfiguration` + ```python + SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { + 'default': QueueConfiguration( + HOST='localhost', + PORT=6379, + USERNAME='some-user', + PASSWORD='some-password', + CONNECTION_KWARGS={ # Eventual additional Broker connection arguments + 'ssl_cert_reqs': 'required', + 'ssl':True, + }, + ), + # ... + } + ``` +- For how to configure in `settings.py`, please see the [settings documentation](./configuration.md). + ## v3.0.2 🌈 ### 🐛 Bug Fixes diff --git a/docs/commands.md b/docs/commands.md index 7020cf9..7ea9e19 100644 --- a/docs/commands.md +++ b/docs/commands.md @@ -1,6 +1,6 @@ # Management commands -## rqworker +## `scheduler_worker` - Create a worker Create a new worker with a scheduler for specific queues by order of priority. If no queues are specified, will run on default queue only. @@ -8,11 +8,13 @@ If no queues are specified, will run on default queue only. All queues must have the same redis settings on `SCHEDULER_QUEUES`. 
```shell -usage: manage.py rqworker [-h] [--pid PIDFILE] [--burst] [--name NAME] [--worker-ttl WORKER_TTL] [--max-jobs MAX_JOBS] - [--fork-job-execution FORK_JOB_EXECUTION] [--job-class JOB_CLASS] [--sentry-dsn SENTRY_DSN] [--sentry-debug] - [--sentry-ca-certs SENTRY_CA_CERTS] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH] - [--traceback] [--no-color] [--force-color] [--skip-checks] - [queues ...] +usage: manage.py scheduler_worker [-h] [--pid PIDFILE] [--name NAME] [--worker-ttl WORKER_TTL] + [--fork-job-execution FORK_JOB_EXECUTION] [--sentry-dsn SENTRY_DSN] [--sentry-debug] + [--sentry-ca-certs SENTRY_CA_CERTS] [--burst] [--max-jobs MAX_JOBS] + [--max-idle-time MAX_IDLE_TIME] [--without-scheduler] [--version] [-v {0,1,2,3}] + [--settings SETTINGS] [--pythonpath PYTHONPATH] [--traceback] [--no-color] [--force-color] + [--skip-checks] + [queues ...] positional arguments: queues The queues to work on, separated by space, all queues should be using the same redis @@ -20,28 +22,29 @@ positional arguments: options: -h, --help show this help message and exit --pid PIDFILE file to write the worker`s pid into - --burst Run worker in burst mode --name NAME Name of the worker --worker-ttl WORKER_TTL Default worker timeout to be used - --max-jobs MAX_JOBS Maximum number of jobs to execute before terminating worker --fork-job-execution FORK_JOB_EXECUTION Fork job execution to another process - --job-class JOB_CLASS - Jobs class to use --sentry-dsn SENTRY_DSN Sentry DSN to use --sentry-debug Enable Sentry debug mode --sentry-ca-certs SENTRY_CA_CERTS Path to CA certs file + --burst Run worker in burst mode + --max-jobs MAX_JOBS Maximum number of jobs to execute before terminating worker + --max-idle-time MAX_IDLE_TIME + Maximum number of seconds to wait for new job before terminating worker + --without-scheduler Run worker without scheduler, default to with scheduler --version Show program's version number and exit. 
- -v {0,1,2,3}, --verbosity {0,1,2,3} + -v, --verbosity {0,1,2,3} Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output --settings SETTINGS The Python path to a settings module, e.g. "myproject.settings.main". If this isn't provided, the DJANGO_SETTINGS_MODULE environment variable will be used. --pythonpath PYTHONPATH A directory to add to the Python path, e.g. "/home/djangoprojects/myproject". - --traceback Raise on CommandError exceptions. + --traceback Display a full stack trace on CommandError exceptions. --no-color Don't colorize the command output. --force-color Force colorization of the command output. --skip-checks Skip system checks. @@ -49,7 +52,7 @@ options: -## export +## `export` - Export scheduled tasks Export all scheduled tasks from django db to json/yaml format. @@ -62,7 +65,7 @@ Result should be (for json): ```json [ { - "model": "ScheduledJob", + "model": "CronTaskType", "name": "Scheduled Task 1", "callable": "scheduler.tests.test_job", "callable_args": [ @@ -83,7 +86,7 @@ Result should be (for json): ] ``` -## import +## `import` - Import scheduled tasks A json/yaml that was exported using the `export` command can be imported to django. @@ -96,7 +99,7 @@ can be imported to django. python manage.py import -f {yaml,json} --filename {SOURCE-FILE} ``` -## run_job +## `run_job` - Run a job immediately Run a method in a queue immediately. @@ -104,10 +107,54 @@ Run a method in a queue immediately. python manage.py run_job {callable} {callable args ...} ``` -## delete failed jobs +## `delete_failed_jobs` - delete failed jobs Run this to empty failed jobs registry from a queue. 
```shell python manage.py delete_failed_jobs ``` + +## `scheduler_stats` - Show scheduler stats + +Prints scheduler stats as a table, json, or yaml, example: + +```shell +$ python manage.py scheduler_stats + +Django-Scheduler CLI Dashboard + +-------------------------------------------------------------------------------- +| Name | Queued | Active | Finished | Canceled | Workers | +-------------------------------------------------------------------------------- +| default | 0 | 0 | 0 | 0 | 0 | +| low | 0 | 0 | 0 | 0 | 0 | +| high | 0 | 0 | 0 | 0 | 0 | +| medium | 0 | 0 | 0 | 0 | 0 | +| another | 0 | 0 | 0 | 0 | 0 | +-------------------------------------------------------------------------------- +``` + +```shell +usage: manage.py scheduler_stats [-h] [-j] [-y] [-i INTERVAL] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH] [--traceback] [--no-color] [--force-color] [--skip-checks] + +Print statistics + +options: + -h, --help show this help message and exit + -j, --json Output statistics as JSON + -y, --yaml Output statistics as YAML + -i INTERVAL, --interval INTERVAL + Poll statistics every N seconds + --version Show program's version number and exit. + -v {0,1,2,3}, --verbosity {0,1,2,3} + Verbosity level; 0=minimal output, 1=normal output, 2=verbose output, 3=very verbose output + --settings SETTINGS The Python path to a settings module, e.g. "myproject.settings.main". If this isn't provided, the DJANGO_SETTINGS_MODULE environment variable will be used. + --pythonpath PYTHONPATH + A directory to add to the Python path, e.g. "/home/djangoprojects/myproject". + --traceback Raise on CommandError exceptions. + --no-color Don't colorize the command output. + --force-color Force colorization of the command output. + --skip-checks Skip system checks. 
+ +``` \ No newline at end of file diff --git a/docs/configuration.md b/docs/configuration.md index da5c5a9..00ae6e3 100644 --- a/docs/configuration.md +++ b/docs/configuration.md @@ -5,35 +5,39 @@ All default settings for scheduler can be in one dictionary in `settings.py`: ```python -SCHEDULER_CONFIG = { - 'EXECUTIONS_IN_PAGE': 20, - 'DEFAULT_RESULT_TTL': 500, - 'DEFAULT_TIMEOUT': 300, # 5 minutes - 'SCHEDULER_INTERVAL': 10, # 10 seconds - 'BROKER': 'redis', -} -SCHEDULER_QUEUES = { - 'default': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'USERNAME': 'some-user', - 'PASSWORD': 'some-password', - 'DEFAULT_TIMEOUT': 360, - 'CLIENT_KWARGS': { # Eventual additional Redis connection arguments - 'ssl_cert_reqs': None, +import os +from typing import Dict +from scheduler.types import SchedulerConfiguration, Broker, QueueConfiguration + +SCHEDULER_CONFIG = SchedulerConfiguration( + EXECUTIONS_IN_PAGE=20, + SCHEDULER_INTERVAL=10, + BROKER=Broker.REDIS, + CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) + # Default values, can be overriden per task/job + DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results + DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information + DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information + DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job + # General configuration values + DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat + DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes. + DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. 
+ SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks +) +SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { + 'default': QueueConfiguration( + HOST='localhost', + PORT=6379, + USERNAME='some-user', + PASSWORD='some-password', + CONNECTION_KWARGS={ # Eventual additional Broker connection arguments + 'ssl_cert_reqs': 'required', + 'ssl': True, }, - 'TOKEN_VALIDATION_METHOD': None, # Method to validate auth-header - }, - 'high': { - 'URL': os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0'), # If you're on Heroku - 'DEFAULT_TIMEOUT': 500, - }, - 'low': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - } + ), + 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')), + 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False), } ``` @@ -43,23 +47,58 @@ Number of job executions to show in a page in a ScheduledJob admin view. Default: `20`. -### SCHEDULER_CONFIG: `DEFAULT_RESULT_TTL` +### SCHEDULER_CONFIG: `SCHEDULER_INTERVAL` + +Default scheduler interval, a scheduler is a subprocess of a worker and +will check which job executions are pending. + +Default: `10` (10 seconds). + +### SCHEDULER_CONFIG: `BROKER` -Default time to live for job execution result. +### SCHEDULER_CONFIG: `CALLBACK_TIMEOUT` + +### SCHEDULER_CONFIG: `DEFAULT_SUCCESS_TTL` + +Default time to live for job execution result when it is successful. + +Default: `600` (10 minutes). + +### SCHEDULER_CONFIG: `DEFAULT_FAILURE_TTL` + +Default time to live for job execution result when it is failed. Default: `600` (10 minutes). -### SCHEDULER_CONFIG: `DEFAULT_TIMEOUT` +### SCHEDULER_CONFIG: `DEFAULT_JOB_TTL` + +Default timeout for job info. -Default timeout for job when it is not mentioned in queue. Default: `300` (5 minutes). 
-### SCHEDULER_CONFIG: `SCHEDULER_INTERVAL` +### SCHEDULER_CONFIG: `DEFAULT_JOB_TIMEOUT` -Default scheduler interval, a scheduler is a subprocess of a worker and -will check which job executions are pending. +timeout (seconds) for a job. -Default: `10` (10 seconds). +Default: `300` (5 minutes). + +### SCHEDULER_CONFIG: `DEFAULT_WORKER_TTL` + +Time To Live (TTL) in seconds to keep worker information after last heartbeat. +Default: `600` (10 minutes). + +### SCHEDULER_CONFIG: `DEFAULT_MAINTENANCE_TASK_INTERVAL` + +The interval to run worker maintenance tasks in seconds. +Default: `600` 10 minutes. + +### SCHEDULER_CONFIG: `DEFAULT_JOB_MONITORING_INTERVAL` + +The interval to monitor jobs in seconds. + +### SCHEDULER_CONFIG: `SCHEDULER_FALLBACK_PERIOD_SECS` + +Period (secs) to wait before requiring to reacquire locks. ### SCHEDULER_CONFIG: `TOKEN_VALIDATION_METHOD` @@ -68,12 +107,6 @@ Enables checking stats using API token. Default: no tokens allowed. -### SCHEDULER_CONFIG: `BROKER` - -Broker driver to use for the scheduler. Can be `redis` or `valkey` or `fakeredis`. - -Default: `redis`. - ### `SCHEDULER_QUEUES` You can configure the queues to work with. 
diff --git a/docs/drt-model.md b/docs/drt-model.md index d11c328..545658e 100644 --- a/docs/drt-model.md +++ b/docs/drt-model.md @@ -1,6 +1,6 @@ # Worker related flows -Running `python manage.py startworker --name 'X' --queues high default low` +Running `python manage.py scheduler_worker --name 'X' --queues high default low` ## Register new worker for queues ```mermaid @@ -48,8 +48,8 @@ sequenceDiagram note over worker,job: Find next job loop over queueKeys until job to run is found or all queues are empty - worker ->>+ queue: get next job id and remove it or None (zrange+zpop) - queue -->>- worker: job id / nothing + worker ->>+ queue: get next job name and remove it or None (zrange+zpop) + queue -->>- worker: job name / nothing end note over worker,job: Execute job or sleep diff --git a/docs/index.md b/docs/index.md index 7235283..c4d74c6 100644 --- a/docs/index.md +++ b/docs/index.md @@ -13,12 +13,55 @@ This allows remembering scheduled tasks, their parameters, etc. The goal is to simplify. Make sure to follow [the migration guide](migrate_to_v3.md) +## Architecture and terminology -## Terminology +```mermaid +flowchart TD + subgraph Django Process + task[Scheduled Task
django-model] + end + db[(Relational
Database)] + subgraph Worker + worker[Worker
Queue listener
Job Execution] + commands[Worker
commands
Listener] + scheduler[Scheduler] + scheduler ~~~ commands ~~~ worker + end + + subgraph Broker + job[Job] + commandsChannel[Workers
Commands
Channel] + subgraph Queue + direction TB + scheduled[Scheduled Jobs] + queued[Queued jobs] + active[Active jobs] + finished[Finished jobs] + failed[Failed jobs] + canceled[Canceled jobs] + scheduled ~~~ queued ~~~ active + active ~~~ finished + active ~~~ failed + queued ~~~ canceled + end + job ~~~ commandsChannel + end + + task --> db + task -->|Create instance of executing a task| job + job -->|Queuing a job to be executed| scheduled + scheduled -.->|Queue jobs| scheduler -.-> queued + queued -.->|Worker picking up job to execute| worker + worker -.->|Moves it to active jobs| active + active -.->|Once terminated successfully| finished + active -.->|Once terminated unsuccessfully or stopped| failed + queued -...->|In case job is stopped before starting| canceled +``` ### Scheduled Task -Starting v3.0.0, django-tasks-scheduler is using a single `Task` model with different task types, the task types are: +django-tasks-scheduler is using a single `Task` django-model with different task types, the task types +are: - `ONCE` - Run the task once at a scheduled time. - `REPEATABLE` - Run the task multiple times (limited number of times or infinite times) based on a time interval. @@ -29,42 +72,52 @@ reduces the number of overall queries. An `Task` instance contains all relevant information about a task to enable the users to schedule using django-admin and track their status. -Previously, there were three different models for ScheduledTask. These exist for legacy purposes and are scheduled to -be removed. +### Job + +A job is a record in the broker, containing all information required to execute a piece of code, usually representing a +task, but not necessarily. -* `Scheduled Task` - Run a task once, on a specific time (can be immediate). 
-* `Repeatable Task` - Run a task multiple times (limited number of times or infinite times) based on an interval -* `Cron Task` - Run a task multiple times (limited number of times or infinite times) based on a cron string +It contains the following information: -Scheduled tasks are scheduled when the django application starts, and after a scheduled task is executed. +- Name of the job (that is unique, and passed in different queues). +- Link to the task. +- Reference to the method to be executed. +- Callbacks (In case of failure/success/stopped). +- Timeout details (for method to be executed, for callbacks) +- Successful/Failed result time-to-live. ### Queue A queue of messages between processes (main django-app process and worker usually). -This is implemented in `rq` package. +It is a collection of different registries for different purposes: -* A queue contains multiple registries for scheduled tasks, finished jobs, failed jobs, etc. +- Scheduled jobs: Jobs that are scheduled to run +- Queued jobs: Jobs waiting to be picked up by a worker to run. +- Active jobs: Jobs that are currently being executed. +- Finished jobs: Jobs that have been successfully executed +- Failed jobs: Jobs that have failed to execute or have been stopped +- Canceled jobs: Jobs that have been stopped/canceled before they were executed ### Worker -A process listening to one or more queues **for jobs to be executed**, and executing jobs queued to be -executed. +A process listening to one or more queues **for jobs to be executed**, and executing jobs queued to be executed. -### Scheduler +- A worker has a thread listening to a channel where it can get specific commands. +- A worker can have, by default, a subprocess for the scheduler. -A process listening to one or more queues for **jobs to be scheduled for execution**, and schedule them -to be executed by a worker. +### Scheduler (Worker sub-process) -This is a subprocess of worker. 
+A process listening to one or more queues for **jobs to be scheduled for execution**, and schedule them to be executed +by a worker (i.e., move them from scheduled-jobs registry to queued-jobs registry). -### Queued Job Execution +This is a sub-process of worker. -Once a worker listening to the queue becomes available, the job will be executed +### Job -### Scheduled Job Execution +Once a worker listening to the queue becomes available, the job will be executed. A scheduler checking the queue periodically will check whether the time the job should be executed has come, and if so, -it will queue it. +it will queue it, i.e., add it to the queued-jobs registry. * A job is considered scheduled if it is queued to be executed, or scheduled to be executed. * If there is no scheduler, the job will not be queued to run. @@ -74,24 +127,27 @@ it will queue it. ```mermaid sequenceDiagram autonumber + box DB + participant db as Database + end box Worker participant scheduler as Scheduler Process end - box DB - participant db as Database - + box Broker + participant job as Job end - box Redis queue - participant queue as Queue - participant schedule as Queue scheduled tasks + box Broker Queue + participant schedule as Scheduled jobs + participant queue as Queued jobs end loop Scheduler process - loop forever - note over scheduler, schedule: Database interaction + note over db, schedule: Database interaction scheduler ->> db: Check for enabled tasks that should be scheduled critical There are tasks to be scheduled - scheduler ->> schedule: Create a job for task that should be scheduled + scheduler ->> job: Create job for task that should be scheduled + scheduler ->> schedule: Add the job to the scheduled-jobs registry end - note over scheduler, schedule: Redis queues interaction + note over scheduler, queue: Broker queues interaction scheduler ->> schedule: check whether there are scheduled tasks that should be executed critical there are jobs that are scheduled to be executed 
scheduler ->> schedule: remove jobs to be scheduled @@ -109,23 +165,35 @@ sequenceDiagram box Worker participant worker as Worker Process end - box Redis queue - participant queue as Queue - participant finished as Queue finished jobs - participant failed as Queue failed jobs + box Queue + participant queue as Queued jobs + participant finished as Finished jobs + participant failed as Failed jobs + end + box Broker + participant job as Job + participant result as Result end loop Worker process - loop forever worker ->>+ queue: get the first job to be executed queue -->>- worker: A job to be executed or nothing critical There is a job to be executed - worker ->> queue: Remove job from queue + note over worker, result: There is a job to be executed + worker ->> queue: Remove job from queued registry worker ->> worker: Execute job critical Job ended successfully - worker ->> finished: Write job result + worker ->> worker: Execute successful callbacks + worker ->> finished: Move job to finished-jobs registry + worker ->> job: Update job details + worker ->> result: Write result option Job ended unsuccessfully - worker ->> failed: Write job result + worker ->> worker: Execute failure callbacks + worker ->> failed: Move job to failed-jobs registry + worker ->> job: Update job details + worker ->> result: Write result end option No job to be executed + note over worker, result: No job to be executed worker ->> worker: sleep end end @@ -141,7 +209,8 @@ Please report issues via [GitHub Issues][issues] . ## Acknowledgements -A lot of django-admin views and their tests were adopted from [django-rq][django-rq]. +- Some django-admin views and their tests were adopted from [django-rq][django-rq]. +- Worker and Queue implementation was inspired by [rq][rq]. 
[badge]:https://github.com/django-commons/django-tasks-scheduler/actions/workflows/test.yml/badge.svg @@ -155,4 +224,6 @@ A lot of django-admin views and their tests were adopted from [django-rq][django [issues]:https://github.com/django-commons/django-tasks-scheduler/issues -[django-rq]:https://github.com/rq/django-rq \ No newline at end of file +[django-rq]:https://github.com/rq/django-rq + +[rq]:https://github.com/rq/rq \ No newline at end of file diff --git a/docs/installation.md b/docs/installation.md index 13573b0..e1edcab 100644 --- a/docs/installation.md +++ b/docs/installation.md @@ -15,61 +15,64 @@ ``` 3. Configure your queues. - Add at least one Redis Queue to your `settings.py`: + Add at least one Redis Queue to your `settings.py`. + Note that the usage of `QueueConfiguration` is optional, you can use a simple dictionary, but `QueueConfiguration` + helps preventing configuration errors. ```python - import os - SCHEDULER_QUEUES = { - 'default': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - 'USERNAME': 'some-user', - 'PASSWORD': 'some-password', - 'DEFAULT_TIMEOUT': 360, - 'CLIENT_KWARGS': { # Eventual additional Redis connection arguments - 'ssl_cert_reqs': None, - }, - }, - 'with-sentinel': { - 'SENTINELS': [('localhost', 26736), ('localhost', 26737)], - 'MASTER_NAME': 'redismaster', - 'DB': 0, - # Redis username/password - 'USERNAME': 'redis-user', - 'PASSWORD': 'secret', - 'SOCKET_TIMEOUT': 0.3, - 'CONNECTION_KWARGS': { # Eventual additional Redis connection arguments - 'ssl': True - }, - 'SENTINEL_KWARGS': { # Eventual Sentinel connection arguments - # If Sentinel also has auth, username/password can be passed here - 'username': 'sentinel-user', - 'password': 'secret', - }, - }, - 'high': { - 'URL': os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0'), # If you're on Heroku - 'DEFAULT_TIMEOUT': 500, - }, - 'low': { - 'HOST': 'localhost', - 'PORT': 6379, - 'DB': 0, - } - } + import os + from typing import Dict + from scheduler.types import 
QueueConfiguration + + SCHEDULER_QUEUES: Dict[str, QueueConfiguration] = { + 'default': QueueConfiguration( + HOST='localhost', + PORT=6379, + USERNAME='some-user', + PASSWORD='some-password', + CONNECTION_KWARGS={ # Eventual additional Broker connection arguments + 'ssl_cert_reqs': 'required', + 'ssl': True, + }, + ), + 'with-sentinel': QueueConfiguration( + SENTINELS= [('localhost', 26736), ('localhost', 26737)], + MASTER_NAME= 'redismaster', + DB= 0, + USERNAME= 'redis-user', + PASSWORD= 'secret', + CONNECTION_KWARGS= { + 'ssl': True}, + SENTINEL_KWARGS= { + 'username': 'sentinel-user', + 'password': 'secret', + }), + 'high': QueueConfiguration(URL=os.getenv('REDISTOGO_URL', 'redis://localhost:6379/0')), + 'low': QueueConfiguration(HOST='localhost', PORT=6379, DB=0, ASYNC=False), + } ``` - + 4. Optional: Configure default values for queuing jobs from code: ```python - SCHEDULER_CONFIG = { - 'EXECUTIONS_IN_PAGE': 20, - 'DEFAULT_RESULT_TTL': 500, - 'DEFAULT_TIMEOUT': 300, # 5 minutes - 'SCHEDULER_INTERVAL': 10, # 10 seconds - 'BROKER': 'redis', # - } + from scheduler.types import SchedulerConfiguration, Broker + + SCHEDULER_CONFIG = SchedulerConfiguration( + EXECUTIONS_IN_PAGE=20, + SCHEDULER_INTERVAL=10, + BROKER=Broker.REDIS, + CALLBACK_TIMEOUT=60, # Callback timeout in seconds (success/failure/stopped) + # Default values, can be overriden per task/job + DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results + DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information + DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information + DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job + # General configuration values + DEFAULT_WORKER_TTL=10 * 60, # Time To Live (TTL) in seconds to keep worker information after last heartbeat + DEFAULT_MAINTENANCE_TASK_INTERVAL=10 * 60, # The interval to run maintenance tasks in seconds. 10 minutes. 
+ DEFAULT_JOB_MONITORING_INTERVAL=30, # The interval to monitor jobs in seconds. + SCHEDULER_FALLBACK_PERIOD_SECS=120, # Period (secs) to wait before requiring to reacquire locks + ) ``` - + 5. Add `scheduler.urls` to your django application `urls.py`: ```python from django.urls import path, include diff --git a/docs/media/add-scheduled-job.jpg b/docs/media/add-scheduled-job.jpg deleted file mode 100644 index 3783e7a..0000000 Binary files a/docs/media/add-scheduled-job.jpg and /dev/null differ diff --git a/docs/media/add-scheduled-task.jpg b/docs/media/add-scheduled-task.jpg new file mode 100644 index 0000000..abb355f Binary files /dev/null and b/docs/media/add-scheduled-task.jpg differ diff --git a/docs/media/admin-job-details.jpg b/docs/media/admin-job-details.jpg new file mode 100644 index 0000000..9c5b617 Binary files /dev/null and b/docs/media/admin-job-details.jpg differ diff --git a/docs/media/admin-queue-registry.jpg b/docs/media/admin-queue-registry.jpg new file mode 100644 index 0000000..32c1981 Binary files /dev/null and b/docs/media/admin-queue-registry.jpg differ diff --git a/docs/media/admin-queues-list.jpg b/docs/media/admin-queues-list.jpg new file mode 100644 index 0000000..0bb5791 Binary files /dev/null and b/docs/media/admin-queues-list.jpg differ diff --git a/docs/media/admin-task-details.jpg b/docs/media/admin-task-details.jpg new file mode 100644 index 0000000..4bf88c1 Binary files /dev/null and b/docs/media/admin-task-details.jpg differ diff --git a/docs/media/admin-tasks-list.jpg b/docs/media/admin-tasks-list.jpg new file mode 100644 index 0000000..52feeed Binary files /dev/null and b/docs/media/admin-tasks-list.jpg differ diff --git a/docs/media/admin-worker-details.jpg b/docs/media/admin-worker-details.jpg new file mode 100644 index 0000000..d1c9529 Binary files /dev/null and b/docs/media/admin-worker-details.jpg differ diff --git a/docs/media/admin-workers-list.jpg b/docs/media/admin-workers-list.jpg new file mode 100644 index 
0000000..5b1ef02 Binary files /dev/null and b/docs/media/admin-workers-list.jpg differ diff --git a/docs/requirements.txt b/docs/requirements.txt index 38b687a..948c9be 100644 --- a/docs/requirements.txt +++ b/docs/requirements.txt @@ -1,2 +1,2 @@ mkdocs==1.6.1 -mkdocs-material==9.6.9 \ No newline at end of file +mkdocs-material==9.6.14 diff --git a/docs/usage.md b/docs/usage.md index dacd016..e310a89 100644 --- a/docs/usage.md +++ b/docs/usage.md @@ -6,7 +6,7 @@ from scheduler import job -@job +@job() def long_running_func(): pass @@ -39,29 +39,51 @@ def long_running_func(): long_running_func.delay() # Enqueue function with a timeout of 3600 seconds. ``` -You can set in `settings.py` a default value for `DEFAULT_RESULT_TTL` and `DEFAULT_TIMEOUT`. +You can set in `settings.py` a default value for `DEFAULT_JOB_TTL` and `DEFAULT_JOB_TIMEOUT`. ```python # settings.py -SCHEDULER_CONFIG = { - 'DEFAULT_RESULT_TTL': 360, - 'DEFAULT_TIMEOUT': 60, -} +SCHEDULER_CONFIG = SchedulerConfiguration( + DEFAULT_SUCCESS_TTL=10 * 60, # Time To Live (TTL) in seconds to keep successful job results + DEFAULT_FAILURE_TTL=365 * 24 * 60 * 60, # Time To Live (TTL) in seconds to keep job failure information + DEFAULT_JOB_TTL=10 * 60, # Time To Live (TTL) in seconds to keep job information + DEFAULT_JOB_TIMEOUT=5 * 60, # timeout (seconds) for a job +) ``` -## Scheduling a job Through django-admin +## Managing tasks through the Django Admin + +### Viewing list of scheduled tasks + +![](media/admin-tasks-list.jpg) + +### Viewing details of a scheduled task + +It is possible to view list of executions of a task, as well as the details of a specific execution. +![](media/admin-task-details.jpg) + +### Scheduling a task Through django-admin * Sign in to the Django Admin site (e.g., http://localhost:8000/admin/) and locate the `Tasks Scheduler` section. 
-* Click on the **Add** link for the type of job you want to add (`Scheduled Task` - run once, `Repeatable Task` - run - multiple times, `Cron Task` - Run based on cron schedule). -* Enter a unique name for the job in the **Name** field. +* Click on the **Add** on `Tasks` +* Enter a unique name for the task in the **Name** field. +* Select the task type, and according to the type, the form will change for the scheduling details. + * For `Repeatable task` + * Enter an Interval, and choose the Interval unit. This will calculate the time before the function is called + again. + * In the Repeat field, enter the number of times the job is to be run. Leaving the field empty, means the job + will be scheduled to run forever. + * For `Cron task` + * In the Repeat field, enter the number of times the job is to be run. Leaving the field empty, means the job + will be scheduled to run forever. + * In the cron string field, enter a cron string describing how often the job should run. * In the **Callable** field, enter a Python dot notation path to the method that defines the job. For the example above, that would be `myapp.jobs.count` * Choose your **Queue**. The queues listed are defined in your app `settings.py` under `SCHEDULER_QUEUES`. * Enter the time in UTC the job is to be executed in the **Scheduled time** field. -![](media/add-scheduled-job.jpg) +![](media/add-scheduled-task.jpg) #### Optional fields: @@ -75,33 +97,28 @@ SCHEDULER_CONFIG = { Once you are done, click **Save** and your job will be persisted to django database. -### Support for arguments for jobs +#### Support for arguments for tasks -django-tasks-scheduler supports scheduling jobs calling methods with arguments, as well as arguments that should be +django-tasks-scheduler supports scheduling tasks calling methods with arguments, as well as arguments that should be calculated in runtime. ![](media/add-args.jpg) -### Scheduled Task: run once +### Viewing queue statistics -No additional steps are required. 
+![](media/admin-queues-list.jpg) -### Repeatable Task: Run a job multiple time based on interval +### Viewing queue specific registry jobs -These additional fields are required: +![](media/admin-queue-registry.jpg) -* Enter an **Interval**, and choose the **Interval unit**. This will calculate the time before the function is called - again. -* In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will - be scheduled to run forever. +### Viewing workers list -### Cron Task: Run a job multiple times based on cron +![](media/admin-workers-list.jpg) -These additional fields are required: +### Viewing worker details -* In the **Repeat** field, enter the number of time the job is to be run. Leaving the field empty, means the job will be - scheduled to run forever. -* In the **cron string** field, enter a cron string describing how often the job should run. +![](media/admin-worker-details.jpg) ## Enqueue jobs using the command line @@ -117,41 +134,40 @@ python manage.py run_job -q {queue} -t {timeout} -r {result_ttl} {callable} {arg Create a worker to execute queued jobs on specific queues using: ```shell -python manage.py rqworker [-h] [--pid PIDFILE] [--burst] [--name NAME] [--worker-ttl WORKER_TTL] [--max-jobs MAX_JOBS] [--fork-job-execution FORK_JOB_EXECUTION] - [--job-class JOB_CLASS] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH] [--traceback] [--no-color] [--force-color] - [--skip-checks] - [queues ...] - +usage: manage.py scheduler_worker [-h] [--pid PIDFILE] [--name NAME] [--worker-ttl WORKER_TTL] [--fork-job-execution FORK_JOB_EXECUTION] [--sentry-dsn SENTRY_DSN] [--sentry-debug] [--sentry-ca-certs SENTRY_CA_CERTS] [--burst] + [--max-jobs MAX_JOBS] [--max-idle-time MAX_IDLE_TIME] [--with-scheduler] [--version] [-v {0,1,2,3}] [--settings SETTINGS] [--pythonpath PYTHONPATH] [--traceback] [--no-color] [--force-color] + [--skip-checks] + [queues ...] 
``` -More information about the different parameters can be found in the [commands documentation](commands.md). +More information about the different parameters can be found in the [commands documentation](commands.md). ### Running multiple workers as unix/linux services using systemd You can have multiple workers running as system services. -To have multiple rqworkers, edit the `/etc/systemd/system/rqworker@.service` +To have multiple scheduler workers, edit the `/etc/systemd/system/scheduler_worker@.service` file, make sure it ends with `@.service`, the following is example: ```ini -# /etc/systemd/system/rqworker@.service +# /etc/systemd/system/scheduler_worker@.service [Unit] -Description = rqworker daemon +Description = scheduler_worker daemon After = network.target [Service] WorkingDirectory = {{ path_to_your_project_folder } } ExecStart = /home/ubuntu/.virtualenv/{ { your_virtualenv } }/bin/python \ {{ path_to_your_project_folder } }/manage.py \ - rqworker high default low + scheduler_worker high default low # Optional -# {{user to run rqworker as}} +# {{user to run scheduler_worker as}} User = ubuntu -# {{group to run rqworker as}} +# {{group to run scheduler_worker as}} Group = www-data # Redirect logs to syslog StandardOutput = syslog StandardError = syslog -SyslogIdentifier = rqworker +SyslogIdentifier = scheduler_worker Environment = OBJC_DISABLE_INITIALIZE_FORK_SAFETY = YES Environment = LC_ALL = en_US.UTF-8 Environment = LANG = en_US.UTF-8 @@ -164,11 +180,11 @@ After you are done editing the file, reload the settings and start the new worke ```shell sudo systemctl daemon-reload -sudo systemctl start rqworker@{1..3} +sudo systemctl start scheduler_worker@{1..3} ``` You can target a specific worker using its number: ```shell -sudo systemctl stop rqworker@2 +sudo systemctl stop scheduler_worker@2 ``` \ No newline at end of file diff --git a/mkdocs.yml b/mkdocs.yml index de019e9..ece8bed 100644 --- a/mkdocs.yml +++ b/mkdocs.yml @@ -30,8 +30,8 @@ 
markdown_extensions: - pymdownx.caret - pymdownx.details - pymdownx.emoji: - emoji_generator: !!python/name:materialx.emoji.to_svg - emoji_index: !!python/name:materialx.emoji.twemoji + emoji_generator: !!python/name:material.extensions.emoji.to_svg + emoji_index: !!python/name:material.extensions.emoji.twemoji - pymdownx.highlight: anchor_linenums: true - pymdownx.inlinehilite diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 65ce489..0000000 --- a/poetry.lock +++ /dev/null @@ -1,2046 +0,0 @@ -# This file is automatically @generated by Poetry 2.1.1 and should not be changed by hand. - -[[package]] -name = "anyio" -version = "4.9.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c"}, - {file = "anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -doc = ["Sphinx (>=8.2,<9.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "blockbuster (>=1.5.23)", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "trustme", "truststore (>=0.9.1) ; python_version >= \"3.10\"", "uvloop (>=0.21) ; platform_python_implementation == \"CPython\" and platform_system != \"Windows\" and python_version < \"3.14\""] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, helper code, and adapters" -optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = 
"asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "async-timeout" -version = "5.0.1" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "async_timeout-5.0.1-py3-none-any.whl", hash = "sha256:39e3809566ff85354557ec2398b55e096c8364bacac9405a7a1fa429e77fe76c"}, - {file = "async_timeout-5.0.1.tar.gz", hash = "sha256:d9321a7a3d5a6a5e187e824d2fa0793ce379a202935782d555d6e9d2735677d3"}, -] -markers = {main = "python_version < \"3.11.3\"", dev = "python_full_version < \"3.11.3\""} - -[[package]] -name = "backports-tarfile" -version = "1.2.0" -description = "Backport of CPython tarfile module" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.12\"" -files = [ - {file = "backports.tarfile-1.2.0-py3-none-any.whl", hash = "sha256:77e284d754527b01fb1e6fa8a1afe577858ebe4e9dad8919e34c862cb399bc34"}, - {file = "backports_tarfile-1.2.0.tar.gz", hash = "sha256:d75e02c268746e1b8144c278978b6e98e85de6ad16f8e4b0844a154557eca991"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["jaraco.test", "pytest (!=8.0.*)", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)"] - -[[package]] -name = "build" -version = "1.2.2.post1" -description = "A simple, correct Python build frontend" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "build-1.2.2.post1-py3-none-any.whl", hash = 
"sha256:1d61c0887fa860c01971625baae8bdd338e517b836a2f70dd1f7aa3a6b2fc5b5"}, - {file = "build-1.2.2.post1.tar.gz", hash = "sha256:b36993e92ca9375a219c99e606a122ff365a760a2d4bba0caa09bd5278b608b7"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "os_name == \"nt\""} -importlib-metadata = {version = ">=4.6", markers = "python_full_version < \"3.10.2\""} -packaging = ">=19.1" -pyproject_hooks = "*" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["furo (>=2023.08.17)", "sphinx (>=7.0,<8.0)", "sphinx-argparse-cli (>=1.5)", "sphinx-autodoc-typehints (>=1.10)", "sphinx-issues (>=3.0.0)"] -test = ["build[uv,virtualenv]", "filelock (>=3)", "pytest (>=6.2.4)", "pytest-cov (>=2.12)", "pytest-mock (>=2)", "pytest-rerunfailures (>=9.1)", "pytest-xdist (>=1.34)", "setuptools (>=42.0.0) ; python_version < \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.10\"", "setuptools (>=56.0.0) ; python_version == \"3.11\"", "setuptools (>=67.8.0) ; python_version >= \"3.12\"", "wheel (>=0.36.0)"] -typing = ["build[uv]", "importlib-metadata (>=5.1)", "mypy (>=1.9.0,<1.10.0)", "tomli", "typing-extensions (>=3.7.4.3)"] -uv = ["uv (>=0.1.18)"] -virtualenv = ["virtualenv (>=20.0.35)"] - -[[package]] -name = "cachecontrol" -version = "0.14.2" -description = "httplib2 caching for requests" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "cachecontrol-0.14.2-py3-none-any.whl", hash = "sha256:ebad2091bf12d0d200dfc2464330db638c5deb41d546f6d7aca079e87290f3b0"}, - {file = "cachecontrol-0.14.2.tar.gz", hash = "sha256:7d47d19f866409b98ff6025b6a0fca8e4c791fb31abbd95f622093894ce903a2"}, -] - -[package.dependencies] -filelock = {version = ">=3.8.0", optional = true, markers = "extra == \"filecache\""} -msgpack = ">=0.5.2,<2.0.0" -requests = ">=2.16.0" - -[package.extras] -dev = ["CacheControl[filecache,redis]", "build", "cherrypy", "codespell[tomli]", "furo", "mypy", "pytest", "pytest-cov", 
"ruff", "sphinx", "sphinx-copybutton", "tox", "types-redis", "types-requests"] -filecache = ["filelock (>=3.8.0)"] -redis = ["redis (>=2.10.5)"] - -[[package]] -name = "certifi" -version = "2025.1.31" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "certifi-2025.1.31-py3-none-any.whl", hash = "sha256:ca78db4565a652026a4db2bcdf68f2fb589ea80d0be70e03929ed730746b84fe"}, - {file = "certifi-2025.1.31.tar.gz", hash = "sha256:3d5da6925056f6f18f119200434a4780a94263f10d1c21d032a6f6b2baa20651"}, -] - -[[package]] -name = "cffi" -version = "1.17.1" -description = "Foreign Function Interface for Python calling C code." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "sys_platform == \"linux\" or sys_platform == \"darwin\" or platform_python_implementation == \"PyPy\"" -files = [ - {file = "cffi-1.17.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:df8b1c11f177bc2313ec4b2d46baec87a5f3e71fc8b45dab2ee7cae86d9aba14"}, - {file = "cffi-1.17.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8f2cdc858323644ab277e9bb925ad72ae0e67f69e804f4898c070998d50b1a67"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:edae79245293e15384b51f88b00613ba9f7198016a5948b5dddf4917d4d26382"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:45398b671ac6d70e67da8e4224a065cec6a93541bb7aebe1b198a61b58c7b702"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad9413ccdeda48c5afdae7e4fa2192157e991ff761e7ab8fdd8926f40b160cc3"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5da5719280082ac6bd9aa7becb3938dc9f9cbd57fac7d2871717b1feb0902ab6"}, - {file = "cffi-1.17.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:2bb1a08b8008b281856e5971307cc386a8e9c5b625ac297e853d36da6efe9c17"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:045d61c734659cc045141be4bae381a41d89b741f795af1dd018bfb532fd0df8"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:6883e737d7d9e4899a8a695e00ec36bd4e5e4f18fabe0aca0efe0a4b44cdb13e"}, - {file = "cffi-1.17.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:6b8b4a92e1c65048ff98cfe1f735ef8f1ceb72e3d5f0c25fdb12087a23da22be"}, - {file = "cffi-1.17.1-cp310-cp310-win32.whl", hash = "sha256:c9c3d058ebabb74db66e431095118094d06abf53284d9c81f27300d0e0d8bc7c"}, - {file = "cffi-1.17.1-cp310-cp310-win_amd64.whl", hash = "sha256:0f048dcf80db46f0098ccac01132761580d28e28bc0f78ae0d58048063317e15"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a45e3c6913c5b87b3ff120dcdc03f6131fa0065027d0ed7ee6190736a74cd401"}, - {file = "cffi-1.17.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:30c5e0cb5ae493c04c8b42916e52ca38079f1b235c2f8ae5f4527b963c401caf"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f75c7ab1f9e4aca5414ed4d8e5c0e303a34f4421f8a0d47a4d019ceff0ab6af4"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a1ed2dd2972641495a3ec98445e09766f077aee98a1c896dcb4ad0d303628e41"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:46bf43160c1a35f7ec506d254e5c890f3c03648a4dbac12d624e4490a7046cd1"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a24ed04c8ffd54b0729c07cee15a81d964e6fee0e3d4d342a27b020d22959dc6"}, - {file = "cffi-1.17.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:610faea79c43e44c71e1ec53a554553fa22321b65fae24889706c0a84d4ad86d"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash 
= "sha256:a9b15d491f3ad5d692e11f6b71f7857e7835eb677955c00cc0aefcd0669adaf6"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:de2ea4b5833625383e464549fec1bc395c1bdeeb5f25c4a3a82b5a8c756ec22f"}, - {file = "cffi-1.17.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:fc48c783f9c87e60831201f2cce7f3b2e4846bf4d8728eabe54d60700b318a0b"}, - {file = "cffi-1.17.1-cp311-cp311-win32.whl", hash = "sha256:85a950a4ac9c359340d5963966e3e0a94a676bd6245a4b55bc43949eee26a655"}, - {file = "cffi-1.17.1-cp311-cp311-win_amd64.whl", hash = "sha256:caaf0640ef5f5517f49bc275eca1406b0ffa6aa184892812030f04c2abf589a0"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:805b4371bf7197c329fcb3ead37e710d1bca9da5d583f5073b799d5c5bd1eee4"}, - {file = "cffi-1.17.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:733e99bc2df47476e3848417c5a4540522f234dfd4ef3ab7fafdf555b082ec0c"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1257bdabf294dceb59f5e70c64a3e2f462c30c7ad68092d01bbbfb1c16b1ba36"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:da95af8214998d77a98cc14e3a3bd00aa191526343078b530ceb0bd710fb48a5"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d63afe322132c194cf832bfec0dc69a99fb9bb6bbd550f161a49e9e855cc78ff"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f79fc4fc25f1c8698ff97788206bb3c2598949bfe0fef03d299eb1b5356ada99"}, - {file = "cffi-1.17.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b62ce867176a75d03a665bad002af8e6d54644fad99a3c70905c543130e39d93"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:386c8bf53c502fff58903061338ce4f4950cbdcb23e2902d86c0f722b786bbe3"}, - {file = "cffi-1.17.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash 
= "sha256:4ceb10419a9adf4460ea14cfd6bc43d08701f0835e979bf821052f1805850fe8"}, - {file = "cffi-1.17.1-cp312-cp312-win32.whl", hash = "sha256:a08d7e755f8ed21095a310a693525137cfe756ce62d066e53f502a83dc550f65"}, - {file = "cffi-1.17.1-cp312-cp312-win_amd64.whl", hash = "sha256:51392eae71afec0d0c8fb1a53b204dbb3bcabcb3c9b807eedf3e1e6ccf2de903"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f3a2b4222ce6b60e2e8b337bb9596923045681d71e5a082783484d845390938e"}, - {file = "cffi-1.17.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:0984a4925a435b1da406122d4d7968dd861c1385afe3b45ba82b750f229811e2"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d01b12eeeb4427d3110de311e1774046ad344f5b1a7403101878976ecd7a10f3"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:706510fe141c86a69c8ddc029c7910003a17353970cff3b904ff0686a5927683"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:de55b766c7aa2e2a3092c51e0483d700341182f08e67c63630d5b6f200bb28e5"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c59d6e989d07460165cc5ad3c61f9fd8f1b4796eacbd81cee78957842b834af4"}, - {file = "cffi-1.17.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dd398dbc6773384a17fe0d3e7eeb8d1a21c2200473ee6806bb5e6a8e62bb73dd"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:3edc8d958eb099c634dace3c7e16560ae474aa3803a5df240542b305d14e14ed"}, - {file = "cffi-1.17.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:72e72408cad3d5419375fc87d289076ee319835bdfa2caad331e377589aebba9"}, - {file = "cffi-1.17.1-cp313-cp313-win32.whl", hash = "sha256:e03eab0a8677fa80d646b5ddece1cbeaf556c313dcfac435ba11f107ba117b5d"}, - {file = "cffi-1.17.1-cp313-cp313-win_amd64.whl", hash = 
"sha256:f6a16c31041f09ead72d69f583767292f750d24913dadacf5756b966aacb3f1a"}, - {file = "cffi-1.17.1-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:636062ea65bd0195bc012fea9321aca499c0504409f413dc88af450b57ffd03b"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7eac2ef9b63c79431bc4b25f1cd649d7f061a28808cbc6c47b534bd789ef964"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e221cf152cff04059d011ee126477f0d9588303eb57e88923578ace7baad17f9"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:31000ec67d4221a71bd3f67df918b1f88f676f1c3b535a7eb473255fdc0b83fc"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:6f17be4345073b0a7b8ea599688f692ac3ef23ce28e5df79c04de519dbc4912c"}, - {file = "cffi-1.17.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0e2b1fac190ae3ebfe37b979cc1ce69c81f4e4fe5746bb401dca63a9062cdaf1"}, - {file = "cffi-1.17.1-cp38-cp38-win32.whl", hash = "sha256:7596d6620d3fa590f677e9ee430df2958d2d6d6de2feeae5b20e82c00b76fbf8"}, - {file = "cffi-1.17.1-cp38-cp38-win_amd64.whl", hash = "sha256:78122be759c3f8a014ce010908ae03364d00a1f81ab5c7f4a7a5120607ea56e1"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:b2ab587605f4ba0bf81dc0cb08a41bd1c0a5906bd59243d56bad7668a6fc6c16"}, - {file = "cffi-1.17.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:28b16024becceed8c6dfbc75629e27788d8a3f9030691a1dbf9821a128b22c36"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1d599671f396c4723d016dbddb72fe8e0397082b0a77a4fab8028923bec050e8"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca74b8dbe6e8e8263c0ffd60277de77dcee6c837a3d0881d8c1ead7268c9e576"}, - {file = 
"cffi-1.17.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f7f5baafcc48261359e14bcd6d9bff6d4b28d9103847c9e136694cb0501aef87"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98e3969bcff97cae1b2def8ba499ea3d6f31ddfdb7635374834cf89a1a08ecf0"}, - {file = "cffi-1.17.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdf5ce3acdfd1661132f2a9c19cac174758dc2352bfe37d98aa7512c6b7178b3"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:9755e4345d1ec879e3849e62222a18c7174d65a6a92d5b346b1863912168b595"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:f1e22e8c4419538cb197e4dd60acc919d7696e5ef98ee4da4e01d3f8cfa4cc5a"}, - {file = "cffi-1.17.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c03e868a0b3bc35839ba98e74211ed2b05d2119be4e8a0f224fba9384f1fe02e"}, - {file = "cffi-1.17.1-cp39-cp39-win32.whl", hash = "sha256:e31ae45bc2e29f6b2abd0de1cc3b9d5205aa847cafaecb8af1476a609a2f6eb7"}, - {file = "cffi-1.17.1-cp39-cp39-win_amd64.whl", hash = "sha256:d016c76bdd850f3c626af19b0542c9677ba156e4ee4fccfdd7848803533ef662"}, - {file = "cffi-1.17.1.tar.gz", hash = "sha256:1c39c6016c32bc48dd54561950ebd6836e1670f2ae46128f67cf49e789c52824"}, -] - -[package.dependencies] -pycparser = "*" - -[[package]] -name = "charset-normalizer" -version = "3.4.1" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "charset_normalizer-3.4.1-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:91b36a978b5ae0ee86c394f5a54d6ef44db1de0815eb43de826d41d21e4af3de"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7461baadb4dc00fd9e0acbe254e3d7d2112e7f92ced2adc96e54ef6501c5f176"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e218488cd232553829be0664c2292d3af2eeeb94b32bea483cf79ac6a694e037"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:80ed5e856eb7f30115aaf94e4a08114ccc8813e6ed1b5efa74f9f82e8509858f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b010a7a4fd316c3c484d482922d13044979e78d1861f0e0650423144c616a46a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4532bff1b8421fd0a320463030c7520f56a79c9024a4e88f01c537316019005a"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:d973f03c0cb71c5ed99037b870f2be986c3c05e63622c017ea9816881d2dd247"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:3a3bd0dcd373514dcec91c411ddb9632c0d7d92aed7093b8c3bbb6d69ca74408"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:d9c3cdf5390dcd29aa8056d13e8e99526cda0305acc038b96b30352aff5ff2bb"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:2bdfe3ac2e1bbe5b59a1a63721eb3b95fc9b6817ae4a46debbb4e11f6232428d"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:eab677309cdb30d047996b36d34caeda1dc91149e4fdca0b1a039b3f79d9a807"}, - {file = 
"charset_normalizer-3.4.1-cp310-cp310-win32.whl", hash = "sha256:c0429126cf75e16c4f0ad00ee0eae4242dc652290f940152ca8c75c3a4b6ee8f"}, - {file = "charset_normalizer-3.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:9f0b8b1c6d84c8034a44893aba5e767bf9c7a211e313a9605d9c617d7083829f"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:8bfa33f4f2672964266e940dd22a195989ba31669bd84629f05fab3ef4e2d125"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:28bf57629c75e810b6ae989f03c0828d64d6b26a5e205535585f96093e405ed1"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f08ff5e948271dc7e18a35641d2f11a4cd8dfd5634f55228b691e62b37125eb3"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:234ac59ea147c59ee4da87a0c0f098e9c8d169f4dc2a159ef720f1a61bbe27cd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd4ec41f914fa74ad1b8304bbc634b3de73d2a0889bd32076342a573e0779e00"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eea6ee1db730b3483adf394ea72f808b6e18cf3cb6454b4d86e04fa8c4327a12"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c96836c97b1238e9c9e3fe90844c947d5afbf4f4c92762679acfe19927d81d77"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4d86f7aff21ee58f26dcf5ae81a9addbd914115cdebcbb2217e4f0ed8982e146"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:09b5e6733cbd160dcc09589227187e242a30a49ca5cefa5a7edd3f9d19ed53fd"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_s390x.whl", hash = 
"sha256:5777ee0881f9499ed0f71cc82cf873d9a0ca8af166dfa0af8ec4e675b7df48e6"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:237bdbe6159cff53b4f24f397d43c6336c6b0b42affbe857970cefbb620911c8"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win32.whl", hash = "sha256:8417cb1f36cc0bc7eaba8ccb0e04d55f0ee52df06df3ad55259b9a323555fc8b"}, - {file = "charset_normalizer-3.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:d7f50a1f8c450f3925cb367d011448c39239bb3eb4117c36a6d354794de4ce76"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:73d94b58ec7fecbc7366247d3b0b10a21681004153238750bb67bd9012414545"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dad3e487649f498dd991eeb901125411559b22e8d7ab25d3aeb1af367df5efd7"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c30197aa96e8eed02200a83fba2657b4c3acd0f0aa4bdc9f6c1af8e8962e0757"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2369eea1ee4a7610a860d88f268eb39b95cb588acd7235e02fd5a5601773d4fa"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc2722592d8998c870fa4e290c2eec2c1569b87fe58618e67d38b4665dfa680d"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ffc9202a29ab3920fa812879e95a9e78b2465fd10be7fcbd042899695d75e616"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:804a4d582ba6e5b747c625bf1255e6b1507465494a40a2130978bda7b932c90b"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f55e69f030f7163dffe9fd0752b32f070566451afe180f99dbeeb81f511ad8d"}, - {file = 
"charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:c4c3e6da02df6fa1410a7680bd3f63d4f710232d3139089536310d027950696a"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:5df196eb874dae23dcfb968c83d4f8fdccb333330fe1fc278ac5ceeb101003a9"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:e358e64305fe12299a08e08978f51fc21fac060dcfcddd95453eabe5b93ed0e1"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win32.whl", hash = "sha256:9b23ca7ef998bc739bf6ffc077c2116917eabcc901f88da1b9856b210ef63f35"}, - {file = "charset_normalizer-3.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:6ff8a4a60c227ad87030d76e99cd1698345d4491638dfa6673027c48b3cd395f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:aabfa34badd18f1da5ec1bc2715cadc8dca465868a4e73a0173466b688f29dda"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22e14b5d70560b8dd51ec22863f370d1e595ac3d024cb8ad7d308b4cd95f8313"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8436c508b408b82d87dc5f62496973a1805cd46727c34440b0d29d8a2f50a6c9"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2d074908e1aecee37a7635990b2c6d504cd4766c7bc9fc86d63f9c09af3fa11b"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:955f8851919303c92343d2f66165294848d57e9bba6cf6e3625485a70a038d11"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:44ecbf16649486d4aebafeaa7ec4c9fed8b88101f4dd612dcaf65d5e815f837f"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = 
"sha256:0924e81d3d5e70f8126529951dac65c1010cdf117bb75eb02dd12339b57749dd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2967f74ad52c3b98de4c3b32e1a44e32975e008a9cd2a8cc8966d6a5218c5cb2"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:c75cb2a3e389853835e84a2d8fb2b81a10645b503eca9bcb98df6b5a43eb8886"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:09b26ae6b1abf0d27570633b2b078a2a20419c99d66fb2823173d73f188ce601"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa88b843d6e211393a37219e6a1c1df99d35e8fd90446f1118f4216e307e48cd"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win32.whl", hash = "sha256:eb8178fe3dba6450a3e024e95ac49ed3400e506fd4e9e5c32d30adda88cbd407"}, - {file = "charset_normalizer-3.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:b1ac5992a838106edb89654e0aebfc24f5848ae2547d22c2c3f66454daa11971"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f30bf9fd9be89ecb2360c7d94a711f00c09b976258846efe40db3d05828e8089"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:97f68b8d6831127e4787ad15e6757232e14e12060bec17091b85eb1486b91d8d"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7974a0b5ecd505609e3b19742b60cee7aa2aa2fb3151bc917e6e2646d7667dcf"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fc54db6c8593ef7d4b2a331b58653356cf04f67c960f584edb7c3d8c97e8f39e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:311f30128d7d333eebd7896965bfcfbd0065f1716ec92bd5638d7748eb6f936a"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_aarch64.whl", 
hash = "sha256:7d053096f67cd1241601111b698f5cad775f97ab25d81567d3f59219b5f1adbd"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:807f52c1f798eef6cf26beb819eeb8819b1622ddfeef9d0977a8502d4db6d534"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:dccbe65bd2f7f7ec22c4ff99ed56faa1e9f785482b9bbd7c717e26fd723a1d1e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:2fb9bd477fdea8684f78791a6de97a953c51831ee2981f8e4f583ff3b9d9687e"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:01732659ba9b5b873fc117534143e4feefecf3b2078b0a6a2e925271bb6f4cfa"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win32.whl", hash = "sha256:7a4f97a081603d2050bfaffdefa5b02a9ec823f8348a572e39032caa8404a487"}, - {file = "charset_normalizer-3.4.1-cp37-cp37m-win_amd64.whl", hash = "sha256:7b1bef6280950ee6c177b326508f86cad7ad4dff12454483b51d8b7d673a2c5d"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:ecddf25bee22fe4fe3737a399d0d177d72bc22be6913acfab364b40bce1ba83c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8c60ca7339acd497a55b0ea5d506b2a2612afb2826560416f6894e8b5770d4a9"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7b2d86dd06bfc2ade3312a83a5c364c7ec2e3498f8734282c6c3d4b07b346b8"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dd78cfcda14a1ef52584dbb008f7ac81c1328c0f58184bf9a84c49c605002da6"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e27f48bcd0957c6d4cb9d6fa6b61d192d0b13d5ef563e5f2ae35feafc0d179c"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash 
= "sha256:01ad647cdd609225c5350561d084b42ddf732f4eeefe6e678765636791e78b9a"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:619a609aa74ae43d90ed2e89bdd784765de0a25ca761b93e196d938b8fd1dbbd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:89149166622f4db9b4b6a449256291dc87a99ee53151c74cbd82a53c8c2f6ccd"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:7709f51f5f7c853f0fb938bcd3bc59cdfdc5203635ffd18bf354f6967ea0f824"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:345b0426edd4e18138d6528aed636de7a9ed169b4aaf9d61a8c19e39d26838ca"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:0907f11d019260cdc3f94fbdb23ff9125f6b5d1039b76003b5b0ac9d6a6c9d5b"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win32.whl", hash = "sha256:ea0d8d539afa5eb2728aa1932a988a9a7af94f18582ffae4bc10b3fbdad0626e"}, - {file = "charset_normalizer-3.4.1-cp38-cp38-win_amd64.whl", hash = "sha256:329ce159e82018d646c7ac45b01a430369d526569ec08516081727a20e9e4af4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:b97e690a2118911e39b4042088092771b4ae3fc3aa86518f84b8cf6888dbdb41"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:78baa6d91634dfb69ec52a463534bc0df05dbd546209b79a3880a34487f4b84f"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1a2bc9f351a75ef49d664206d51f8e5ede9da246602dc2d2726837620ea034b2"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:75832c08354f595c760a804588b9357d34ec00ba1c940c15e31e96d902093770"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:0af291f4fe114be0280cdd29d533696a77b5b49cfde5467176ecab32353395c4"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0167ddc8ab6508fe81860a57dd472b2ef4060e8d378f0cc555707126830f2537"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:2a75d49014d118e4198bcee5ee0a6f25856b29b12dbf7cd012791f8a6cc5c496"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:363e2f92b0f0174b2f8238240a1a30142e3db7b957a5dd5689b0e75fb717cc78"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:ab36c8eb7e454e34e60eb55ca5d241a5d18b2c6244f6827a30e451c42410b5f7"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:4c0907b1928a36d5a998d72d64d8eaa7244989f7aaaf947500d3a800c83a3fd6"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:04432ad9479fa40ec0f387795ddad4437a2b50417c69fa275e212933519ff294"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win32.whl", hash = "sha256:3bed14e9c89dcb10e8f3a29f9ccac4955aebe93c71ae803af79265c9ca5644c5"}, - {file = "charset_normalizer-3.4.1-cp39-cp39-win_amd64.whl", hash = "sha256:49402233c892a461407c512a19435d1ce275543138294f7ef013f0b63d5d3765"}, - {file = "charset_normalizer-3.4.1-py3-none-any.whl", hash = "sha256:d98b1668f06378c6dbefec3b92299716b931cd4e6061f3c875a71ced1780ab85"}, - {file = "charset_normalizer-3.4.1.tar.gz", hash = "sha256:44251f18cd68a75b56585dd00dae26183e102cd5e0f9f1466e6df5da2ed64ea3"}, -] - -[[package]] -name = "cleo" -version = "2.1.0" -description = "Cleo allows you to create beautiful and testable command-line interfaces." 
-optional = false -python-versions = ">=3.7,<4.0" -groups = ["dev"] -files = [ - {file = "cleo-2.1.0-py3-none-any.whl", hash = "sha256:4a31bd4dd45695a64ee3c4758f583f134267c2bc518d8ae9a29cf237d009b07e"}, - {file = "cleo-2.1.0.tar.gz", hash = "sha256:0b2c880b5d13660a7ea651001fb4acb527696c01f15c9ee650f377aa543fd523"}, -] - -[package.dependencies] -crashtest = ">=0.4.1,<0.5.0" -rapidfuzz = ">=3.0.0,<4.0.0" - -[[package]] -name = "click" -version = "8.1.8" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "click-8.1.8-py3-none-any.whl", hash = "sha256:63c132bbbed01578a06712a2d1f497bb62d9c1c0d329b7903a866228027263b2"}, - {file = "click-8.1.8.tar.gz", hash = "sha256:ed53c9d8990d83c2a27deae68e4ee337473f6330c040a31d4225c9574d16096a"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -groups = ["main", "dev"] -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] -markers = {main = "platform_system == \"Windows\"", dev = "os_name == \"nt\""} - -[[package]] -name = "coverage" -version = "7.7.1" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "coverage-7.7.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:553ba93f8e3c70e1b0031e4dfea36aba4e2b51fe5770db35e99af8dc5c5a9dfe"}, - {file = "coverage-7.7.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:44683f2556a56c9a6e673b583763096b8efbd2df022b02995609cf8e64fc8ae0"}, - {file = "coverage-7.7.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:02fad4f8faa4153db76f9246bc95c1d99f054f4e0a884175bff9155cf4f856cb"}, - {file = "coverage-7.7.1-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4c181ceba2e6808ede1e964f7bdc77bd8c7eb62f202c63a48cc541e5ffffccb6"}, - {file = "coverage-7.7.1-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80b5b207a8b08c6a934b214e364cab2fa82663d4af18981a6c0a9e95f8df7602"}, - {file = "coverage-7.7.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:25fe40967717bad0ce628a0223f08a10d54c9d739e88c9cbb0f77b5959367542"}, - {file = "coverage-7.7.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:881cae0f9cbd928c9c001487bb3dcbfd0b0af3ef53ae92180878591053be0cb3"}, - {file = "coverage-7.7.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c90e9141e9221dd6fbc16a2727a5703c19443a8d9bf7d634c792fa0287cee1ab"}, - {file = 
"coverage-7.7.1-cp310-cp310-win32.whl", hash = "sha256:ae13ed5bf5542d7d4a0a42ff5160e07e84adc44eda65ddaa635c484ff8e55917"}, - {file = "coverage-7.7.1-cp310-cp310-win_amd64.whl", hash = "sha256:171e9977c6a5d2b2be9efc7df1126fd525ce7cad0eb9904fe692da007ba90d81"}, - {file = "coverage-7.7.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1165490be0069e34e4f99d08e9c5209c463de11b471709dfae31e2a98cbd49fd"}, - {file = "coverage-7.7.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:44af11c00fd3b19b8809487630f8a0039130d32363239dfd15238e6d37e41a48"}, - {file = "coverage-7.7.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fbba59022e7c20124d2f520842b75904c7b9f16c854233fa46575c69949fb5b9"}, - {file = "coverage-7.7.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:af94fb80e4f159f4d93fb411800448ad87b6039b0500849a403b73a0d36bb5ae"}, - {file = "coverage-7.7.1-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:eae79f8e3501133aa0e220bbc29573910d096795882a70e6f6e6637b09522133"}, - {file = "coverage-7.7.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e33426a5e1dc7743dd54dfd11d3a6c02c5d127abfaa2edd80a6e352b58347d1a"}, - {file = "coverage-7.7.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:b559adc22486937786731dac69e57296cb9aede7e2687dfc0d2696dbd3b1eb6b"}, - {file = "coverage-7.7.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b838a91e84e1773c3436f6cc6996e000ed3ca5721799e7789be18830fad009a2"}, - {file = "coverage-7.7.1-cp311-cp311-win32.whl", hash = "sha256:2c492401bdb3a85824669d6a03f57b3dfadef0941b8541f035f83bbfc39d4282"}, - {file = "coverage-7.7.1-cp311-cp311-win_amd64.whl", hash = "sha256:1e6f867379fd033a0eeabb1be0cffa2bd660582b8b0c9478895c509d875a9d9e"}, - {file = "coverage-7.7.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:eff187177d8016ff6addf789dcc421c3db0d014e4946c1cc3fbf697f7852459d"}, - 
{file = "coverage-7.7.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:2444fbe1ba1889e0b29eb4d11931afa88f92dc507b7248f45be372775b3cef4f"}, - {file = "coverage-7.7.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:177d837339883c541f8524683e227adcaea581eca6bb33823a2a1fdae4c988e1"}, - {file = "coverage-7.7.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:15d54ecef1582b1d3ec6049b20d3c1a07d5e7f85335d8a3b617c9960b4f807e0"}, - {file = "coverage-7.7.1-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:75c82b27c56478d5e1391f2e7b2e7f588d093157fa40d53fd9453a471b1191f2"}, - {file = "coverage-7.7.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:315ff74b585110ac3b7ab631e89e769d294f303c6d21302a816b3554ed4c81af"}, - {file = "coverage-7.7.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:4dd532dac197d68c478480edde74fd4476c6823355987fd31d01ad9aa1e5fb59"}, - {file = "coverage-7.7.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:385618003e3d608001676bb35dc67ae3ad44c75c0395d8de5780af7bb35be6b2"}, - {file = "coverage-7.7.1-cp312-cp312-win32.whl", hash = "sha256:63306486fcb5a827449464f6211d2991f01dfa2965976018c9bab9d5e45a35c8"}, - {file = "coverage-7.7.1-cp312-cp312-win_amd64.whl", hash = "sha256:37351dc8123c154fa05b7579fdb126b9f8b1cf42fd6f79ddf19121b7bdd4aa04"}, - {file = "coverage-7.7.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:eebd927b86761a7068a06d3699fd6c20129becf15bb44282db085921ea0f1585"}, - {file = "coverage-7.7.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2a79c4a09765d18311c35975ad2eb1ac613c0401afdd9cb1ca4110aeb5dd3c4c"}, - {file = "coverage-7.7.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8b1c65a739447c5ddce5b96c0a388fd82e4bbdff7251396a70182b1d83631019"}, - {file = 
"coverage-7.7.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:392cc8fd2b1b010ca36840735e2a526fcbd76795a5d44006065e79868cc76ccf"}, - {file = "coverage-7.7.1-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9bb47cc9f07a59a451361a850cb06d20633e77a9118d05fd0f77b1864439461b"}, - {file = "coverage-7.7.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b4c144c129343416a49378e05c9451c34aae5ccf00221e4fa4f487db0816ee2f"}, - {file = "coverage-7.7.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bc96441c9d9ca12a790b5ae17d2fa6654da4b3962ea15e0eabb1b1caed094777"}, - {file = "coverage-7.7.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:3d03287eb03186256999539d98818c425c33546ab4901028c8fa933b62c35c3a"}, - {file = "coverage-7.7.1-cp313-cp313-win32.whl", hash = "sha256:8fed429c26b99641dc1f3a79179860122b22745dd9af36f29b141e178925070a"}, - {file = "coverage-7.7.1-cp313-cp313-win_amd64.whl", hash = "sha256:092b134129a8bb940c08b2d9ceb4459af5fb3faea77888af63182e17d89e1cf1"}, - {file = "coverage-7.7.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:d3154b369141c3169b8133973ac00f63fcf8d6dbcc297d788d36afbb7811e511"}, - {file = "coverage-7.7.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:264ff2bcce27a7f455b64ac0dfe097680b65d9a1a293ef902675fa8158d20b24"}, - {file = "coverage-7.7.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ba8480ebe401c2f094d10a8c4209b800a9b77215b6c796d16b6ecdf665048950"}, - {file = "coverage-7.7.1-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:520af84febb6bb54453e7fbb730afa58c7178fd018c398a8fcd8e269a79bf96d"}, - {file = "coverage-7.7.1-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88d96127ae01ff571d465d4b0be25c123789cef88ba0879194d673fdea52f54e"}, - {file 
= "coverage-7.7.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:0ce92c5a9d7007d838456f4b77ea159cb628187a137e1895331e530973dcf862"}, - {file = "coverage-7.7.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0dab4ef76d7b14f432057fdb7a0477e8bffca0ad39ace308be6e74864e632271"}, - {file = "coverage-7.7.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7e688010581dbac9cab72800e9076e16f7cccd0d89af5785b70daa11174e94de"}, - {file = "coverage-7.7.1-cp313-cp313t-win32.whl", hash = "sha256:e52eb31ae3afacdacfe50705a15b75ded67935770c460d88c215a9c0c40d0e9c"}, - {file = "coverage-7.7.1-cp313-cp313t-win_amd64.whl", hash = "sha256:a6b6b3bd121ee2ec4bd35039319f3423d0be282b9752a5ae9f18724bc93ebe7c"}, - {file = "coverage-7.7.1-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:34a3bf6b92e6621fc4dcdaab353e173ccb0ca9e4bfbcf7e49a0134c86c9cd303"}, - {file = "coverage-7.7.1-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:d6874929d624d3a670f676efafbbc747f519a6121b581dd41d012109e70a5ebd"}, - {file = "coverage-7.7.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7ba5ff236c87a7b7aa1441a216caf44baee14cbfbd2256d306f926d16b026578"}, - {file = "coverage-7.7.1-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452735fafe8ff5918236d5fe1feac322b359e57692269c75151f9b4ee4b7e1bc"}, - {file = "coverage-7.7.1-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5f99a93cecf799738e211f9746dc83749b5693538fbfac279a61682ba309387"}, - {file = "coverage-7.7.1-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:11dd6f52c2a7ce8bf0a5f3b6e4a8eb60e157ffedc3c4b4314a41c1dfbd26ce58"}, - {file = "coverage-7.7.1-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:b52edb940d087e2a96e73c1523284a2e94a4e66fa2ea1e2e64dddc67173bad94"}, - {file = "coverage-7.7.1-cp39-cp39-musllinux_1_2_x86_64.whl", hash = 
"sha256:d2e73e2ac468536197e6b3ab79bc4a5c9da0f078cd78cfcc7fe27cf5d1195ef0"}, - {file = "coverage-7.7.1-cp39-cp39-win32.whl", hash = "sha256:18f544356bceef17cc55fcf859e5664f06946c1b68efcea6acdc50f8f6a6e776"}, - {file = "coverage-7.7.1-cp39-cp39-win_amd64.whl", hash = "sha256:d66ff48ab3bb6f762a153e29c0fc1eb5a62a260217bc64470d7ba602f5886d20"}, - {file = "coverage-7.7.1-pp39.pp310.pp311-none-any.whl", hash = "sha256:5b7b02e50d54be6114cc4f6a3222fec83164f7c42772ba03b520138859b5fde1"}, - {file = "coverage-7.7.1-py3-none-any.whl", hash = "sha256:822fa99dd1ac686061e1219b67868e25d9757989cf2259f735a4802497d6da31"}, - {file = "coverage-7.7.1.tar.gz", hash = "sha256:199a1272e642266b90c9f40dec7fd3d307b51bf639fa0d15980dc0b3246c1393"}, -] - -[package.extras] -toml = ["tomli ; python_full_version <= \"3.11.0a6\""] - -[[package]] -name = "crashtest" -version = "0.4.1" -description = "Manage Python errors with ease" -optional = false -python-versions = ">=3.7,<4.0" -groups = ["dev"] -files = [ - {file = "crashtest-0.4.1-py3-none-any.whl", hash = "sha256:8d23eac5fa660409f57472e3851dab7ac18aba459a8d19cbbba86d3d5aecd2a5"}, - {file = "crashtest-0.4.1.tar.gz", hash = "sha256:80d7b1f316ebfbd429f648076d6275c877ba30ba48979de4191714a75266f0ce"}, -] - -[[package]] -name = "croniter" -version = "6.0.0" -description = "croniter provides iteration for datetime object with cron like format" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" -groups = ["main"] -files = [ - {file = "croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368"}, - {file = "croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577"}, -] - -[package.dependencies] -python-dateutil = "*" -pytz = ">2021.1" - -[[package]] -name = "cryptography" -version = "44.0.2" -description = "cryptography is a package which provides cryptographic recipes and primitives to Python developers." 
-optional = false -python-versions = "!=3.9.0,!=3.9.1,>=3.7" -groups = ["dev"] -markers = "sys_platform == \"linux\"" -files = [ - {file = "cryptography-44.0.2-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:efcfe97d1b3c79e486554efddeb8f6f53a4cdd4cf6086642784fa31fc384e1d7"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:29ecec49f3ba3f3849362854b7253a9f59799e3763b0c9d0826259a88efa02f1"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bc821e161ae88bfe8088d11bb39caf2916562e0a2dc7b6d56714a48b784ef0bb"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:3c00b6b757b32ce0f62c574b78b939afab9eecaf597c4d624caca4f9e71e7843"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:7bdcd82189759aba3816d1f729ce42ffded1ac304c151d0a8e89b9996ab863d5"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:4973da6ca3db4405c54cd0b26d328be54c7747e89e284fcff166132eb7bccc9c"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:4e389622b6927d8133f314949a9812972711a111d577a5d1f4bee5e58736b80a"}, - {file = "cryptography-44.0.2-cp37-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:f514ef4cd14bb6fb484b4a60203e912cfcb64f2ab139e88c2274511514bf7308"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:1bc312dfb7a6e5d66082c87c34c8a62176e684b6fe3d90fcfe1568de675e6688"}, - {file = "cryptography-44.0.2-cp37-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:3b721b8b4d948b218c88cb8c45a01793483821e709afe5f622861fc6182b20a7"}, - {file = "cryptography-44.0.2-cp37-abi3-win32.whl", hash = "sha256:51e4de3af4ec3899d6d178a8c005226491c27c4ba84101bfb59c901e10ca9f79"}, - {file = "cryptography-44.0.2-cp37-abi3-win_amd64.whl", hash = "sha256:c505d61b6176aaf982c5717ce04e87da5abc9a36a5b39ac03905c4aafe8de7aa"}, - 
{file = "cryptography-44.0.2-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e0ddd63e6bf1161800592c71ac794d3fb8001f2caebe0966e77c5234fa9efc3"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:81276f0ea79a208d961c433a947029e1a15948966658cf6710bbabb60fcc2639"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9a1e657c0f4ea2a23304ee3f964db058c9e9e635cc7019c4aa21c330755ef6fd"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:6210c05941994290f3f7f175a4a57dbbb2afd9273657614c506d5976db061181"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_armv7l.manylinux_2_31_armv7l.whl", hash = "sha256:d1c3572526997b36f245a96a2b1713bf79ce99b271bbcf084beb6b9b075f29ea"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:b042d2a275c8cee83a4b7ae30c45a15e6a4baa65a179a0ec2d78ebb90e4f6699"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:d03806036b4f89e3b13b6218fefea8d5312e450935b1a2d55f0524e2ed7c59d9"}, - {file = "cryptography-44.0.2-cp39-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:c7362add18b416b69d58c910caa217f980c5ef39b23a38a0880dfd87bdf8cd23"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:8cadc6e3b5a1f144a039ea08a0bdb03a2a92e19c46be3285123d32029f40a922"}, - {file = "cryptography-44.0.2-cp39-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:6f101b1f780f7fc613d040ca4bdf835c6ef3b00e9bd7125a4255ec574c7916e4"}, - {file = "cryptography-44.0.2-cp39-abi3-win32.whl", hash = "sha256:3dc62975e31617badc19a906481deacdeb80b4bb454394b4098e3f2525a488c5"}, - {file = "cryptography-44.0.2-cp39-abi3-win_amd64.whl", hash = "sha256:5f6f90b72d8ccadb9c6e311c775c8305381db88374c65fa1a68250aa8a9cb3a6"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-macosx_10_9_x86_64.whl", hash = 
"sha256:af4ff3e388f2fa7bff9f7f2b31b87d5651c45731d3e8cfa0944be43dff5cfbdb"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:0529b1d5a0105dd3731fa65680b45ce49da4d8115ea76e9da77a875396727b41"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:7ca25849404be2f8e4b3c59483d9d3c51298a22c1c61a0e84415104dacaf5562"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:268e4e9b177c76d569e8a145a6939eca9a5fec658c932348598818acf31ae9a5"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:9eb9d22b0a5d8fd9925a7764a054dca914000607dff201a24c791ff5c799e1fa"}, - {file = "cryptography-44.0.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2bf7bf75f7df9715f810d1b038870309342bff3069c5bd8c6b96128cb158668d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:909c97ab43a9c0c0b0ada7a1281430e4e5ec0458e6d9244c0e821bbf152f061d"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:96e7a5e9d6e71f9f4fca8eebfd603f8e86c5225bb18eb621b2c1e50b290a9471"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d1b3031093a366ac767b3feb8bcddb596671b3aaff82d4050f984da0c248b615"}, - {file = "cryptography-44.0.2-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:04abd71114848aa25edb28e225ab5f268096f44cf0127f3d36975bdf1bdf3390"}, - {file = "cryptography-44.0.2.tar.gz", hash = "sha256:c63454aa261a0cf0c5b4718349629793e9e634993538db841165b3df74f37ec0"}, -] - -[package.dependencies] -cffi = {version = ">=1.12", markers = "platform_python_implementation != \"PyPy\""} - -[package.extras] -docs = ["sphinx (>=5.3.0)", "sphinx-rtd-theme (>=3.0.0) ; python_version >= \"3.8\""] -docstest = ["pyenchant (>=3)", "readme-renderer (>=30.0)", "sphinxcontrib-spelling (>=7.3.1)"] -nox = ["nox (>=2024.4.15)", "nox[uv] 
(>=2024.3.2) ; python_version >= \"3.8\""] -pep8test = ["check-sdist ; python_version >= \"3.8\"", "click (>=8.0.1)", "mypy (>=1.4)", "ruff (>=0.3.6)"] -sdist = ["build (>=1.0.0)"] -ssh = ["bcrypt (>=3.1.5)"] -test = ["certifi (>=2024)", "cryptography-vectors (==44.0.2)", "pretend (>=0.7)", "pytest (>=7.4.0)", "pytest-benchmark (>=4.0)", "pytest-cov (>=2.10.1)", "pytest-xdist (>=3.5.0)"] -test-randomorder = ["pytest-randomly"] - -[[package]] -name = "distlib" -version = "0.3.9" -description = "Distribution utilities" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87"}, - {file = "distlib-0.3.9.tar.gz", hash = "sha256:a60f20dea646b8a33f3e7772f74dc0b2d0772d2837ee1342a00645c81edf9403"}, -] - -[[package]] -name = "django" -version = "5.1.7" -description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
-optional = false -python-versions = ">=3.10" -groups = ["main"] -files = [ - {file = "Django-5.1.7-py3-none-any.whl", hash = "sha256:1323617cb624add820cb9611cdcc788312d250824f92ca6048fda8625514af2b"}, - {file = "Django-5.1.7.tar.gz", hash = "sha256:30de4ee43a98e5d3da36a9002f287ff400b43ca51791920bfb35f6917bfe041c"}, -] - -[package.dependencies] -asgiref = ">=3.8.1,<4" -sqlparse = ">=0.3.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -argon2 = ["argon2-cffi (>=19.1.0)"] -bcrypt = ["bcrypt"] - -[[package]] -name = "dulwich" -version = "0.22.8" -description = "Python Git Library" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "dulwich-0.22.8-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:546176d18b8cc0a492b0f23f07411e38686024cffa7e9d097ae20512a2e57127"}, - {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7d2434dd72b2ae09b653c9cfe6764a03c25cfbd99fbbb7c426f0478f6fb1100f"}, - {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8318bc0921d42e3e69f03716f983a301b5ee4c8dc23c7f2c5bbb28581257a9"}, - {file = "dulwich-0.22.8-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c7a0f96a2a87f3b4f7feae79d2ac6b94107d6b7d827ac08f2f331b88c8f597a1"}, - {file = "dulwich-0.22.8-cp310-cp310-win32.whl", hash = "sha256:432a37b25733202897b8d67cdd641688444d980167c356ef4e4dd15a17a39a24"}, - {file = "dulwich-0.22.8-cp310-cp310-win_amd64.whl", hash = "sha256:f3a15e58dac8b8a76073ddca34e014f66f3672a5540a99d49ef6a9c09ab21285"}, - {file = "dulwich-0.22.8-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:0852edc51cff4f4f62976bdaa1d82f6ef248356c681c764c0feb699bc17d5782"}, - {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:826aae8b64ac1a12321d6b272fc13934d8f62804fda2bc6ae46f93f4380798eb"}, - {file = 
"dulwich-0.22.8-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7ae726f923057d36cdbb9f4fb7da0d0903751435934648b13f1b851f0e38ea1"}, - {file = "dulwich-0.22.8-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6987d753227f55cf75ba29a8dab69d1d83308ce483d7a8c6d223086f7a42e125"}, - {file = "dulwich-0.22.8-cp311-cp311-win32.whl", hash = "sha256:7757b4a2aad64c6f1920082fc1fccf4da25c3923a0ae7b242c08d06861dae6e1"}, - {file = "dulwich-0.22.8-cp311-cp311-win_amd64.whl", hash = "sha256:12b243b7e912011c7225dc67480c313ac8d2990744789b876016fb593f6f3e19"}, - {file = "dulwich-0.22.8-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:d81697f74f50f008bb221ab5045595f8a3b87c0de2c86aa55be42ba97421f3cd"}, - {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7bff1da8e2e6a607c3cb45f5c2e652739589fe891245e1d5b770330cdecbde41"}, - {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9969099e15b939d3936f8bee8459eaef7ef5a86cd6173393a17fe28ca3d38aff"}, - {file = "dulwich-0.22.8-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:017152c51b9a613f0698db28c67cf3e0a89392d28050dbf4f4ac3f657ea4c0dc"}, - {file = "dulwich-0.22.8-cp312-cp312-win32.whl", hash = "sha256:ee70e8bb8798b503f81b53f7a103cb869c8e89141db9005909f79ab1506e26e9"}, - {file = "dulwich-0.22.8-cp312-cp312-win_amd64.whl", hash = "sha256:dc89c6f14dcdcbfee200b0557c59ae243835e42720be143526d834d0e53ed3af"}, - {file = "dulwich-0.22.8-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:dbade3342376be1cd2409539fe1b901d2d57a531106bbae204da921ef4456a74"}, - {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:71420ffb6deebc59b2ce875e63d814509f9c1dc89c76db962d547aebf15670c7"}, - {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:a626adbfac44646a125618266a24133763bdc992bf8bd0702910d67e6b994443"}, - {file = "dulwich-0.22.8-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f1476c9c4e4ede95714d06c4831883a26680e37b040b8b6230f506e5ba39f51"}, - {file = "dulwich-0.22.8-cp313-cp313-win32.whl", hash = "sha256:b2b31913932bb5bd41658dd398b33b1a2d4d34825123ad54e40912cfdfe60003"}, - {file = "dulwich-0.22.8-cp313-cp313-win_amd64.whl", hash = "sha256:7a44e5a61a7989aca1e301d39cfb62ad2f8853368682f524d6e878b4115d823d"}, - {file = "dulwich-0.22.8-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f9cd0c67fb44a38358b9fcabee948bf11044ef6ce7a129e50962f54c176d084e"}, - {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5b79b94726c3f4a9e5a830c649376fd0963236e73142a4290bac6bc9fc9cb120"}, - {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:16bbe483d663944972e22d64e1f191201123c3b5580fbdaac6a4f66bfaa4fc11"}, - {file = "dulwich-0.22.8-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e02d403af23d93dc1f96eb2408e25efd50046e38590a88c86fa4002adc9849b0"}, - {file = "dulwich-0.22.8-cp39-cp39-win32.whl", hash = "sha256:8bdd9543a77fb01be704377f5e634b71f955fec64caa4a493dc3bfb98e3a986e"}, - {file = "dulwich-0.22.8-cp39-cp39-win_amd64.whl", hash = "sha256:3b6757c6b3ba98212b854a766a4157b9cb79a06f4e1b06b46dec4bd834945b8e"}, - {file = "dulwich-0.22.8-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7bb18fa09daa1586c1040b3e2777d38d4212a5cdbe47d384ba66a1ac336fcc4c"}, - {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b2fda8e87907ed304d4a5962aea0338366144df0df60f950b8f7f125871707f"}, - {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:1748cd573a0aee4d530bc223a23ccb8bb5b319645931a37bd1cfb68933b720c1"}, - {file = "dulwich-0.22.8-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a631b2309feb9a9631eabd896612ba36532e3ffedccace57f183bb868d7afc06"}, - {file = "dulwich-0.22.8-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:00e7d9a3d324f9e0a1b27880eec0e8e276ff76519621b66c1a429ca9eb3f5a8d"}, - {file = "dulwich-0.22.8-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:f8aa3de93201f9e3e40198725389aa9554a4ee3318a865f96a8e9bc9080f0b25"}, - {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1e8da9dd8135884975f5be0563ede02179240250e11f11942801ae31ac293f37"}, - {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4fc5ce2435fb3abdf76f1acabe48f2e4b3f7428232cadaef9daaf50ea7fa30ee"}, - {file = "dulwich-0.22.8-pp311-pypy311_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:982b21cc3100d959232cadb3da0a478bd549814dd937104ea50f43694ec27153"}, - {file = "dulwich-0.22.8-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:6bde2b13a05cc0ec2ecd4597a99896663544c40af1466121f4d046119b874ce3"}, - {file = "dulwich-0.22.8-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:6d446cb7d272a151934ad4b48ba691f32486d5267cf2de04ee3b5e05fc865326"}, - {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5f6338e6cf95cd76a0191b3637dc3caed1f988ae84d8e75f876d5cd75a8dd81a"}, - {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e004fc532ea262f2d5f375068101ca4792becb9d4aa663b050f5ac31fda0bb5c"}, - {file = "dulwich-0.22.8-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6bfdbc6fa477dee00d04e22d43a51571cd820cfaaaa886f0f155b8e29b3e3d45"}, 
- {file = "dulwich-0.22.8-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:ae900c8e573f79d714c1d22b02cdadd50b64286dd7203028f0200f82089e4950"}, - {file = "dulwich-0.22.8-py3-none-any.whl", hash = "sha256:ffc7a02e62b72884de58baaa3b898b7f6427893e79b1289ffa075092efe59181"}, - {file = "dulwich-0.22.8.tar.gz", hash = "sha256:701547310415de300269331abe29cb5717aa1ea377af826bf513d0adfb1c209b"}, -] - -[package.dependencies] -urllib3 = ">=1.25" - -[package.extras] -dev = ["mypy (==1.15.0)", "ruff (==0.9.7)"] -fastimport = ["fastimport"] -https = ["urllib3 (>=1.24.1)"] -paramiko = ["paramiko"] -pgp = ["gpg"] - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "python_version < \"3.11\"" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "fakeredis" -version = "2.27.0" -description = "Python implementation of redis API, can be used for testing purposes." 
-optional = false -python-versions = "<4.0,>=3.7" -groups = ["dev"] -files = [ - {file = "fakeredis-2.27.0-py3-none-any.whl", hash = "sha256:f4b6e0fa4193acbf00d81dac71ff5cc34fe7d7c12f1560b036f98578a103d5c3"}, - {file = "fakeredis-2.27.0.tar.gz", hash = "sha256:7b7584ec104392592297f46864a82cb7339a23e254ee885bf5ae07cfc64fbce7"}, -] - -[package.dependencies] -lupa = {version = ">=2.1,<3.0", optional = true, markers = "extra == \"lua\""} -redis = {version = ">=4.3", markers = "python_full_version > \"3.8.0\""} -sortedcontainers = ">=2,<3" -typing-extensions = {version = ">=4.7,<5.0", markers = "python_version < \"3.11\""} - -[package.extras] -bf = ["pyprobables (>=0.6,<0.7)"] -cf = ["pyprobables (>=0.6,<0.7)"] -json = ["jsonpath-ng (>=1.6,<2.0)"] -lua = ["lupa (>=2.1,<3.0)"] -probabilistic = ["pyprobables (>=0.6,<0.7)"] - -[[package]] -name = "fastjsonschema" -version = "2.21.1" -description = "Fastest Python implementation of JSON schema" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "fastjsonschema-2.21.1-py3-none-any.whl", hash = "sha256:c9e5b7e908310918cf494a434eeb31384dd84a98b57a30bcb1f535015b554667"}, - {file = "fastjsonschema-2.21.1.tar.gz", hash = "sha256:794d4f0a58f848961ba16af7b9c85a3e88cd360df008c59aac6fc5ae9323b5d4"}, -] - -[package.extras] -devel = ["colorama", "json-spec", "jsonschema", "pylint", "pytest", "pytest-benchmark", "pytest-cache", "validictory"] - -[[package]] -name = "filelock" -version = "3.18.0" -description = "A platform independent file lock." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "filelock-3.18.0-py3-none-any.whl", hash = "sha256:c401f4f8377c4464e6db25fff06205fd89bdd83b65eb0488ed1b160f780e21de"}, - {file = "filelock-3.18.0.tar.gz", hash = "sha256:adbc88eabb99d2fec8c9c1b229b171f18afa655400173ddc653d5d01501fb9f2"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -testing = ["covdefaults (>=2.3)", "coverage (>=7.6.10)", "diff-cover (>=9.2.1)", "pytest (>=8.3.4)", "pytest-asyncio (>=0.25.2)", "pytest-cov (>=6)", "pytest-mock (>=3.14)", "pytest-timeout (>=2.3.1)", "virtualenv (>=20.28.1)"] -typing = ["typing-extensions (>=4.12.2) ; python_version < \"3.11\""] - -[[package]] -name = "findpython" -version = "0.6.3" -description = "A utility to find python versions on your system" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "findpython-0.6.3-py3-none-any.whl", hash = "sha256:a85bb589b559cdf1b87227cc233736eb7cad894b9e68021ee498850611939ebc"}, - {file = "findpython-0.6.3.tar.gz", hash = "sha256:5863ea55556d8aadc693481a14ac4f3624952719efc1c5591abb0b4a9e965c94"}, -] - -[package.dependencies] -packaging = ">=20" - -[[package]] -name = "freezegun" -version = "1.5.1" -description = "Let your Python tests travel through time" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "freezegun-1.5.1-py3-none-any.whl", hash = "sha256:bf111d7138a8abe55ab48a71755673dbaa4ab87f4cff5634a4442dfec34c15f1"}, - {file = "freezegun-1.5.1.tar.gz", hash = "sha256:b29dedfcda6d5e8e083ce71b2b542753ad48cfec44037b3fc79702e2980a89e9"}, -] - -[package.dependencies] -python-dateutil = ">=2.7" - -[[package]] -name = "h11" -version = "0.14.0" -description = "A pure-Python, bring-your-own-I/O implementation of HTTP/1.1" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "h11-0.14.0-py3-none-any.whl", hash = 
"sha256:e3fe4ac4b851c468cc8363d500db52c2ead036020723024a109d37346efaa761"}, - {file = "h11-0.14.0.tar.gz", hash = "sha256:8f19fbbe99e72420ff35c00b27a34cb9937e902a8b810e2c88300c6f0a3b699d"}, -] - -[[package]] -name = "httpcore" -version = "1.0.7" -description = "A minimal low-level HTTP client." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "httpcore-1.0.7-py3-none-any.whl", hash = "sha256:a3fff8f43dc260d5bd363d9f9cf1830fa3a458b332856f34282de498ed420edd"}, - {file = "httpcore-1.0.7.tar.gz", hash = "sha256:8551cb62a169ec7162ac7be8d4817d561f60e08eaa485234898414bb5a8a0b4c"}, -] - -[package.dependencies] -certifi = "*" -h11 = ">=0.13,<0.15" - -[package.extras] -asyncio = ["anyio (>=4.0,<5.0)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -trio = ["trio (>=0.22.0,<1.0)"] - -[[package]] -name = "httpx" -version = "0.28.1" -description = "The next generation HTTP client." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "httpx-0.28.1-py3-none-any.whl", hash = "sha256:d909fcccc110f8c7faf814ca82a9a4d816bc5a6dbfea25d6591d6985b8ba59ad"}, - {file = "httpx-0.28.1.tar.gz", hash = "sha256:75e98c5f16b0f35b567856f597f06ff2270a374470a5c2392242528e3e3e42fc"}, -] - -[package.dependencies] -anyio = "*" -certifi = "*" -httpcore = "==1.*" -idna = "*" - -[package.extras] -brotli = ["brotli ; platform_python_implementation == \"CPython\"", "brotlicffi ; platform_python_implementation != \"CPython\""] -cli = ["click (==8.*)", "pygments (==2.*)", "rich (>=10,<14)"] -http2 = ["h2 (>=3,<5)"] -socks = ["socksio (==1.*)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = 
"sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "importlib-metadata" -version = "8.6.1" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.12\"" -files = [ - {file = "importlib_metadata-8.6.1-py3-none-any.whl", hash = "sha256:02a89390c1e15fdfdc0d7c6b25cb3e62650d0494005c97d6f148bf5b9787525e"}, - {file = "importlib_metadata-8.6.1.tar.gz", hash = "sha256:310b41d755445d74569f993ccfc22838295d9fe005425094fad953d7f15c8580"}, -] - -[package.dependencies] -zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib_resources (>=1.3) ; python_version < \"3.9\"", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "installer" -version = "0.7.0" -description = "A library for installing Python wheels." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "installer-0.7.0-py3-none-any.whl", hash = "sha256:05d1933f0a5ba7d8d6296bb6d5018e7c94fa473ceb10cf198a92ccea19c27b53"}, - {file = "installer-0.7.0.tar.gz", hash = "sha256:a26d3e3116289bb08216e0d0f7d925fcef0b0194eedfa0c944bcaaa106c4b631"}, -] - -[[package]] -name = "jaraco-classes" -version = "3.4.0" -description = "Utility functions for Python class constructs" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "jaraco.classes-3.4.0-py3-none-any.whl", hash = "sha256:f662826b6bed8cace05e7ff873ce0f9283b5c924470fe664fff1c2f00f581790"}, - {file = "jaraco.classes-3.4.0.tar.gz", hash = "sha256:47a024b51d0239c0dd8c8540c6c7f484be3b8fcf0b2d85c13825780d3b3f3acd"}, -] - -[package.dependencies] -more-itertools = "*" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["pytest (>=6)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1)"] - -[[package]] -name = "jaraco-context" -version = "6.0.1" -description = "Useful decorators and context managers" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "jaraco.context-6.0.1-py3-none-any.whl", hash = "sha256:f797fc481b490edb305122c9181830a3a5b76d84ef6d1aef2fb9b47ab956f9e4"}, - {file = "jaraco_context-6.0.1.tar.gz", hash = "sha256:9bae4ea555cf0b14938dc0aee7c9f32ed303aa20a3b73e7dc80111628792d1b3"}, -] - -[package.dependencies] -"backports.tarfile" = {version = "*", markers = "python_version < \"3.12\""} - -[package.extras] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -test = ["portend", "pytest (>=6,!=8.1.*)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] - 
-[[package]] -name = "jaraco-functools" -version = "4.1.0" -description = "Functools like those found in stdlib" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "jaraco.functools-4.1.0-py3-none-any.whl", hash = "sha256:ad159f13428bc4acbf5541ad6dec511f91573b90fba04df61dafa2a1231cf649"}, - {file = "jaraco_functools-4.1.0.tar.gz", hash = "sha256:70f7e0e2ae076498e212562325e805204fc092d7b4c17e0e86c959e249701a9d"}, -] - -[package.dependencies] -more-itertools = "*" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["jaraco.classes", "pytest (>=6,!=8.1.*)"] -type = ["pytest-mypy"] - -[[package]] -name = "jeepney" -version = "0.9.0" -description = "Low-level, pure Python DBus protocol wrapper." -optional = false -python-versions = ">=3.7" -groups = ["dev"] -markers = "sys_platform == \"linux\"" -files = [ - {file = "jeepney-0.9.0-py3-none-any.whl", hash = "sha256:97e5714520c16fc0a45695e5365a2e11b81ea79bba796e26f9f1d178cb182683"}, - {file = "jeepney-0.9.0.tar.gz", hash = "sha256:cf0e9e845622b81e4a28df94c40345400256ec608d0e55bb8a3feaa9163f5732"}, -] - -[package.extras] -test = ["async-timeout ; python_version < \"3.11\"", "pytest", "pytest-asyncio (>=0.17)", "pytest-trio", "testpath", "trio"] -trio = ["trio"] - -[[package]] -name = "keyring" -version = "25.6.0" -description = "Store and access your passwords safely." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "keyring-25.6.0-py3-none-any.whl", hash = "sha256:552a3f7af126ece7ed5c89753650eec89c7eaae8617d0aa4d9ad2b75111266bd"}, - {file = "keyring-25.6.0.tar.gz", hash = "sha256:0b39998aa941431eb3d9b0d4b2460bc773b9df6fed7621c2dfb291a7e0187a66"}, -] - -[package.dependencies] -importlib_metadata = {version = ">=4.11.4", markers = "python_version < \"3.12\""} -"jaraco.classes" = "*" -"jaraco.context" = "*" -"jaraco.functools" = "*" -jeepney = {version = ">=0.4.2", markers = "sys_platform == \"linux\""} -pywin32-ctypes = {version = ">=0.2.0", markers = "sys_platform == \"win32\""} -SecretStorage = {version = ">=3.2", markers = "sys_platform == \"linux\""} - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -completion = ["shtab (>=1.1.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["pyfakefs", "pytest (>=6,!=8.1.*)"] -type = ["pygobject-stubs", "pytest-mypy", "shtab", "types-pywin32"] - -[[package]] -name = "lupa" -version = "2.4" -description = "Python wrapper around Lua and LuaJIT" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "lupa-2.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:518822e047b2c65146cf09efb287f28c2eb3ced38bcc661f881f33bcd9e2ba1f"}, - {file = "lupa-2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:15ce18c8b7642dd5b8f491c6e19fea6079f24f52e543c698622e5eb80b17b952"}, - {file = "lupa-2.4-cp310-cp310-macosx_11_0_x86_64.whl", hash = "sha256:aea832d79931b512827ab6af68b1d20099d290c7bd94b98306bc9d639a719c6f"}, - {file = "lupa-2.4-cp310-cp310-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3d7f7dc548c35c0384aa54e3a8e0953dead10975e7d5ff9516ba09a36127f449"}, - {file = 
"lupa-2.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e166d81e6e39a7fedd5dd1d6560483bb7b0db18e1fe4153cc92088a1a81d9035"}, - {file = "lupa-2.4-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2a35e974e9dce96217dda3db89a22384093fdaa3ea7a3d8aaf6e548767634c34"}, - {file = "lupa-2.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:bc4bfd7abc63940e71d46ef22080ff02315b5c7619341daca5ea37f6a595edc6"}, - {file = "lupa-2.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b38ce88bfef9677b94bd5ab67d1359dd87fa7a78189909e28e90ada65bb5064b"}, - {file = "lupa-2.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:815071e5ef2d313b5e69f5671a343580643e2794cc5f38e22f75995116df11e8"}, - {file = "lupa-2.4-cp310-cp310-win32.whl", hash = "sha256:98c3160f5d1e5b9e976f836ca9a97e51ad3b52043680f117ba3d6c535309fef0"}, - {file = "lupa-2.4-cp310-cp310-win_amd64.whl", hash = "sha256:f1a0cee956c929f09aa8af36d2b28f1a39170ef8673deaf7b80a5dd8a30d1c54"}, - {file = "lupa-2.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5ae945bb9b6fd84bfa4bd3a3caabe54d05d2514da16e1f45d304208c58819ebd"}, - {file = "lupa-2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:dae6006214974192775d76bee156cee42632320f93f9756d2763f4aa90090026"}, - {file = "lupa-2.4-cp311-cp311-macosx_11_0_x86_64.whl", hash = "sha256:fdcf8ae011e2e631dd1737cdf705219eb797063f0455761c7046c2554f1d3f8c"}, - {file = "lupa-2.4-cp311-cp311-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:db0b331de8dcdc6540e6a62500fcbfb1e3d9887c6ff5fb146b8713018ea7c102"}, - {file = "lupa-2.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:63c74c457e52d6532795e60e3f3ad87ae38a833d2a427abd55d98032701b0d39"}, - {file = "lupa-2.4-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:795d047b85363b8f9123cb87bd590d177f7c31a631cc6e0a9de2dbb7f92cf6d5"}, - {file = 
"lupa-2.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4b2a360db05c66cf4cca0e07fe322a3b2fe2209a46f8e9d8ff2f4b93b5368b35"}, - {file = "lupa-2.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:c6f38b65bb16ce9c92c6d993c60aca1d700326a513ce294635a67a1553689e64"}, - {file = "lupa-2.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fd0266968ade202b45747e932fb2e1823587eee2b0983733841325a0ade272ed"}, - {file = "lupa-2.4-cp311-cp311-win32.whl", hash = "sha256:8a917b550db751419bd7ec426e26605ad8934a540d376d253b6c6ab1570ce58a"}, - {file = "lupa-2.4-cp311-cp311-win_amd64.whl", hash = "sha256:c8ceb7beb0d6f42d8a20bfa880f986f29ba8ad162ac678d62a9b2628e8ee6946"}, - {file = "lupa-2.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:bbf9b26bd8e4f28e794e3572bfcff4489a137747de26bdfe3df33b88370f39cc"}, - {file = "lupa-2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:085f104ec8e4a848177c16691724da45d0bb8c79deef331fd21c36bdc53e941b"}, - {file = "lupa-2.4-cp312-cp312-macosx_11_0_x86_64.whl", hash = "sha256:81f3a4d471e2eb4e4db3ae9367d1144298f94ff8213c701eee8f9e8100f80b4a"}, - {file = "lupa-2.4-cp312-cp312-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c803c8a5692145024c20ce8ee82826b8840fd806565fa8134621b361f66451d8"}, - {file = "lupa-2.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6732f4051f982695a87db69539fd9b4c2bddf51ee43cdcc1a2c379ca6af6c5b2"}, - {file = "lupa-2.4-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:cdbb1213a20a52e8e2c90f473d15a8a9c885eaf291d3536faf5414e3a5c3f8e6"}, - {file = "lupa-2.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:34992e172096e2209d5a55364774e90311ef30fe002ca6ab9e617211c08651de"}, - {file = "lupa-2.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1b4cfa0fd7f666ad1b56643b7f43925445ccf6f68a75ae715c155bc56dbc843d"}, - {file = "lupa-2.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = 
"sha256:41286859dc564098f8cc3d707d8f6a8934540127761498752c4fa25aea38d89b"}, - {file = "lupa-2.4-cp312-cp312-win32.whl", hash = "sha256:bb41e63ca36ba4eafb346fcea2daede74484ef2b70affd934e7d265d30d32dcd"}, - {file = "lupa-2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a89ed97ea51c093cfa0fd00669e4d9fdda8b1bd9abb756339ea8c96cb7e890f7"}, - {file = "lupa-2.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:12b30ea0586579ecde0e13bb372010326178ff309f52b5e39f6df843bd815ba7"}, - {file = "lupa-2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:0fce2487f9d9199e0d78478ecd1ba47d1779850588a8e0b7def4f3adf25e943c"}, - {file = "lupa-2.4-cp313-cp313-macosx_11_0_x86_64.whl", hash = "sha256:ed71a89d500191f7d0ad5a0b988298e4d9fde8445fbac940e0996e214760a5c5"}, - {file = "lupa-2.4-cp313-cp313-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:41f2b0d0b44e1c94814f69ba82ef25b7e47a7f3edcd47d220a11ee3b64514452"}, - {file = "lupa-2.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f16fbaa68ec999ee5e8935d517df8d8a6bfcaa8fb2fe5b9c60131be15590d0c0"}, - {file = "lupa-2.4-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4842759d027db108f605dc895c9afc4011d12eac448e0d092a4d0b21e79ba1c5"}, - {file = "lupa-2.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:52efeef1e632c5edff61bd6d79b0f393e515ea2a464f6f0d4276ecc565279f04"}, - {file = "lupa-2.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:2b32202a1244b6c7aaa6d2a611b5a842de4b166703388db66265b37074e255fd"}, - {file = "lupa-2.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ba0649579b0698ce4841106ec7eee657995b8c13e9f5e16bbf93e8afb387d59b"}, - {file = "lupa-2.4-cp313-cp313-win32.whl", hash = "sha256:18e12e714a2f633bf3583f23ec07904a0584e351889eff7f98439d520255a204"}, - {file = "lupa-2.4-cp313-cp313-win_amd64.whl", hash = "sha256:203a11122bd11366e5b836590ea11bf2ebfb79bfdaf0ffd44b6646cea51cb255"}, - {file = 
"lupa-2.4-cp36-cp36m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07f55b6c30f9e03f63ca7c4037b146110194ab0f89021a9923b817a01aa1c3bc"}, - {file = "lupa-2.4-cp36-cp36m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f2d5c732f4fe8a4f1577f49e7a31045294019c731208ecee6f194bb03ee4c186"}, - {file = "lupa-2.4-cp36-cp36m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90a41c0f2744be3b055dec0b9f65cd87c52fb7a86891df43292369ee8e4ea111"}, - {file = "lupa-2.4-cp36-cp36m-musllinux_1_2_aarch64.whl", hash = "sha256:34994926045e66fea6b93b2caab3ac66f5de4218055fd4dd2b98198b2c3765ee"}, - {file = "lupa-2.4-cp36-cp36m-musllinux_1_2_i686.whl", hash = "sha256:b250cd39639fff9a842a138f18343c579a993e56c9dea8914398e5c9775f6b0d"}, - {file = "lupa-2.4-cp36-cp36m-musllinux_1_2_x86_64.whl", hash = "sha256:1247453e4b95dfbf88a13065e49815992db16485398760951425a29df7b5e2dc"}, - {file = "lupa-2.4-cp36-cp36m-win32.whl", hash = "sha256:0f95747c40156a77b4336f1bb42f1e29e42cfb46c57b978b50db6980025b528c"}, - {file = "lupa-2.4-cp36-cp36m-win_amd64.whl", hash = "sha256:4e12cfc3005fcd2a5424449a7d989d1820b7e17a06d65dfe769255278122b69e"}, - {file = "lupa-2.4-cp37-cp37m-macosx_11_0_x86_64.whl", hash = "sha256:31e522dcd53cb2a8c53161465f3d20dc9672241b2c4f5384ebda07f30d35d7f7"}, - {file = "lupa-2.4-cp37-cp37m-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:710067765c252328ba2d521a3ab7dfef3a6b89293b9ed24254587db5210612ca"}, - {file = "lupa-2.4-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9c3feb9d8af4c5cda2f1523ce6b40cadc96b8de275d84f7d64e1a35b8ecd7f62"}, - {file = "lupa-2.4-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:89d802cd78da75262477148ef5aea14c8da76f356329f69b44bc3b31dd3d64a1"}, - {file = "lupa-2.4-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = 
"sha256:e84b388356fe392d787e6a8aed182bd5b807de8965aa9ef6f10d0eb5e47ddca5"}, - {file = "lupa-2.4-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f70d9d7e2fd38a3124461cb3a2d10494c4fbea0ee9fa801e6066b79f0a75e5f0"}, - {file = "lupa-2.4-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:7ca47a1ac55c8f5cc0043b9fee195b2f6f3b9435fde71a0e035546b9410731e9"}, - {file = "lupa-2.4-cp37-cp37m-win32.whl", hash = "sha256:829bfb692fee181d275c0d24dafe2c2273794f438469d0fd32f0127652f57e7a"}, - {file = "lupa-2.4-cp37-cp37m-win_amd64.whl", hash = "sha256:ea439dbd6c3e9895f986fff57a4617140239ad3f0b60ca4ccff0b32b3401b8d5"}, - {file = "lupa-2.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:76bae9285a26d1a1cacb630d1db57e829f3f91d1e8c0760acabd0e9d04eb65f3"}, - {file = "lupa-2.4-cp38-cp38-macosx_11_0_x86_64.whl", hash = "sha256:27cafb9bbe5a4869a50dcb7aca068e1cc68e233d54cd6093116ffb868f7083e3"}, - {file = "lupa-2.4-cp38-cp38-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d1737a54ac93b0bfe22762506665b7ac433fd161a596aee342e4dae106198349"}, - {file = "lupa-2.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:03fca7715493efc98db21686e225942dba3ca1683c6c501e47384702871d7c79"}, - {file = "lupa-2.4-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:579fae5adf99f6872379c585def71e502312072ec8bdf04244dc6c875f2b10c4"}, - {file = "lupa-2.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:073bf02f31fa60cff0952b0f4c41a635b3a63d75b4d6afdf2380520efad78241"}, - {file = "lupa-2.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:6ed59e6ed08c4ddae4bbf317b37af5ee2253c5ff14dc3914a5f3d3c128535d90"}, - {file = "lupa-2.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:a1c9fed2ee9ce6c117fe78f987617a8890c09d19476ec97aa64ce2c6cbb507f0"}, - {file = "lupa-2.4-cp38-cp38-win32.whl", hash = "sha256:9c803d22bdfd0e0de7b43793b10d1e235defdbfbb99dbf12405dfb7e34d004d6"}, - {file = "lupa-2.4-cp38-cp38-win_amd64.whl", 
hash = "sha256:a468c6fe8334af1a5c5881e54afc39c3ebbef0e1d4af1a9ceaf04a4c95edfb9a"}, - {file = "lupa-2.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:74a3747bcd53b9f1b6adf44343a614cf0d03a4f11d2e9dee08900a2c18f1266a"}, - {file = "lupa-2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:7bb03be049222056ae344b73a2a3c6d842c55c3a69b5c5acea0f9f5a0f1dddc1"}, - {file = "lupa-2.4-cp39-cp39-macosx_11_0_x86_64.whl", hash = "sha256:79ff99c6a3493c2eb69a932e034d0e67fa03ef50e235c0804393ca6040ab9a90"}, - {file = "lupa-2.4-cp39-cp39-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:599764acf3db817b1623ef82988c85d0c361b564108918658079eca1dcd2cc8b"}, - {file = "lupa-2.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6218c0dead8d85ff716969347273af3abf29fa520e07a0fc88079a8cefd58faf"}, - {file = "lupa-2.4-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6a4f6483c55a6449bd95b0c0b17683b0fde6970b578da4f5de37892884b4d353"}, - {file = "lupa-2.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:00f7fb8ae883a25bc17058dae19635da32dd79b3c43470f4267d57f7bd2d5a93"}, - {file = "lupa-2.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:0df511db2bf0a4e7c8bb5c0092a83e0c217a175f10dba59297b2b903b02e243f"}, - {file = "lupa-2.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:761491befe07097a07f7a1f0a6595076ca04c8b2db6071e8dedbbbf4cf1d5591"}, - {file = "lupa-2.4-cp39-cp39-win32.whl", hash = "sha256:b53f91cbcd2673a25754bc65b4224ffa3e9cd580a4c7cf2659db7ca432d1b69b"}, - {file = "lupa-2.4-cp39-cp39-win_amd64.whl", hash = "sha256:ff91e00c077b7e3fc2c5a8b4bcc1f62eaf403f435fc801f32dd610f20332dc0a"}, - {file = "lupa-2.4-pp310-pypy310_pp73-macosx_11_0_x86_64.whl", hash = "sha256:889329d0e8e12a1e2529b0258ee69bb1f2ea94aa673b1782f9e12aa55ff3c960"}, - {file = "lupa-2.4-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:84d58aedec8996065e3fc6d397c1434e86176feda09ce7a73227506fc89d1c48"}, - {file = "lupa-2.4-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:2708eb13b7c0696d9c9e02eea1717c4a24812395d18e6500547ae440da8d7963"}, - {file = "lupa-2.4-pp37-pypy37_pp73-macosx_11_0_x86_64.whl", hash = "sha256:834f81a582eabb2242599a9ed222f14d4b17ffff986d42ef8e62cae3e45912c0"}, - {file = "lupa-2.4-pp37-pypy37_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5beeb9ee39877302b85226b81fa8038f3a46aba9393c64d08f349bf0455efb73"}, - {file = "lupa-2.4-pp37-pypy37_pp73-win_amd64.whl", hash = "sha256:6e758c5d7c1ed9adca15791d24c78b27f67fa9b0df0126f4334001c94e2742a2"}, - {file = "lupa-2.4-pp38-pypy38_pp73-macosx_11_0_x86_64.whl", hash = "sha256:eb122ed5a987e579b7fc41382946f1185b78672a2aded1263752b98a0aa11f06"}, - {file = "lupa-2.4-pp38-pypy38_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:03fc9263ed07229aaa09fa93a2f485f6b9ce5a2364e80088c8c96376bada65ad"}, - {file = "lupa-2.4-pp38-pypy38_pp73-win_amd64.whl", hash = "sha256:a1a5206eb870b5d21285041fe111b8b41b2da789bbf8a50bc45600be24d7a415"}, - {file = "lupa-2.4-pp39-pypy39_pp73-macosx_11_0_x86_64.whl", hash = "sha256:cc521f6d228749fd57649a956f9543a729e462d7693540d4397e6b9f378e3196"}, - {file = "lupa-2.4-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdda690d24aa55e00971bc8443a7d8a28aade14eb01603aed65b345c9dcd92e3"}, - {file = "lupa-2.4-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:71e9cfa60042b3de4dd68f00a2c94dd45e03d3583fb0fc802d9fbbb3b32dd2f7"}, - {file = "lupa-2.4.tar.gz", hash = "sha256:5300d21f81aa1bd4d45f55e31dddba3b879895696068a3f84cfcb5fd9148aacd"}, -] - -[[package]] -name = "more-itertools" -version = "10.6.0" -description = "More routines for operating on iterables, beyond itertools" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "more-itertools-10.6.0.tar.gz", hash = 
"sha256:2cd7fad1009c31cc9fb6a035108509e6547547a7a738374f10bd49a09eb3ee3b"}, - {file = "more_itertools-10.6.0-py3-none-any.whl", hash = "sha256:6eb054cb4b6db1473f6e15fcc676a08e4732548acd47c708f0e179c2c7c01e89"}, -] - -[[package]] -name = "msgpack" -version = "1.1.0" -description = "MessagePack serializer" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7ad442d527a7e358a469faf43fda45aaf4ac3249c8310a82f0ccff9164e5dccd"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:74bed8f63f8f14d75eec75cf3d04ad581da6b914001b474a5d3cd3372c8cc27d"}, - {file = "msgpack-1.1.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:914571a2a5b4e7606997e169f64ce53a8b1e06f2cf2c3a7273aa106236d43dd5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c921af52214dcbb75e6bdf6a661b23c3e6417f00c603dd2070bccb5c3ef499f5"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d8ce0b22b890be5d252de90d0e0d119f363012027cf256185fc3d474c44b1b9e"}, - {file = "msgpack-1.1.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:73322a6cc57fcee3c0c57c4463d828e9428275fb85a27aa2aa1a92fdc42afd7b"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e1f3c3d21f7cf67bcf2da8e494d30a75e4cf60041d98b3f79875afb5b96f3a3f"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64fc9068d701233effd61b19efb1485587560b66fe57b3e50d29c5d78e7fef68"}, - {file = "msgpack-1.1.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:42f754515e0f683f9c79210a5d1cad631ec3d06cea5172214d2176a42e67e19b"}, - {file = "msgpack-1.1.0-cp310-cp310-win32.whl", hash = "sha256:3df7e6b05571b3814361e8464f9304c42d2196808e0119f55d0d3e62cd5ea044"}, - {file = "msgpack-1.1.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:685ec345eefc757a7c8af44a3032734a739f8c45d1b0ac45efc5d8977aa4720f"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:3d364a55082fb2a7416f6c63ae383fbd903adb5a6cf78c5b96cc6316dc1cedc7"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:79ec007767b9b56860e0372085f8504db5d06bd6a327a335449508bbee9648fa"}, - {file = "msgpack-1.1.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6ad622bf7756d5a497d5b6836e7fc3752e2dd6f4c648e24b1803f6048596f701"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e59bca908d9ca0de3dc8684f21ebf9a690fe47b6be93236eb40b99af28b6ea6"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5e1da8f11a3dd397f0a32c76165cf0c4eb95b31013a94f6ecc0b280c05c91b59"}, - {file = "msgpack-1.1.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:452aff037287acb1d70a804ffd022b21fa2bb7c46bee884dbc864cc9024128a0"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8da4bf6d54ceed70e8861f833f83ce0814a2b72102e890cbdfe4b34764cdd66e"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:41c991beebf175faf352fb940bf2af9ad1fb77fd25f38d9142053914947cdbf6"}, - {file = "msgpack-1.1.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:a52a1f3a5af7ba1c9ace055b659189f6c669cf3657095b50f9602af3a3ba0fe5"}, - {file = "msgpack-1.1.0-cp311-cp311-win32.whl", hash = "sha256:58638690ebd0a06427c5fe1a227bb6b8b9fdc2bd07701bec13c2335c82131a88"}, - {file = "msgpack-1.1.0-cp311-cp311-win_amd64.whl", hash = "sha256:fd2906780f25c8ed5d7b323379f6138524ba793428db5d0e9d226d3fa6aa1788"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:d46cf9e3705ea9485687aa4001a76e44748b609d260af21c4ceea7f2212a501d"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = 
"sha256:5dbad74103df937e1325cc4bfeaf57713be0b4f15e1c2da43ccdd836393e2ea2"}, - {file = "msgpack-1.1.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:58dfc47f8b102da61e8949708b3eafc3504509a5728f8b4ddef84bd9e16ad420"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4676e5be1b472909b2ee6356ff425ebedf5142427842aa06b4dfd5117d1ca8a2"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17fb65dd0bec285907f68b15734a993ad3fc94332b5bb21b0435846228de1f39"}, - {file = "msgpack-1.1.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a51abd48c6d8ac89e0cfd4fe177c61481aca2d5e7ba42044fd218cfd8ea9899f"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2137773500afa5494a61b1208619e3871f75f27b03bcfca7b3a7023284140247"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:398b713459fea610861c8a7b62a6fec1882759f308ae0795b5413ff6a160cf3c"}, - {file = "msgpack-1.1.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:06f5fd2f6bb2a7914922d935d3b8bb4a7fff3a9a91cfce6d06c13bc42bec975b"}, - {file = "msgpack-1.1.0-cp312-cp312-win32.whl", hash = "sha256:ad33e8400e4ec17ba782f7b9cf868977d867ed784a1f5f2ab46e7ba53b6e1e1b"}, - {file = "msgpack-1.1.0-cp312-cp312-win_amd64.whl", hash = "sha256:115a7af8ee9e8cddc10f87636767857e7e3717b7a2e97379dc2054712693e90f"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:071603e2f0771c45ad9bc65719291c568d4edf120b44eb36324dcb02a13bfddf"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0f92a83b84e7c0749e3f12821949d79485971f087604178026085f60ce109330"}, - {file = "msgpack-1.1.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4a1964df7b81285d00a84da4e70cb1383f2e665e0f1f2a7027e683956d04b734"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:59caf6a4ed0d164055ccff8fe31eddc0ebc07cf7326a2aaa0dbf7a4001cd823e"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0907e1a7119b337971a689153665764adc34e89175f9a34793307d9def08e6ca"}, - {file = "msgpack-1.1.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:65553c9b6da8166e819a6aa90ad15288599b340f91d18f60b2061f402b9a4915"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:7a946a8992941fea80ed4beae6bff74ffd7ee129a90b4dd5cf9c476a30e9708d"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:4b51405e36e075193bc051315dbf29168d6141ae2500ba8cd80a522964e31434"}, - {file = "msgpack-1.1.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:b4c01941fd2ff87c2a934ee6055bda4ed353a7846b8d4f341c428109e9fcde8c"}, - {file = "msgpack-1.1.0-cp313-cp313-win32.whl", hash = "sha256:7c9a35ce2c2573bada929e0b7b3576de647b0defbd25f5139dcdaba0ae35a4cc"}, - {file = "msgpack-1.1.0-cp313-cp313-win_amd64.whl", hash = "sha256:bce7d9e614a04d0883af0b3d4d501171fbfca038f12c77fa838d9f198147a23f"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c40ffa9a15d74e05ba1fe2681ea33b9caffd886675412612d93ab17b58ea2fec"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f1ba6136e650898082d9d5a5217d5906d1e138024f836ff48691784bbe1adf96"}, - {file = "msgpack-1.1.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0856a2b7e8dcb874be44fea031d22e5b3a19121be92a1e098f46068a11b0870"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:471e27a5787a2e3f974ba023f9e265a8c7cfd373632247deb225617e3100a3c7"}, - {file = "msgpack-1.1.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:646afc8102935a388ffc3914b336d22d1c2d6209c773f3eb5dd4d6d3b6f8c1cb"}, - {file = 
"msgpack-1.1.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:13599f8829cfbe0158f6456374e9eea9f44eee08076291771d8ae93eda56607f"}, - {file = "msgpack-1.1.0-cp38-cp38-win32.whl", hash = "sha256:8a84efb768fb968381e525eeeb3d92857e4985aacc39f3c47ffd00eb4509315b"}, - {file = "msgpack-1.1.0-cp38-cp38-win_amd64.whl", hash = "sha256:879a7b7b0ad82481c52d3c7eb99bf6f0645dbdec5134a4bddbd16f3506947feb"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:53258eeb7a80fc46f62fd59c876957a2d0e15e6449a9e71842b6d24419d88ca1"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:7e7b853bbc44fb03fbdba34feb4bd414322180135e2cb5164f20ce1c9795ee48"}, - {file = "msgpack-1.1.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:f3e9b4936df53b970513eac1758f3882c88658a220b58dcc1e39606dccaaf01c"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:46c34e99110762a76e3911fc923222472c9d681f1094096ac4102c18319e6468"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8a706d1e74dd3dea05cb54580d9bd8b2880e9264856ce5068027eed09680aa74"}, - {file = "msgpack-1.1.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:534480ee5690ab3cbed89d4c8971a5c631b69a8c0883ecfea96c19118510c846"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:8cf9e8c3a2153934a23ac160cc4cba0ec035f6867c8013cc6077a79823370346"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3180065ec2abbe13a4ad37688b61b99d7f9e012a535b930e0e683ad6bc30155b"}, - {file = "msgpack-1.1.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:c5a91481a3cc573ac8c0d9aace09345d989dc4a0202b7fcb312c88c26d4e71a8"}, - {file = "msgpack-1.1.0-cp39-cp39-win32.whl", hash = "sha256:f80bc7d47f76089633763f952e67f8214cb7b3ee6bfa489b3cb6a84cfac114cd"}, - {file = "msgpack-1.1.0-cp39-cp39-win_amd64.whl", hash = 
"sha256:4d1b7ff2d6146e16e8bd665ac726a89c74163ef8cd39fa8c1087d4e52d3a2325"}, - {file = "msgpack-1.1.0.tar.gz", hash = "sha256:dd432ccc2c72b914e4cb77afce64aab761c1137cc698be3984eee260bcb2896e"}, -] - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pbs-installer" -version = "2025.3.17" -description = "Installer for Python Build Standalone" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pbs_installer-2025.3.17-py3-none-any.whl", hash = "sha256:d2b0563b1d5d814e479f3c43d7aee019250f68a0a113d754714fa9a721f83b47"}, - {file = "pbs_installer-2025.3.17.tar.gz", hash = "sha256:dde058f925b989c1d3bd90739c16ffd0e68732f7716e4d1e01ca480d00a67560"}, -] - -[package.dependencies] -httpx = {version = ">=0.27.0,<1", optional = true, markers = "extra == \"download\""} -zstandard = {version = ">=0.21.0", optional = true, markers = "extra == \"install\""} - -[package.extras] -all = ["pbs-installer[download,install]"] -download = ["httpx (>=0.27.0,<1)"] -install = ["zstandard (>=0.21.0)"] - -[[package]] -name = "pkginfo" -version = "1.12.1.2" -description = "Query metadata from sdists / bdists / installed packages." 
-optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "pkginfo-1.12.1.2-py3-none-any.whl", hash = "sha256:c783ac885519cab2c34927ccfa6bf64b5a704d7c69afaea583dd9b7afe969343"}, - {file = "pkginfo-1.12.1.2.tar.gz", hash = "sha256:5cd957824ac36f140260964eba3c6be6442a8359b8c48f4adf90210f33a04b7b"}, -] - -[package.extras] -testing = ["pytest", "pytest-cov", "wheel"] - -[[package]] -name = "platformdirs" -version = "4.3.7" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a `user data dir`." -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "platformdirs-4.3.7-py3-none-any.whl", hash = "sha256:a03875334331946f13c549dbd8f4bac7a13a50a895a0eb1e8c6a8ace80d40a94"}, - {file = "platformdirs-4.3.7.tar.gz", hash = "sha256:eb437d586b6a0986388f0d6f74aa0cde27b48d0e3d66843640bfb6bdcdb6e351"}, -] - -[package.extras] -docs = ["furo (>=2024.8.6)", "proselint (>=0.14)", "sphinx (>=8.1.3)", "sphinx-autodoc-typehints (>=3)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=8.3.4)", "pytest-cov (>=6)", "pytest-mock (>=3.14)"] -type = ["mypy (>=1.14.1)"] - -[[package]] -name = "poetry" -version = "2.1.1" -description = "Python dependency management and packaging made easy." 
-optional = false -python-versions = "<4.0,>=3.9" -groups = ["dev"] -files = [ - {file = "poetry-2.1.1-py3-none-any.whl", hash = "sha256:1d433880bd5b401327ddee789ccfe9ff197bf3b0cd240f0bc7cc99c84d14b16c"}, - {file = "poetry-2.1.1.tar.gz", hash = "sha256:d82673865bf13d6cd0dacf28c69a89670456d8df2f9e5da82bfb5f833ba00efc"}, -] - -[package.dependencies] -build = ">=1.2.1,<2.0.0" -cachecontrol = {version = ">=0.14.0,<0.15.0", extras = ["filecache"]} -cleo = ">=2.1.0,<3.0.0" -dulwich = ">=0.22.6,<0.23.0" -fastjsonschema = ">=2.18.0,<3.0.0" -findpython = ">=0.6.2,<0.7.0" -installer = ">=0.7.0,<0.8.0" -keyring = ">=25.1.0,<26.0.0" -packaging = ">=24.0" -pbs-installer = {version = ">=2025.1.6,<2026.0.0", extras = ["download", "install"]} -pkginfo = ">=1.12,<2.0" -platformdirs = ">=3.0.0,<5" -poetry-core = "2.1.1" -pyproject-hooks = ">=1.0.0,<2.0.0" -requests = ">=2.26,<3.0" -requests-toolbelt = ">=1.0.0,<2.0.0" -shellingham = ">=1.5,<2.0" -tomli = {version = ">=2.0.1,<3.0.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.11.4,<1.0.0" -trove-classifiers = ">=2022.5.19" -virtualenv = ">=20.26.6,<21.0.0" -xattr = {version = ">=1.0.0,<2.0.0", markers = "sys_platform == \"darwin\""} - -[[package]] -name = "poetry-core" -version = "2.1.1" -description = "Poetry PEP 517 Build Backend" -optional = false -python-versions = "<4.0,>=3.9" -groups = ["dev"] -files = [ - {file = "poetry_core-2.1.1-py3-none-any.whl", hash = "sha256:bc3b0382ab4d00d5d780277fd0aad1580eb4403613b37fc60fec407b5bee1fe6"}, - {file = "poetry_core-2.1.1.tar.gz", hash = "sha256:c1a1f6f00e4254742f40988a8caf665549101cf9991122cd5de1198897768b1a"}, -] - -[[package]] -name = "pycparser" -version = "2.22" -description = "C parser in Python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "sys_platform == \"linux\" or sys_platform == \"darwin\" or platform_python_implementation == \"PyPy\"" -files = [ - {file = "pycparser-2.22-py3-none-any.whl", hash = 
"sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc"}, - {file = "pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6"}, -] - -[[package]] -name = "pyproject-hooks" -version = "1.2.0" -description = "Wrappers to call pyproject.toml-based build backend hooks." -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "pyproject_hooks-1.2.0-py3-none-any.whl", hash = "sha256:9e5c6bfa8dcc30091c74b0cf803c81fdd29d94f01992a7707bc97babb1141913"}, - {file = "pyproject_hooks-1.2.0.tar.gz", hash = "sha256:1e859bd5c40fae9448642dd871adf459e5e2084186e8d2c2a79a824c970da1f8"}, -] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev"] -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = "python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2025.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -groups = ["main"] -files = [ - {file = "pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00"}, - {file = "pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3"}, -] - -[[package]] -name = "pywin32-ctypes" -version = "0.2.3" -description = "A (partial) reimplementation of pywin32 using ctypes/cffi" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "pywin32-ctypes-0.2.3.tar.gz", hash = 
"sha256:d162dc04946d704503b2edc4d55f3dba5c1d539ead017afa00142c38b9885755"}, - {file = "pywin32_ctypes-0.2.3-py3-none-any.whl", hash = "sha256:8a1513379d709975552d202d942d9837758905c8d01eb82b8bcc30918929e7b8"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = "PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash 
= "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = "PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = 
"PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = 
"PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "rapidfuzz" -version = "3.12.2" -description = "rapid fuzzy string matching" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0b9a75e0385a861178adf59e86d6616cbd0d5adca7228dc9eeabf6f62cf5b0b1"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:6906a7eb458731e3dd2495af1d0410e23a21a2a2b7ced535e6d5cd15cb69afc5"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f4b3334a8958b689f292d5ce8a928140ac98919b51e084f04bf0c14276e4c6ba"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:85a54ce30345cff2c79cbcffa063f270ad1daedd0d0c3ff6e541d3c3ba4288cf"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:acb63c5072c08058f8995404201a52fc4e1ecac105548a4d03c6c6934bda45a3"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5385398d390c6571f0f2a7837e6ddde0c8b912dac096dc8c87208ce9aaaa7570"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5032cbffa245b4beba0067f8ed17392ef2501b346ae3c1f1d14b950edf4b6115"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:195adbb384d89d6c55e2fd71e7fb262010f3196e459aa2f3f45f31dd7185fe72"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:f43b773a4d4950606fb25568ecde5f25280daf8f97b87eb323e16ecd8177b328"}, - 
{file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:55a43be0e0fa956a919043c19d19bd988991d15c59f179d413fe5145ed9deb43"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:71cf1ea16acdebe9e2fb62ee7a77f8f70e877bebcbb33b34e660af2eb6d341d9"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a3692d4ab36d44685f61326dca539975a4eda49b2a76f0a3df177d8a2c0de9d2"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win32.whl", hash = "sha256:09227bd402caa4397ba1d6e239deea635703b042dd266a4092548661fb22b9c6"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win_amd64.whl", hash = "sha256:0f05b7b95f9f87254b53fa92048367a8232c26cee7fc8665e4337268c3919def"}, - {file = "rapidfuzz-3.12.2-cp310-cp310-win_arm64.whl", hash = "sha256:6938738e00d9eb6e04097b3f565097e20b0c398f9c58959a2bc64f7f6be3d9da"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:e9c4d984621ae17404c58f8d06ed8b025e167e52c0e6a511dfec83c37e9220cd"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:9f9132c55d330f0a1d34ce6730a76805323a6250d97468a1ca766a883d6a9a25"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:39b343b6cb4b2c3dbc8d2d4c5ee915b6088e3b144ddf8305a57eaab16cf9fc74"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:24081077b571ec4ee6d5d7ea0e49bc6830bf05b50c1005028523b9cd356209f3"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:c988a4fc91856260355773bf9d32bebab2083d4c6df33fafeddf4330e5ae9139"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:780b4469ee21cf62b1b2e8ada042941fd2525e45d5fb6a6901a9798a0e41153c"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:edd84b0a323885493c893bad16098c5e3b3005d7caa995ae653da07373665d97"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efa22059c765b3d8778083805b199deaaf643db070f65426f87d274565ddf36a"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:095776b11bb45daf7c2973dd61cc472d7ea7f2eecfa454aef940b4675659b92f"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7e2574cf4aa86065600b664a1ac7b8b8499107d102ecde836aaaa403fc4f1784"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:d5a3425a6c50fd8fbd991d8f085ddb504791dae6ef9cc3ab299fea2cb5374bef"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:97fb05e1ddb7b71a054040af588b0634214ee87cea87900d309fafc16fd272a4"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win32.whl", hash = "sha256:b4c5a0413589aef936892fbfa94b7ff6f7dd09edf19b5a7b83896cc9d4e8c184"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win_amd64.whl", hash = "sha256:58d9ae5cf9246d102db2a2558b67fe7e73c533e5d769099747921232d88b9be2"}, - {file = "rapidfuzz-3.12.2-cp311-cp311-win_arm64.whl", hash = "sha256:7635fe34246cd241c8e35eb83084e978b01b83d5ef7e5bf72a704c637f270017"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:1d982a651253ffe8434d9934ff0c1089111d60502228464721a2a4587435e159"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:02e6466caa0222d5233b1f05640873671cd99549a5c5ba4c29151634a1e56080"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e956b3f053e474abae69ac693a52742109d860ac2375fe88e9387d3277f4c96c"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2dee7d740a2d5418d4f964f39ab8d89923e6b945850db833e798a1969b19542a"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:a057cdb0401e42c84b6516c9b1635f7aedd5e430c6e388bd5f6bcd1d6a0686bb"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:dccf8d4fb5b86d39c581a59463c596b1d09df976da26ff04ae219604223d502f"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:21d5b3793c6f5aecca595cd24164bf9d3c559e315ec684f912146fc4e769e367"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:46a616c0e13cff2de1761b011e0b14bb73b110182f009223f1453d505c9a975c"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:19fa5bc4301a1ee55400d4a38a8ecf9522b0391fc31e6da5f4d68513fe5c0026"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:544a47190a0d25971658a9365dba7095397b4ce3e897f7dd0a77ca2cf6fa984e"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:f21af27c5e001f0ba1b88c36a0936437dfe034c452548d998891c21125eb640f"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b63170d9db00629b5b3f2862114d8d6ee19127eaba0eee43762d62a25817dbe0"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win32.whl", hash = "sha256:6c7152d77b2eb6bfac7baa11f2a9c45fd5a2d848dbb310acd0953b3b789d95c9"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win_amd64.whl", hash = "sha256:1a314d170ee272ac87579f25a6cf8d16a031e1f7a7b07663434b41a1473bc501"}, - {file = "rapidfuzz-3.12.2-cp312-cp312-win_arm64.whl", hash = "sha256:d41e8231326e94fd07c4d8f424f6bed08fead6f5e6688d1e6e787f1443ae7631"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941f31038dba5d3dedcfcceba81d61570ad457c873a24ceb13f4f44fcb574260"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fe2dfc454ee51ba168a67b1e92b72aad251e45a074972cef13340bbad2fd9438"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:78fafaf7f5a48ee35ccd7928339080a0136e27cf97396de45259eca1d331b714"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e0c7989ff32c077bb8fd53253fd6ca569d1bfebc80b17557e60750e6909ba4fe"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:96fa00bc105caa34b6cd93dca14a29243a3a7f0c336e4dcd36348d38511e15ac"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:bccfb30c668620c5bc3490f2dc7d7da1cca0ead5a9da8b755e2e02e2ef0dff14"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f9b0adc3d894beb51f5022f64717b6114a6fabaca83d77e93ac7675911c8cc5"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:32691aa59577f42864d5535cb6225d0f47e2c7bff59cf4556e5171e96af68cc1"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:758b10380ad34c1f51753a070d7bb278001b5e6fcf544121c6df93170952d705"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:50a9c54c0147b468363119132d514c5024fbad1ed8af12bd8bd411b0119f9208"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:e3ceb87c11d2d0fbe8559bb795b0c0604b84cfc8bb7b8720b5c16e9e31e00f41"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f7c9a003002434889255ff5676ca0f8934a478065ab5e702f75dc42639505bba"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win32.whl", hash = "sha256:cf165a76870cd875567941cf861dfd361a0a6e6a56b936c5d30042ddc9def090"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win_amd64.whl", hash = "sha256:55bcc003541f5f16ec0a73bf6de758161973f9e8d75161954380738dd147f9f2"}, - {file = "rapidfuzz-3.12.2-cp313-cp313-win_arm64.whl", hash = "sha256:69f6ecdf1452139f2b947d0c169a605de578efdb72cbb2373cb0a94edca1fd34"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:c4c852cd8bed1516a64fd6e2d4c6f270d4356196ee03fda2af1e5a9e13c34643"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:42e7f747b55529a6d0d1588695d71025e884ab48664dca54b840413dea4588d8"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a749fd2690f24ef256b264a781487746bbb95344364fe8fe356f0eef7ef206ba"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:9a11e1d036170bbafa43a9e63d8c309273564ec5bdfc5439062f439d1a16965a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:dfb337f1832c1231e3d5621bd0ebebb854e46036aedae3e6a49c1fc08f16f249"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e88c6e68fca301722fa3ab7fd3ca46998012c14ada577bc1e2c2fc04f2067ca6"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:17e1a3a8b4b5125cfb63a6990459b25b87ea769bdaf90d05bb143f8febef076a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9f8177b24ccc0a843e85932b1088c5e467a7dd7a181c13f84c684b796bea815"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:6c506bdc2f304051592c0d3b0e82eed309248ec10cdf802f13220251358375ea"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:30bf15c1ecec2798b713d551df17f23401a3e3653ad9ed4e83ad1c2b06e86100"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:bd9a67cfc83e8453ef17ddd1c2c4ce4a74d448a197764efb54c29f29fb41f611"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7a6eaec2ef658dd650c6eb9b36dff7a361ebd7d8bea990ce9d639b911673b2cb"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win32.whl", hash = "sha256:d7701769f110332cde45c41759cb2a497de8d2dca55e4c519a46aed5fbb19d1a"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win_amd64.whl", hash 
= "sha256:296bf0fd4f678488670e262c87a3e4f91900b942d73ae38caa42a417e53643b1"}, - {file = "rapidfuzz-3.12.2-cp39-cp39-win_arm64.whl", hash = "sha256:7957f5d768de14f6b2715303ccdf224b78416738ee95a028a2965c95f73afbfb"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:e5fd3ce849b27d063755829cda27a9dab6dbd63be3801f2a40c60ec563a4c90f"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:54e53662d71ed660c83c5109127c8e30b9e607884b7c45d2aff7929bbbd00589"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2b9e43cf2213e524f3309d329f1ad8dbf658db004ed44f6ae1cd2919aa997da5"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:29ca445e320e5a8df3bd1d75b4fa4ecfa7c681942b9ac65b55168070a1a1960e"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:83eb7ef732c2f8533c6b5fbe69858a722c218acc3e1fc190ab6924a8af7e7e0e"}, - {file = "rapidfuzz-3.12.2-pp310-pypy310_pp73-win_amd64.whl", hash = "sha256:648adc2dd2cf873efc23befcc6e75754e204a409dfa77efd0fea30d08f22ef9d"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_10_15_x86_64.whl", hash = "sha256:9b1e6f48e1ffa0749261ee23a1c6462bdd0be5eac83093f4711de17a42ae78ad"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:1ae9ded463f2ca4ba1eb762913c5f14c23d2e120739a62b7f4cc102eab32dc90"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dda45f47b559be72ecbce45c7f71dc7c97b9772630ab0f3286d97d2c3025ab71"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b3745c6443890265513a3c8777f2de4cb897aeb906a406f97741019be8ad5bcc"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:36d3ef4f047ed1bc96fa29289f9e67a637ddca5e4f4d3dc7cb7f50eb33ec1664"}, - {file = "rapidfuzz-3.12.2-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:54bb69ebe5ca0bd7527357e348f16a4c0c52fe0c2fcc8a041010467dcb8385f7"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3f2ddd5b99b254039a8c82be5749d4d75943f62eb2c2918acf6ffd586852834f"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:8117dab9b26a1aaffab59b4e30f80ac4d55e61ad4139a637c149365960933bee"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40c0f16d62d6553527de3dab2fb69709c4383430ea44bce8fb4711ed4cbc6ae3"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f177e1eb6e4f5261a89c475e21bce7a99064a8f217d2336fb897408f46f0ceaf"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5df0cecc2852fcb078ed1b4482fac4fc2c2e7787f3edda8920d9a4c0f51b1c95"}, - {file = "rapidfuzz-3.12.2-pp39-pypy39_pp73-win_amd64.whl", hash = "sha256:3b3c4df0321df6f8f0b61afbaa2ced9622750ee1e619128db57a18533d139820"}, - {file = "rapidfuzz-3.12.2.tar.gz", hash = "sha256:b0ba1ccc22fff782e7152a3d3d0caca44ec4e32dc48ba01c560b8593965b5aa3"}, -] - -[package.extras] -all = ["numpy"] - -[[package]] -name = "redis" -version = "5.2.1" -description = "Python client for Redis database and key-value store" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "redis-5.2.1-py3-none-any.whl", hash = "sha256:ee7e1056b9aea0f04c6c2ed59452947f34c4940ee025f5dd83e6a6418b6989e4"}, - {file = "redis-5.2.1.tar.gz", hash = "sha256:16f2e22dff21d5125e8481515e386711a34cbec50f0e44413dd7d9c060a54e0f"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_full_version < \"3.11.3\""} - -[package.extras] -hiredis = ["hiredis (>=3.0.0)"] -ocsp = ["cryptography 
(>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "requests-toolbelt" -version = "1.0.0" -description = "A utility belt for advanced users of python-requests" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -groups = ["dev"] -files = [ - {file = "requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6"}, - {file = "requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06"}, -] - -[package.dependencies] -requests = ">=2.0.1,<3.0.0" - -[[package]] -name = "rq" -version = "1.16.2" -description = "RQ is a simple, lightweight, library for creating background jobs, and processing them." -optional = false -python-versions = ">=3.7" -groups = ["main"] -files = [ - {file = "rq-1.16.2-py3-none-any.whl", hash = "sha256:52e619f6cb469b00e04da74305045d244b75fecb2ecaa4f26422add57d3c5f09"}, - {file = "rq-1.16.2.tar.gz", hash = "sha256:5c5b9ad5fbaf792b8fada25cc7627f4d206a9a4455aced371d4f501cc3f13b34"}, -] - -[package.dependencies] -click = ">=5" -redis = ">=3.5" - -[[package]] -name = "ruff" -version = "0.11.2" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "ruff-0.11.2-py3-none-linux_armv6l.whl", hash = "sha256:c69e20ea49e973f3afec2c06376eb56045709f0212615c1adb0eda35e8a4e477"}, - {file = "ruff-0.11.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:2c5424cc1c4eb1d8ecabe6d4f1b70470b4f24a0c0171356290b1953ad8f0e272"}, - {file = "ruff-0.11.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:ecf20854cc73f42171eedb66f006a43d0a21bfb98a2523a809931cda569552d9"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0c543bf65d5d27240321604cee0633a70c6c25c9a2f2492efa9f6d4b8e4199bb"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:20967168cc21195db5830b9224be0e964cc9c8ecf3b5a9e3ce19876e8d3a96e3"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:955a9ce63483999d9f0b8f0b4a3ad669e53484232853054cc8b9d51ab4c5de74"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:86b3a27c38b8fce73bcd262b0de32e9a6801b76d52cdb3ae4c914515f0cef608"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a3b66a03b248c9fcd9d64d445bafdf1589326bee6fc5c8e92d7562e58883e30f"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:0397c2672db015be5aa3d4dac54c69aa012429097ff219392c018e21f5085147"}, - {file = "ruff-0.11.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:869bcf3f9abf6457fbe39b5a37333aa4eecc52a3b99c98827ccc371a8e5b6f1b"}, - {file = "ruff-0.11.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:2a2b50ca35457ba785cd8c93ebbe529467594087b527a08d487cf0ee7b3087e9"}, - {file = "ruff-0.11.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:7c69c74bf53ddcfbc22e6eb2f31211df7f65054bfc1f72288fc71e5f82db3eab"}, - {file = "ruff-0.11.2-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:6e8fb75e14560f7cf53b15bbc55baf5ecbe373dd5f3aab96ff7aa7777edd7630"}, - {file = "ruff-0.11.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:842a472d7b4d6f5924e9297aa38149e5dcb1e628773b70e6387ae2c97a63c58f"}, - {file = "ruff-0.11.2-py3-none-win32.whl", hash = "sha256:aca01ccd0eb5eb7156b324cfaa088586f06a86d9e5314b0eb330cb48415097cc"}, - {file = "ruff-0.11.2-py3-none-win_amd64.whl", hash = "sha256:3170150172a8f994136c0c66f494edf199a0bbea7a409f649e4bc8f4d7084080"}, - {file = "ruff-0.11.2-py3-none-win_arm64.whl", hash = "sha256:52933095158ff328f4c77af3d74f0379e34fd52f175144cefc1b192e7ccd32b4"}, - {file = "ruff-0.11.2.tar.gz", hash = "sha256:ec47591497d5a1050175bdf4e1a4e6272cddff7da88a2ad595e1e326041d8d94"}, -] - -[[package]] -name = "secretstorage" -version = "3.3.3" -description = "Python bindings to FreeDesktop.org Secret Service API" -optional = false -python-versions = ">=3.6" -groups = ["dev"] -markers = "sys_platform == \"linux\"" -files = [ - {file = "SecretStorage-3.3.3-py3-none-any.whl", hash = "sha256:f356e6628222568e3af06f2eba8df495efa13b3b63081dafd4f7d9a7b7bc9f99"}, - {file = "SecretStorage-3.3.3.tar.gz", hash = "sha256:2403533ef369eca6d2ba81718576c5e0f564d5cca1b58f73a8b23e7d4eeebd77"}, -] - -[package.dependencies] -cryptography = ">=2.0" -jeepney = ">=0.6" - -[[package]] -name = "shellingham" -version = "1.5.4" -description = "Tool to Detect Surrounding Shell" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "shellingham-1.5.4-py2.py3-none-any.whl", hash = "sha256:7ecfff8f2fd72616f7481040475a65b2bf8af90a56c89140852d1120324e8686"}, - {file = "shellingham-1.5.4.tar.gz", hash = "sha256:8dbca0739d487e5bd35ab3ca4b36e11c4078f3a234bfce294b0a0291363404de"}, -] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -groups = ["main", "dev"] -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = 
"sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -groups = ["dev"] -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "sortedcontainers" -version = "2.4.0" -description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, - {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, -] - -[[package]] -name = "sqlparse" -version = "0.5.3" -description = "A non-validating SQL parser." 
-optional = false -python-versions = ">=3.8" -groups = ["main"] -files = [ - {file = "sqlparse-0.5.3-py3-none-any.whl", hash = "sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca"}, - {file = "sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272"}, -] - -[package.extras] -dev = ["build", "hatch"] -doc = ["sphinx"] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "python_version < \"3.11\"" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = 
"tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = 
"tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "tomlkit" -version = "0.13.2" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "tomlkit-0.13.2-py3-none-any.whl", hash = "sha256:7a974427f6e119197f670fbbbeae7bef749a6c14e793db934baefc1b5f03efde"}, - {file = "tomlkit-0.13.2.tar.gz", hash = "sha256:fff5fe59a87295b278abd31bec92c15d9bc4a06885ab12bcea52c71119392e79"}, -] - -[[package]] -name = "trove-classifiers" -version = "2025.3.19.19" -description = "Canonical source for 
classifiers on PyPI (pypi.org)." -optional = false -python-versions = "*" -groups = ["dev"] -files = [ - {file = "trove_classifiers-2025.3.19.19-py3-none-any.whl", hash = "sha256:5fc02770ecd81588a605ac98b9d85d50a5a3f9daa30af2a6b1361a1999d75d07"}, - {file = "trove_classifiers-2025.3.19.19.tar.gz", hash = "sha256:98e9d396fe908d5f43b7454fa4c43d17cd0fdadf046f45fb38a5e3af8d959ecd"}, -] - -[[package]] -name = "typing-extensions" -version = "4.13.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -groups = ["main", "dev"] -files = [ - {file = "typing_extensions-4.13.0-py3-none-any.whl", hash = "sha256:c8dd92cc0d6425a97c18fbb9d1954e5ff92c1ca881a309c45f06ebc0b79058e5"}, - {file = "typing_extensions-4.13.0.tar.gz", hash = "sha256:0a4ac55a5820789d87e297727d229866c9650f6521b64206413c4fbada24d95b"}, -] -markers = {main = "python_version < \"3.11\"", dev = "python_version < \"3.13\""} - -[[package]] -name = "tzdata" -version = "2025.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -groups = ["main"] -markers = "sys_platform == \"win32\"" -files = [ - {file = "tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8"}, - {file = "tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9"}, -] - -[[package]] -name = "urllib3" -version = "2.3.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.9" -groups = ["dev"] -files = [ - {file = "urllib3-2.3.0-py3-none-any.whl", hash = "sha256:1cee9ad369867bfdbbb48b7dd50374c0967a0bb7710050facf0dd6911440e3df"}, - {file = "urllib3-2.3.0.tar.gz", hash = "sha256:f8c5449b3cf0861679ce7e0503c7b44b5ec981bec0d1d3795a07f1ba96f0204d"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9) ; platform_python_implementation == \"CPython\"", "brotlicffi (>=0.8.0) ; platform_python_implementation != \"CPython\""] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "valkey" -version = "6.1.0" -description = "Python client for Valkey forked from redis-py" -optional = true -python-versions = ">=3.8" -groups = ["main"] -markers = "extra == \"valkey\"" -files = [ - {file = "valkey-6.1.0-py3-none-any.whl", hash = "sha256:cfe769edae894f74ac946eff1e93f7d7f466032c3030ba7e9d089a742459ac9c"}, - {file = "valkey-6.1.0.tar.gz", hash = "sha256:a652df15ed89c41935ffae6dfd09c56f4a9ab80b592e5ed9204d538e2ddad6d3"}, -] - -[package.dependencies] -async-timeout = {version = ">=4.0.3", markers = "python_version < \"3.11.3\""} - -[package.extras] -libvalkey = ["libvalkey (>=4.0.1)"] -ocsp = ["cryptography (>=36.0.1)", "pyopenssl (==23.2.1)", "requests (>=2.31.0)"] - -[[package]] -name = "virtualenv" -version = "20.29.3" -description = "Virtual Python Environment builder" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "virtualenv-20.29.3-py3-none-any.whl", hash = "sha256:3e3d00f5807e83b234dfb6122bf37cfadf4be216c53a49ac059d02414f819170"}, - {file = "virtualenv-20.29.3.tar.gz", hash = "sha256:95e39403fcf3940ac45bc717597dba16110b74506131845d9b687d5e73d947ac"}, -] - -[package.dependencies] -distlib = ">=0.3.7,<1" -filelock = ">=3.12.2,<4" -platformdirs = ">=3.9.1,<5" - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.2,!=7.3)", "sphinx-argparse (>=0.4)", "sphinxcontrib-towncrier 
(>=0.2.1a0)", "towncrier (>=23.6)"] -test = ["covdefaults (>=2.3)", "coverage (>=7.2.7)", "coverage-enable-subprocess (>=1)", "flaky (>=3.7)", "packaging (>=23.1)", "pytest (>=7.4)", "pytest-env (>=0.8.2)", "pytest-freezer (>=0.4.8) ; platform_python_implementation == \"PyPy\" or platform_python_implementation == \"CPython\" and sys_platform == \"win32\" and python_version >= \"3.13\"", "pytest-mock (>=3.11.1)", "pytest-randomly (>=3.12)", "pytest-timeout (>=2.1)", "setuptools (>=68)", "time-machine (>=2.10) ; platform_python_implementation == \"CPython\""] - -[[package]] -name = "xattr" -version = "1.1.4" -description = "Python wrapper for extended filesystem attributes" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -markers = "sys_platform == \"darwin\"" -files = [ - {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:acb85b6249e9f3ea10cbb56df1021d43f4027212f0d004304bc9075dc7f54769"}, - {file = "xattr-1.1.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:1a848ab125c0fafdc501ccd83b4c9018bba576a037a4ca5960a22f39e295552e"}, - {file = "xattr-1.1.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:467ee77471d26ae5187ee7081b82175b5ca56ead4b71467ec2e6119d1b08beed"}, - {file = "xattr-1.1.4-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fd35f46cb0154f7033f9d5d0960f226857acb0d1e0d71fd7af18ed84663007c"}, - {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7d956478e9bb98a1efd20ebc6e5703497c1d2d690d5a13c4df4abf59881eed50"}, - {file = "xattr-1.1.4-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3f25dfdcd974b700fb04a40e14a664a80227ee58e02ea062ac241f0d7dc54b4e"}, - {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:33b63365c1fcbc80a79f601575bac0d6921732e0245b776876f3db3fcfefe22d"}, - {file = 
"xattr-1.1.4-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:544542be95c9b49e211f0a463758f200de88ba6d5a94d3c4f42855a484341acd"}, - {file = "xattr-1.1.4-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ac14c9893f3ea046784b7702be30889b200d31adcd2e6781a8a190b6423f9f2d"}, - {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:bb4bbe37ba95542081890dd34fa5347bef4651e276647adaa802d5d0d7d86452"}, - {file = "xattr-1.1.4-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:3da489ecef798705f9a39ea8cea4ead0d1eeed55f92c345add89740bd930bab6"}, - {file = "xattr-1.1.4-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:798dd0cbe696635a6f74b06fc430818bf9c3b24314e1502eadf67027ab60c9b0"}, - {file = "xattr-1.1.4-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7b2b6361626efad5eb5a6bf8172c6c67339e09397ee8140ec41258737bea9681"}, - {file = "xattr-1.1.4-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6e7fa20a0c9ce022d19123b1c5b848d00a68b837251835a7929fe041ee81dcd0"}, - {file = "xattr-1.1.4-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6e20eeb08e2c57fc7e71f050b1cfae35cbb46105449853a582bf53fd23c5379e"}, - {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:477370e75821bded901487e5e752cffe554d1bd3bd4839b627d4d1ee8c95a093"}, - {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:a8682091cd34a9f4a93c8aaea4101aae99f1506e24da00a3cc3dd2eca9566f21"}, - {file = "xattr-1.1.4-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:2e079b3b1a274ba2121cf0da38bbe5c8d2fb1cc49ecbceb395ce20eb7d69556d"}, - {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:ae6579dea05bf9f335a082f711d5924a98da563cac72a2d550f5b940c401c0e9"}, - {file = "xattr-1.1.4-cp312-cp312-macosx_10_13_x86_64.whl", hash = 
"sha256:cd6038ec9df2e67af23c212693751481d5f7e858156924f14340376c48ed9ac7"}, - {file = "xattr-1.1.4-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:608b2877526674eb15df4150ef4b70b7b292ae00e65aecaae2f192af224be200"}, - {file = "xattr-1.1.4-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c54dad1a6a998c6a23edfd25e99f4d38e9b942d54e518570044edf8c767687ea"}, - {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c0dab6ff72bb2b508f3850c368f8e53bd706585012676e1f71debba3310acde8"}, - {file = "xattr-1.1.4-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7a3c54c6af7cf09432b2c461af257d5f4b1cb2d59eee045f91bacef44421a46d"}, - {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e346e05a158d554639fbf7a0db169dc693c2d2260c7acb3239448f1ff4a9d67f"}, - {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3ff6d9e2103d0d6e5fcd65b85a2005b66ea81c0720a37036445faadc5bbfa424"}, - {file = "xattr-1.1.4-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7a2ee4563c6414dfec0d1ac610f59d39d5220531ae06373eeb1a06ee37cd193f"}, - {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:878df1b38cfdadf3184ad8c7b0f516311128d5597b60ac0b3486948953658a83"}, - {file = "xattr-1.1.4-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:0c9b8350244a1c5454f93a8d572628ff71d7e2fc2f7480dcf4c4f0e8af3150fe"}, - {file = "xattr-1.1.4-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:a46bf48fb662b8bd745b78bef1074a1e08f41a531168de62b5d7bd331dadb11a"}, - {file = "xattr-1.1.4-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:83fc3c07b583777b1dda6355329f75ca6b7179fe0d1002f1afe0ef96f7e3b5de"}, - {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:6308b19cff71441513258699f0538394fad5d66e1d324635207a97cb076fd439"}, - {file = "xattr-1.1.4-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48c00ddc15ddadc9c729cd9504dabf50adb3d9c28f647d4ac9a3df45a046b1a0"}, - {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a06136196f26293758e1b244200b73156a0274af9a7349fa201c71c7af3bb9e8"}, - {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8fc2631a3c6cfcdc71f7f0f847461839963754e76a2015de71e7e71e3304abc0"}, - {file = "xattr-1.1.4-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:d6e1e835f9c938d129dd45e7eb52ebf7d2d6816323dab93ce311bf331f7d2328"}, - {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:60dea2d369a6484e8b7136224fc2971e10e2c46340d83ab780924afe78c90066"}, - {file = "xattr-1.1.4-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:85c2b778b09d919523f80f244d799a142302582d76da18903dc693207c4020b0"}, - {file = "xattr-1.1.4-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:ee0abba9e1b890d39141714ff43e9666864ca635ea8a5a2194d989e6b17fe862"}, - {file = "xattr-1.1.4-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8e4174ba7f51f46b95ea7918d907c91cd579575d59e6a2f22ca36a0551026737"}, - {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b2b05e52e99d82d87528c54c2c5c8c5fb0ba435f85ac6545511aeea136e49925"}, - {file = "xattr-1.1.4-cp38-cp38-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3a3696fad746be37de34eb73c60ea67144162bd08106a5308a90ce9dea9a3287"}, - {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:a3a7149439a26b68904c14fdc4587cde4ac7d80303e9ff0fefcfd893b698c976"}, - {file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:507b36a126ce900dbfa35d4e2c2db92570c933294cba5d161ecd6a89f7b52f43"}, - 
{file = "xattr-1.1.4-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:9392b417b54923e031041940d396b1d709df1d3779c6744454e1f1c1f4dad4f5"}, - {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:e9f00315e6c02943893b77f544776b49c756ac76960bea7cb8d7e1b96aefc284"}, - {file = "xattr-1.1.4-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c8f98775065260140efb348b1ff8d50fd66ddcbf0c685b76eb1e87b380aaffb3"}, - {file = "xattr-1.1.4-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:b471c6a515f434a167ca16c5c15ff34ee42d11956baa749173a8a4e385ff23e7"}, - {file = "xattr-1.1.4-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ee0763a1b7ceb78ba2f78bee5f30d1551dc26daafcce4ac125115fa1def20519"}, - {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:099e6e9ce7999b403d36d9cf943105a3d25d8233486b54ec9d1b78623b050433"}, - {file = "xattr-1.1.4-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d3e56faef9dde8d969f0d646fb6171883693f88ae39163ecd919ec707fbafa85"}, - {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:328156d4e594c9ae63e1072503c168849e601a153ad37f0290743544332d6b6f"}, - {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:a57a55a27c7864d6916344c9a91776afda6c3b8b2209f8a69b79cdba93fbe128"}, - {file = "xattr-1.1.4-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3c19cdde08b040df1e99d2500bf8a9cff775ab0e6fa162bf8afe6d84aa93ed04"}, - {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_10_15_x86_64.whl", hash = "sha256:7c72667f19d3a9acf324aed97f58861d398d87e42314731e7c6ab3ac7850c971"}, - {file = "xattr-1.1.4-pp310-pypy310_pp73-macosx_11_0_arm64.whl", hash = "sha256:67ae934d75ea2563fc48a27c5945749575c74a6de19fdd38390917ddcb0e4f24"}, - {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2a1b0c348dd8523554dc535540d2046c0c8a535bb086561d8359f3667967b6ca"}, - {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:22284255d2a8e8f3da195bd8e8d43ce674dbc7c38d38cb6ecfb37fae7755d31f"}, - {file = "xattr-1.1.4-pp310-pypy310_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b38aac5ef4381c26d3ce147ca98fba5a78b1e5bcd6be6755b4908659f2705c6d"}, - {file = "xattr-1.1.4-pp38-pypy38_pp73-macosx_10_9_x86_64.whl", hash = "sha256:803f864af528f6f763a5be1e7b1ccab418e55ae0e4abc8bda961d162f850c991"}, - {file = "xattr-1.1.4-pp38-pypy38_pp73-macosx_11_0_arm64.whl", hash = "sha256:40354ebfb5cecd60a5fbb9833a8a452d147486b0ffec547823658556625d98b5"}, - {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2abaf5d06be3361bfa8e0db2ee123ba8e92beab5bceed5e9d7847f2145a32e04"}, - {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3e638e5ffedc3565242b5fa3296899d35161bad771f88d66277b58f03a1ba9fe"}, - {file = "xattr-1.1.4-pp38-pypy38_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0597e919d116ec39997804288d77bec3777228368efc0f2294b84a527fc4f9c2"}, - {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_10_15_x86_64.whl", hash = "sha256:3cee9455c501d19f065527afda974418b3ef7c61e85d9519d122cd6eb3cb7a00"}, - {file = "xattr-1.1.4-pp39-pypy39_pp73-macosx_11_0_arm64.whl", hash = "sha256:89ed62ce430f5789e15cfc1ccabc172fd8b349c3a17c52d9e6c64ecedf08c265"}, - {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e25b824f4b9259cd8bb6e83c4873cf8bf080f6e4fa034a02fe778e07aba8d345"}, - {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = 
"sha256:8fba66faa0016dfc0af3dd7ac5782b5786a1dfb851f9f3455e266f94c2a05a04"}, - {file = "xattr-1.1.4-pp39-pypy39_pp73-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4ec4b0c3e0a7bcd103f3cf31dd40c349940b2d4223ce43d384a3548992138ef1"}, - {file = "xattr-1.1.4.tar.gz", hash = "sha256:b7b02ecb2270da5b7e7deaeea8f8b528c17368401c2b9d5f63e91f545b45d372"}, -] - -[package.dependencies] -cffi = ">=1.16.0" - -[package.extras] -test = ["pytest"] - -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -groups = ["dev"] -markers = "python_version < \"3.12\"" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1) ; sys_platform != \"cygwin\""] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["big-O", "importlib-resources ; python_version < \"3.9\"", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[[package]] -name = "zstandard" -version = "0.23.0" -description = "Zstandard bindings for Python" -optional = false -python-versions = ">=3.8" -groups = ["dev"] -files = [ - {file = "zstandard-0.23.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bf0a05b6059c0528477fba9054d09179beb63744355cab9f38059548fedd46a9"}, - {file = "zstandard-0.23.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:fc9ca1c9718cb3b06634c7c8dec57d24e9438b2aa9a0f02b8bb36bf478538880"}, - {file = 
"zstandard-0.23.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:77da4c6bfa20dd5ea25cbf12c76f181a8e8cd7ea231c673828d0386b1740b8dc"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b2170c7e0367dde86a2647ed5b6f57394ea7f53545746104c6b09fc1f4223573"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:c16842b846a8d2a145223f520b7e18b57c8f476924bda92aeee3a88d11cfc391"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:157e89ceb4054029a289fb504c98c6a9fe8010f1680de0201b3eb5dc20aa6d9e"}, - {file = "zstandard-0.23.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:203d236f4c94cd8379d1ea61db2fce20730b4c38d7f1c34506a31b34edc87bdd"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:dc5d1a49d3f8262be192589a4b72f0d03b72dcf46c51ad5852a4fdc67be7b9e4"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:752bf8a74412b9892f4e5b58f2f890a039f57037f52c89a740757ebd807f33ea"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:80080816b4f52a9d886e67f1f96912891074903238fe54f2de8b786f86baded2"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:84433dddea68571a6d6bd4fbf8ff398236031149116a7fff6f777ff95cad3df9"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ab19a2d91963ed9e42b4e8d77cd847ae8381576585bad79dbd0a8837a9f6620a"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:59556bf80a7094d0cfb9f5e50bb2db27fefb75d5138bb16fb052b61b0e0eeeb0"}, - {file = "zstandard-0.23.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:27d3ef2252d2e62476389ca8f9b0cf2bbafb082a3b6bfe9d90cbcbb5529ecf7c"}, - {file = "zstandard-0.23.0-cp310-cp310-win32.whl", hash = 
"sha256:5d41d5e025f1e0bccae4928981e71b2334c60f580bdc8345f824e7c0a4c2a813"}, - {file = "zstandard-0.23.0-cp310-cp310-win_amd64.whl", hash = "sha256:519fbf169dfac1222a76ba8861ef4ac7f0530c35dd79ba5727014613f91613d4"}, - {file = "zstandard-0.23.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:34895a41273ad33347b2fc70e1bff4240556de3c46c6ea430a7ed91f9042aa4e"}, - {file = "zstandard-0.23.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:77ea385f7dd5b5676d7fd943292ffa18fbf5c72ba98f7d09fc1fb9e819b34c23"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:983b6efd649723474f29ed42e1467f90a35a74793437d0bc64a5bf482bedfa0a"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:80a539906390591dd39ebb8d773771dc4db82ace6372c4d41e2d293f8e32b8db"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:445e4cb5048b04e90ce96a79b4b63140e3f4ab5f662321975679b5f6360b90e2"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd30d9c67d13d891f2360b2a120186729c111238ac63b43dbd37a5a40670b8ca"}, - {file = "zstandard-0.23.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d20fd853fbb5807c8e84c136c278827b6167ded66c72ec6f9a14b863d809211c"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ed1708dbf4d2e3a1c5c69110ba2b4eb6678262028afd6c6fbcc5a8dac9cda68e"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:be9b5b8659dff1f913039c2feee1aca499cfbc19e98fa12bc85e037c17ec6ca5"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:65308f4b4890aa12d9b6ad9f2844b7ee42c7f7a4fd3390425b242ffc57498f48"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:98da17ce9cbf3bfe4617e836d561e433f871129e3a7ac16d6ef4c680f13a839c"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:8ed7d27cb56b3e058d3cf684d7200703bcae623e1dcc06ed1e18ecda39fee003"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:b69bb4f51daf461b15e7b3db033160937d3ff88303a7bc808c67bbc1eaf98c78"}, - {file = "zstandard-0.23.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:034b88913ecc1b097f528e42b539453fa82c3557e414b3de9d5632c80439a473"}, - {file = "zstandard-0.23.0-cp311-cp311-win32.whl", hash = "sha256:f2d4380bf5f62daabd7b751ea2339c1a21d1c9463f1feb7fc2bdcea2c29c3160"}, - {file = "zstandard-0.23.0-cp311-cp311-win_amd64.whl", hash = "sha256:62136da96a973bd2557f06ddd4e8e807f9e13cbb0bfb9cc06cfe6d98ea90dfe0"}, - {file = "zstandard-0.23.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:b4567955a6bc1b20e9c31612e615af6b53733491aeaa19a6b3b37f3b65477094"}, - {file = "zstandard-0.23.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1e172f57cd78c20f13a3415cc8dfe24bf388614324d25539146594c16d78fcc8"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b0e166f698c5a3e914947388c162be2583e0c638a4703fc6a543e23a88dea3c1"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:12a289832e520c6bd4dcaad68e944b86da3bad0d339ef7989fb7e88f92e96072"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d50d31bfedd53a928fed6707b15a8dbeef011bb6366297cc435accc888b27c20"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:72c68dda124a1a138340fb62fa21b9bf4848437d9ca60bd35db36f2d3345f373"}, - {file = "zstandard-0.23.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:53dd9d5e3d29f95acd5de6802e909ada8d8d8cfa37a3ac64836f3bc4bc5512db"}, - {file = 
"zstandard-0.23.0-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:6a41c120c3dbc0d81a8e8adc73312d668cd34acd7725f036992b1b72d22c1772"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:40b33d93c6eddf02d2c19f5773196068d875c41ca25730e8288e9b672897c105"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9206649ec587e6b02bd124fb7799b86cddec350f6f6c14bc82a2b70183e708ba"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:76e79bc28a65f467e0409098fa2c4376931fd3207fbeb6b956c7c476d53746dd"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:66b689c107857eceabf2cf3d3fc699c3c0fe8ccd18df2219d978c0283e4c508a"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:9c236e635582742fee16603042553d276cca506e824fa2e6489db04039521e90"}, - {file = "zstandard-0.23.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a8fffdbd9d1408006baaf02f1068d7dd1f016c6bcb7538682622c556e7b68e35"}, - {file = "zstandard-0.23.0-cp312-cp312-win32.whl", hash = "sha256:dc1d33abb8a0d754ea4763bad944fd965d3d95b5baef6b121c0c9013eaf1907d"}, - {file = "zstandard-0.23.0-cp312-cp312-win_amd64.whl", hash = "sha256:64585e1dba664dc67c7cdabd56c1e5685233fbb1fc1966cfba2a340ec0dfff7b"}, - {file = "zstandard-0.23.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:576856e8594e6649aee06ddbfc738fec6a834f7c85bf7cadd1c53d4a58186ef9"}, - {file = "zstandard-0.23.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:38302b78a850ff82656beaddeb0bb989a0322a8bbb1bf1ab10c17506681d772a"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d2240ddc86b74966c34554c49d00eaafa8200a18d3a5b6ffbf7da63b11d74ee2"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2ef230a8fd217a2015bc91b74f6b3b7d6522ba48be29ad4ea0ca3a3775bf7dd5"}, - {file = 
"zstandard-0.23.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:774d45b1fac1461f48698a9d4b5fa19a69d47ece02fa469825b442263f04021f"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6f77fa49079891a4aab203d0b1744acc85577ed16d767b52fc089d83faf8d8ed"}, - {file = "zstandard-0.23.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ac184f87ff521f4840e6ea0b10c0ec90c6b1dcd0bad2f1e4a9a1b4fa177982ea"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:c363b53e257246a954ebc7c488304b5592b9c53fbe74d03bc1c64dda153fb847"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:e7792606d606c8df5277c32ccb58f29b9b8603bf83b48639b7aedf6df4fe8171"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a0817825b900fcd43ac5d05b8b3079937073d2b1ff9cf89427590718b70dd840"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:9da6bc32faac9a293ddfdcb9108d4b20416219461e4ec64dfea8383cac186690"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:fd7699e8fd9969f455ef2926221e0233f81a2542921471382e77a9e2f2b57f4b"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:d477ed829077cd945b01fc3115edd132c47e6540ddcd96ca169facff28173057"}, - {file = "zstandard-0.23.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:fa6ce8b52c5987b3e34d5674b0ab529a4602b632ebab0a93b07bfb4dfc8f8a33"}, - {file = "zstandard-0.23.0-cp313-cp313-win32.whl", hash = "sha256:a9b07268d0c3ca5c170a385a0ab9fb7fdd9f5fd866be004c4ea39e44edce47dd"}, - {file = "zstandard-0.23.0-cp313-cp313-win_amd64.whl", hash = "sha256:f3513916e8c645d0610815c257cbfd3242adfd5c4cfa78be514e5a3ebb42a41b"}, - {file = "zstandard-0.23.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = 
"sha256:2ef3775758346d9ac6214123887d25c7061c92afe1f2b354f9388e9e4d48acfc"}, - {file = "zstandard-0.23.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4051e406288b8cdbb993798b9a45c59a4896b6ecee2f875424ec10276a895740"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e2d1a054f8f0a191004675755448d12be47fa9bebbcffa3cdf01db19f2d30a54"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f83fa6cae3fff8e98691248c9320356971b59678a17f20656a9e59cd32cee6d8"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:32ba3b5ccde2d581b1e6aa952c836a6291e8435d788f656fe5976445865ae045"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2f146f50723defec2975fb7e388ae3a024eb7151542d1599527ec2aa9cacb152"}, - {file = "zstandard-0.23.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1bfe8de1da6d104f15a60d4a8a768288f66aa953bbe00d027398b93fb9680b26"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:29a2bc7c1b09b0af938b7a8343174b987ae021705acabcbae560166567f5a8db"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:61f89436cbfede4bc4e91b4397eaa3e2108ebe96d05e93d6ccc95ab5714be512"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:53ea7cdc96c6eb56e76bb06894bcfb5dfa93b7adcf59d61c6b92674e24e2dd5e"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:a4ae99c57668ca1e78597d8b06d5af837f377f340f4cce993b551b2d7731778d"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:379b378ae694ba78cef921581ebd420c938936a153ded602c4fea612b7eaa90d"}, - {file = "zstandard-0.23.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:50a80baba0285386f97ea36239855f6020ce452456605f262b2d33ac35c7770b"}, - {file 
= "zstandard-0.23.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:61062387ad820c654b6a6b5f0b94484fa19515e0c5116faf29f41a6bc91ded6e"}, - {file = "zstandard-0.23.0-cp38-cp38-win32.whl", hash = "sha256:b8c0bd73aeac689beacd4e7667d48c299f61b959475cdbb91e7d3d88d27c56b9"}, - {file = "zstandard-0.23.0-cp38-cp38-win_amd64.whl", hash = "sha256:a05e6d6218461eb1b4771d973728f0133b2a4613a6779995df557f70794fd60f"}, - {file = "zstandard-0.23.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:3aa014d55c3af933c1315eb4bb06dd0459661cc0b15cd61077afa6489bec63bb"}, - {file = "zstandard-0.23.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:0a7f0804bb3799414af278e9ad51be25edf67f78f916e08afdb983e74161b916"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb2b1ecfef1e67897d336de3a0e3f52478182d6a47eda86cbd42504c5cbd009a"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:837bb6764be6919963ef41235fd56a6486b132ea64afe5fafb4cb279ac44f259"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1516c8c37d3a053b01c1c15b182f3b5f5eef19ced9b930b684a73bad121addf4"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:48ef6a43b1846f6025dde6ed9fee0c24e1149c1c25f7fb0a0585572b2f3adc58"}, - {file = "zstandard-0.23.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:11e3bf3c924853a2d5835b24f03eeba7fc9b07d8ca499e247e06ff5676461a15"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:2fb4535137de7e244c230e24f9d1ec194f61721c86ebea04e1581d9d06ea1269"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:8c24f21fa2af4bb9f2c492a86fe0c34e6d2c63812a839590edaf177b7398f700"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = 
"sha256:a8c86881813a78a6f4508ef9daf9d4995b8ac2d147dcb1a450448941398091c9"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fe3b385d996ee0822fd46528d9f0443b880d4d05528fd26a9119a54ec3f91c69"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:82d17e94d735c99621bf8ebf9995f870a6b3e6d14543b99e201ae046dfe7de70"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:c7c517d74bea1a6afd39aa612fa025e6b8011982a0897768a2f7c8ab4ebb78a2"}, - {file = "zstandard-0.23.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:1fd7e0f1cfb70eb2f95a19b472ee7ad6d9a0a992ec0ae53286870c104ca939e5"}, - {file = "zstandard-0.23.0-cp39-cp39-win32.whl", hash = "sha256:43da0f0092281bf501f9c5f6f3b4c975a8a0ea82de49ba3f7100e64d422a1274"}, - {file = "zstandard-0.23.0-cp39-cp39-win_amd64.whl", hash = "sha256:f8346bfa098532bc1fb6c7ef06783e969d87a99dd1d2a5a18a892c1d7a643c58"}, - {file = "zstandard-0.23.0.tar.gz", hash = "sha256:b2d8c62d08e7255f68f7a740bae85b3c9b8e5466baa9cbf7f57f1cde0ac6bc09"}, -] - -[package.dependencies] -cffi = {version = ">=1.11", markers = "platform_python_implementation == \"PyPy\""} - -[package.extras] -cffi = ["cffi (>=1.11)"] - -[extras] -valkey = ["valkey"] -yaml = ["pyyaml"] - -[metadata] -lock-version = "2.1" -python-versions = "^3.10" -content-hash = "91d003aa67d25bcfcdc886beefc43db1ca776b8918ce6a012025bd0b868a1ed8" diff --git a/pyproject.toml b/pyproject.toml index 70d80f1..3222a37 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,65 +1,73 @@ [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["hatchling"] +build-backend = "hatchling.build" -[tool.poetry] +[project] name = "django-tasks-scheduler" -packages = [ - { include = "scheduler" }, -] -version = "3.0.2" +version = "4.0.5" description = "An async job scheduler for django using redis/valkey brokers" +authors = [{ name = "Daniel Moran", email = "daniel@moransoftware.ca" }] 
+requires-python = ">=3.10" readme = "README.md" -keywords = ["redis", "valkey", "django", "background-jobs", "job-queue", "task-queue", "redis-queue", "scheduled-jobs"] -authors = [ - "Daniel Moran ", -] -maintainers = [ - "Daniel Moran ", -] license = "MIT" +maintainers = [{ name = "Daniel Moran", email = "daniel@moransoftware.ca" }] +keywords = [ + "redis", + "valkey", + "django", + "background-jobs", + "job-queue", + "task-queue", + "redis-queue", + "scheduled-jobs", +] classifiers = [ - 'Development Status :: 5 - Production/Stable', - 'Environment :: Web Environment', - 'Intended Audience :: Developers', - 'License :: OSI Approved :: MIT License', - 'Operating System :: OS Independent', - 'Programming Language :: Python', - 'Programming Language :: Python :: 3.10', - 'Programming Language :: Python :: 3.11', - 'Programming Language :: Python :: 3.12', - 'Programming Language :: Python :: 3.13', - 'Framework :: Django', - 'Framework :: Django :: 5.0', - 'Framework :: Django :: 5.1', + "Development Status :: 5 - Production/Stable", + "Environment :: Web Environment", + "Intended Audience :: Developers", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Programming Language :: Python", + "Programming Language :: Python :: 3.10", + "Programming Language :: Python :: 3.11", + "Programming Language :: Python :: 3.12", + "Programming Language :: Python :: 3.13", + "Framework :: Django", + "Framework :: Django :: 5.0", + "Framework :: Django :: 5.1", + "Framework :: Django :: 5.2", ] -homepage = "https://github.com/django-commons/django-tasks-scheduler" -documentation = "https://django-tasks-scheduler.readthedocs.io/en/latest/" +dependencies = [ + "django>=5", + "croniter>=2.0", + "click~=8.2", +] + +[project.optional-dependencies] +yaml = ["pyyaml~=6.0"] +valkey = ["valkey>=6.0.2,<7"] +sentry = ["sentry-sdk~=2.19"] -[tool.poetry.urls] +[project.urls] +Homepage = "https://github.com/django-commons/django-tasks-scheduler" 
+Documentation = "https://django-tasks-scheduler.readthedocs.io/" "Bug Tracker" = "https://github.com/django-commons/django-tasks-scheduler/issues" -"Funding" = "https://github.com/sponsors/cunla" +Funding = "https://github.com/sponsors/cunla" -[tool.poetry.dependencies] -python = "^3.10" -django = ">=5" -croniter = ">=2.0" -click = "^8.1" -rq = "^1.16" -pyyaml = { version = "^6.0", optional = true } -valkey = { version = "^6.0.2", optional = true } +[dependency-groups] +dev = [ + "time-machine>=2.16.0,<3", + "ruff>=0.11", + "coverage~=7.6", + "fakeredis~=2.28", + "pyyaml>=6,<7", +] -[tool.poetry.group.dev.dependencies] -poetry = "^2.0.1" -ruff = "^0.11" -coverage = "^7.6" -fakeredis = { version = "^2.21.5", extras = ['lua'] } -pyyaml = "^6" -freezegun = "^1.5" +[tool.hatch.build.targets.sdist] +include = ["scheduler"] -[tool.poetry.extras] -yaml = ["pyyaml"] -valkey = ["valkey"] +[tool.hatch.build.targets.wheel] +include = ["scheduler"] [tool.ruff] line-length = 120 @@ -68,7 +76,7 @@ exclude = [ 'testproject', '.venv', '.github', - '__pycache', + '__pycache__', ] [tool.ruff.format] diff --git a/scheduler/__init__.py b/scheduler/__init__.py index e7010c5..81ea954 100644 --- a/scheduler/__init__.py +++ b/scheduler/__init__.py @@ -2,8 +2,8 @@ __version__ = importlib.metadata.version("django-tasks-scheduler") -from .decorators import job - __all__ = [ "job", ] + +from scheduler.decorators import job diff --git a/scheduler/admin/ephemeral_models.py b/scheduler/admin/ephemeral_models.py index 4bb1f13..15fddd1 100644 --- a/scheduler/admin/ephemeral_models.py +++ b/scheduler/admin/ephemeral_models.py @@ -1,8 +1,7 @@ from django.contrib import admin from scheduler import views -from scheduler.models.queue import Queue -from scheduler.models.worker import Worker +from scheduler.models.ephemeral_models import Queue, Worker class ImmutableAdmin(admin.ModelAdmin): @@ -38,4 +37,4 @@ class WorkerAdmin(ImmutableAdmin): def changelist_view(self, request, extra_context=None): 
"""The 'change list' admin view for this model.""" - return views.workers(request) + return views.workers_list(request) diff --git a/scheduler/admin/task_admin.py b/scheduler/admin/task_admin.py index 9308930..e33cbef 100644 --- a/scheduler/admin/task_admin.py +++ b/scheduler/admin/task_admin.py @@ -1,13 +1,30 @@ +from typing import List + from django.contrib import admin, messages from django.contrib.contenttypes.admin import GenericStackedInline +from django.db.models import QuerySet +from django.http import HttpRequest +from django.utils import timezone, formats from django.utils.translation import gettext_lazy as _ -from scheduler import tools -from scheduler.broker_types import ConnectionErrorTypes -from scheduler.models.args import TaskArg, TaskKwarg -from scheduler.models.task import Task +from scheduler.helpers.queues import get_queue +from scheduler.models import TaskArg, TaskKwarg, Task, TaskType, get_next_cron_time +from scheduler.redis_models import JobModel from scheduler.settings import SCHEDULER_CONFIG, logger -from scheduler.tools import get_job_executions_for_task, TaskType +from scheduler.types import ConnectionErrorTypes + + +def job_execution_of(job: JobModel, task: Task) -> bool: + return job.scheduled_task_id == task.id and job.task_type == task.task_type + + +def get_job_executions_for_task(queue_name: str, scheduled_task: Task) -> List[JobModel]: + queue = get_queue(queue_name) + job_list: List[JobModel] = JobModel.get_many(queue.get_all_job_names(), connection=queue.connection) + res = sorted( + list(filter(lambda j: job_execution_of(j, scheduled_task), job_list)), key=lambda j: j.created_at, reverse=True + ) + return res class JobArgInline(GenericStackedInline): @@ -32,27 +49,16 @@ class TaskAdmin(admin.ModelAdmin): """TaskAdmin admin view for all task models.""" class Media: - js = ( - "admin/js/jquery.init.js", - "admin/js/select-fields.js", - ) + js = ("admin/js/jquery.init.js", "admin/js/select-fields.js") save_on_top = True 
change_form_template = "admin/scheduler/change_form.html" - actions = [ - "disable_selected", - "enable_selected", - "enqueue_job_now", - ] - inlines = [ - JobArgInline, - JobKwargInline, - ] - list_filter = ("enabled",) + actions = ["disable_selected", "enable_selected", "enqueue_job_now"] + inlines = [JobArgInline, JobKwargInline] + list_filter = ("enabled", "task_type", "queue") list_display = ( "enabled", "name", - "job_id", "function_string", "is_scheduled", "queue", @@ -65,7 +71,7 @@ class Media: ) list_display_links = ("name",) readonly_fields = ( - "job_id", + "job_name", "successful_runs", "last_successful_run", "failed_runs", @@ -79,8 +85,8 @@ class Media: fields=( "name", "callable", - "task_type", ("enabled", "timeout", "result_ttl"), + "task_type", ) ), ), @@ -94,9 +100,18 @@ class Media: ), ( None, - dict(fields=("interval", "interval_unit", "repeat"), classes=("tasktype-RepeatableTaskType",)), + dict( + fields=( + ( + "interval", + "interval_unit", + ), + "repeat", + ), + classes=("tasktype-RepeatableTaskType",), + ), ), - (_("RQ Settings"), dict(fields=(("queue", "at_front"), "job_id"))), + (_("Queue settings"), dict(fields=(("queue", "at_front"), "job_name"))), ( _("Previous runs info"), dict(fields=(("successful_runs", "last_successful_run"), ("failed_runs", "last_failed_run"))), @@ -106,19 +121,23 @@ class Media: @admin.display(description="Schedule") def task_schedule(self, o: Task) -> str: if o.task_type == TaskType.ONCE.value: - return f"Run once: {o.scheduled_time:%Y-%m-%d %H:%M:%S}" + if timezone.is_naive(o.scheduled_time): + local_time = timezone.make_aware(o.scheduled_time, timezone.get_current_timezone()) + else: + local_time = timezone.localtime(o.scheduled_time) + return f"Run once: {formats.date_format(local_time, 'DATETIME_FORMAT')}" elif o.task_type == TaskType.CRON.value: return f"Cron: {o.cron_string}" - elif o.task_type == TaskType.REPEATABLE.value: + else: # if o.task_type == TaskType.REPEATABLE.value: if o.interval is None or 
o.interval_unit is None: return "" - return "Repeatable: {} {}".format(o.interval, o.get_interval_unit_display()) + return f"Repeatable: {o.interval} {o.get_interval_unit_display()}" @admin.display(description="Next run") def next_run(self, o: Task) -> str: - return tools.get_next_cron_time(o.cron_string) + return get_next_cron_time(o.cron_string) - def change_view(self, request, object_id, form_url="", extra_context=None): + def change_view(self, request: HttpRequest, object_id, form_url="", extra_context=None): extra = extra_context or {} obj = self.get_object(request, object_id) try: @@ -142,17 +161,17 @@ def change_view(self, request, object_id, form_url="", extra_context=None): return super(TaskAdmin, self).change_view(request, object_id, form_url, extra_context=extra) - def delete_queryset(self, request, queryset): + def delete_queryset(self, request: HttpRequest, queryset: QuerySet) -> None: for job in queryset: job.unschedule() super(TaskAdmin, self).delete_queryset(request, queryset) - def delete_model(self, request, obj): + def delete_model(self, request: HttpRequest, obj: Task) -> None: obj.unschedule() super(TaskAdmin, self).delete_model(request, obj) @admin.action(description=_("Disable selected %(verbose_name_plural)s"), permissions=("change",)) - def disable_selected(self, request, queryset): + def disable_selected(self, request: HttpRequest, queryset: QuerySet) -> None: rows_updated = 0 for obj in queryset.filter(enabled=True).iterator(): obj.enabled = False @@ -165,7 +184,7 @@ def disable_selected(self, request, queryset): ) @admin.action(description=_("Enable selected %(verbose_name_plural)s"), permissions=("change",)) - def enable_selected(self, request, queryset): + def enable_selected(self, request: HttpRequest, queryset: QuerySet) -> None: rows_updated = 0 for obj in queryset.filter(enabled=False).iterator(): obj.enabled = True @@ -176,7 +195,7 @@ def enable_selected(self, request, queryset): self.message_user(request, 
f"{get_message_bit(rows_updated)} successfully enabled and scheduled.", level=level) @admin.action(description="Enqueue now", permissions=("change",)) - def enqueue_job_now(self, request, queryset): + def enqueue_job_now(self, request: HttpRequest, queryset: QuerySet) -> None: task_names = [] for task in queryset: task.enqueue_to_run() diff --git a/scheduler/decorators.py b/scheduler/decorators.py index c8f7e94..72e15cb 100644 --- a/scheduler/decorators.py +++ b/scheduler/decorators.py @@ -1,43 +1,98 @@ -from scheduler import settings -from .queues import get_queue, QueueNotFoundError -from .rq_classes import rq_job_decorator +from functools import wraps +from typing import Any, Callable, Dict, Optional, Union, List -JOB_METHODS_LIST = list() +from scheduler.helpers.callback import Callback +from scheduler.types import ConnectionType +JOB_METHODS_LIST: List[str] = list() -def job(*args, **kwargs): - """ - The same as rq package's job decorator, but it automatically works out - the ``connection`` argument from SCHEDULER_QUEUES. - And also, it allows simplified ``@job`` syntax to put a job into the default queue. +class job: + def __init__( + self, + queue: Union["Queue", str, None] = None, # noqa: F821 + connection: Optional[ConnectionType] = None, + timeout: Optional[int] = None, + result_ttl: Optional[int] = None, + job_info_ttl: Optional[int] = None, + at_front: bool = False, + meta: Optional[Dict[Any, Any]] = None, + description: Optional[str] = None, + on_failure: Optional[Union["Callback", Callable[..., Any]]] = None, + on_success: Optional[Union["Callback", Callable[..., Any]]] = None, + on_stopped: Optional[Union["Callback", Callable[..., Any]]] = None, + ): + """A decorator that adds a ``delay`` method to the decorated function, which in turn creates a RQ job when + called. Accepts a required ``queue`` argument that can be either a ``Queue`` instance or a string + denoting the queue name. 
For example:: - """ - if len(args) == 0: - func = None - queue = "default" - else: - if callable(args[0]): - func = args[0] + + >>> @job(queue='default') + >>> def simple_add(x, y): + >>> return x + y + >>> ... + >>> # Puts `simple_add` function into queue + >>> simple_add.delay(1, 2) + + :param queue: The queue to use, can be the Queue class itself, or the queue name (str) + :type queue: Union['Queue', str] + :param connection: Broker Connection + :param timeout: Job timeout + :param result_ttl: Result time to live + :param job_info_ttl: Time to live for job info + :param at_front: Whether to enqueue the job at front of the queue + :param meta: Arbitraty metadata about the job + :param description: Job description + :param on_failure: Callable to run on failure + :param on_success: Callable to run on success + :param on_stopped: Callable to run when stopped + """ + from scheduler.helpers.queues import get_queue + + if queue is None: queue = "default" - else: - func = None - queue = args[0] - args = args[1:] - - if isinstance(queue, str): - try: - queue = get_queue(queue) - if "connection" not in kwargs: - kwargs["connection"] = queue.connection - except KeyError: - raise QueueNotFoundError(f"Queue {queue} does not exist") - - kwargs.setdefault("result_ttl", settings.SCHEDULER_CONFIG.DEFAULT_RESULT_TTL) - kwargs.setdefault("timeout", settings.SCHEDULER_CONFIG.DEFAULT_TIMEOUT) - - decorator = rq_job_decorator(queue, *args, **kwargs) - if func: - JOB_METHODS_LIST.append(f"{func.__module__}.{func.__name__}") - return decorator(func) - return decorator + self.queue = get_queue(queue) if isinstance(queue, str) else queue + self.connection = connection + self.timeout = timeout + self.result_ttl = result_ttl + self.job_info_ttl = job_info_ttl + self.meta = meta + self.at_front = at_front + self.description = description + self.on_success = on_success + self.on_failure = on_failure + self.on_stopped = on_stopped + + def __call__(self, f): + @wraps(f) + def delay(*args, 
**kwargs): + from scheduler.helpers.queues import get_queue + + queue = get_queue(self.queue) if isinstance(self.queue, str) else self.queue + + job_name = kwargs.pop("job_name", None) + at_front = kwargs.pop("at_front", False) + + if not at_front: + at_front = self.at_front + + return queue.create_and_enqueue_job( + f, + args=args, + kwargs=kwargs, + timeout=self.timeout, + result_ttl=self.result_ttl, + job_info_ttl=self.job_info_ttl, + name=job_name, + at_front=at_front, + meta=self.meta, + description=self.description, + on_failure=self.on_failure, + on_success=self.on_success, + on_stopped=self.on_stopped, + when=None, + ) + + JOB_METHODS_LIST.append(f"{f.__module__}.{f.__name__}") + f.delay = delay + return f diff --git a/scheduler/helpers/__init__.py b/scheduler/helpers/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/scheduler/helpers/callback.py b/scheduler/helpers/callback.py new file mode 100644 index 0000000..4b3c96d --- /dev/null +++ b/scheduler/helpers/callback.py @@ -0,0 +1,37 @@ +import inspect +from typing import Union, Callable, Any, Optional + +from scheduler.helpers.utils import callable_func +from scheduler.timeouts import JobTimeoutException + + +class CallbackSetupError(Exception): + pass + + +class Callback: + def __init__(self, func: Union[str, Callable[..., Any]], timeout: Optional[int] = None): + from scheduler.settings import SCHEDULER_CONFIG + + self.timeout = timeout or SCHEDULER_CONFIG.CALLBACK_TIMEOUT + if not isinstance(self.timeout, int) or self.timeout < 0: + raise CallbackSetupError(f"Callback `timeout` must be a positive int, but received {self.timeout}") + if not isinstance(func, str) and not inspect.isfunction(func) and not inspect.isbuiltin(func): + raise CallbackSetupError(f"Callback `func` must be a string or function, received {func}") + if isinstance(func, str): + try: + func_str = func + func = callable_func(func) + except (TypeError, AttributeError, ModuleNotFoundError, ValueError): + raise 
CallbackSetupError(f"Callback `func` is not callable: {func_str}") + self.func: Callable[..., Any] = func + + @property + def name(self) -> str: + return f"{self.func.__module__}.{self.func.__qualname__}" + + def __call__(self, *args, **kwargs): + from scheduler.settings import SCHEDULER_CONFIG + + with SCHEDULER_CONFIG.DEATH_PENALTY_CLASS(self.timeout, JobTimeoutException): + return self.func(*args, **kwargs) diff --git a/scheduler/helpers/queues/__init__.py b/scheduler/helpers/queues/__init__.py new file mode 100644 index 0000000..4a77cf3 --- /dev/null +++ b/scheduler/helpers/queues/__init__.py @@ -0,0 +1,10 @@ +__all__ = [ + "Queue", + "InvalidJobOperation", + "get_queue", + "get_all_workers", + "perform_job", +] + +from .getters import get_queue, get_all_workers +from .queue_logic import Queue, InvalidJobOperation, perform_job diff --git a/scheduler/helpers/queues/getters.py b/scheduler/helpers/queues/getters.py new file mode 100644 index 0000000..d491a72 --- /dev/null +++ b/scheduler/helpers/queues/getters.py @@ -0,0 +1,72 @@ +from typing import Set + +from scheduler.redis_models.worker import WorkerModel +from scheduler.settings import SCHEDULER_CONFIG, get_queue_names, get_queue_configuration, QueueConfiguration, logger +from scheduler.types import ConnectionErrorTypes, BrokerMetaData, Broker +from .queue_logic import Queue + + +_BAD_QUEUE_CONFIGURATION = set() + + +def _get_connection(config: QueueConfiguration, use_strict_broker=False): + """Returns a Broker connection to use based on parameters in SCHEDULER_QUEUES""" + if SCHEDULER_CONFIG.BROKER == Broker.FAKEREDIS: + import fakeredis + + broker_cls = fakeredis.FakeRedis if not use_strict_broker else fakeredis.FakeStrictRedis + else: + broker_cls = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].connection_type + if config.URL: + return broker_cls.from_url(config.URL, db=config.DB, **(config.CONNECTION_KWARGS or {})) + if config.UNIX_SOCKET_PATH: + return 
broker_cls(unix_socket_path=config.UNIX_SOCKET_PATH, db=config.DB) + + if config.SENTINELS: + connection_kwargs = { + "db": config.DB, + "password": config.PASSWORD, + "username": config.USERNAME, + } + connection_kwargs.update(config.CONNECTION_KWARGS or {}) + sentinel_kwargs = config.SENTINEL_KWARGS or {} + SentinelClass = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].sentinel_type + sentinel = SentinelClass(config.SENTINELS, sentinel_kwargs=sentinel_kwargs, **connection_kwargs) + return sentinel.master_for( + service_name=config.MASTER_NAME, + redis_class=broker_cls, + ) + + return broker_cls( + host=config.HOST, + port=config.PORT, + db=config.DB, + username=config.USERNAME, + password=config.PASSWORD, + **(config.CONNECTION_KWARGS or {}), + ) + + +def get_queue(name="default") -> Queue: + """Returns an DjangoQueue using parameters defined in `SCHEDULER_QUEUES`""" + queue_settings = get_queue_configuration(name) + is_async = queue_settings.ASYNC + connection = _get_connection(queue_settings) + return Queue(name=name, connection=connection, is_async=is_async) + + +def get_all_workers() -> Set[WorkerModel]: + queue_names = get_queue_names() + + workers_set: Set[WorkerModel] = set() + for queue_name in queue_names: + if queue_name in _BAD_QUEUE_CONFIGURATION: + continue + connection = _get_connection(get_queue_configuration(queue_name)) + try: + curr_workers: Set[WorkerModel] = set(WorkerModel.all(connection=connection)) + workers_set.update(curr_workers) + except ConnectionErrorTypes as e: + logger.error(f"Could not connect for queue {queue_name}: {e}") + _BAD_QUEUE_CONFIGURATION.add(queue_name) + return workers_set diff --git a/scheduler/helpers/queues/queue_logic.py b/scheduler/helpers/queues/queue_logic.py new file mode 100644 index 0000000..d18c3d8 --- /dev/null +++ b/scheduler/helpers/queues/queue_logic.py @@ -0,0 +1,447 @@ +import asyncio +import sys +import traceback +from datetime import datetime +from typing import Dict, List, Optional, 
Tuple, Union, Any + +from redis import WatchError + +from scheduler.helpers.callback import Callback +from scheduler.helpers.utils import utcnow, current_timestamp +from scheduler.redis_models import ( + JobNamesRegistry, + FinishedJobRegistry, + ActiveJobRegistry, + FailedJobRegistry, + CanceledJobRegistry, + ScheduledJobRegistry, + QueuedJobRegistry, +) +from scheduler.redis_models import JobStatus, SchedulerLock, Result, ResultType, JobModel +from scheduler.settings import logger, SCHEDULER_CONFIG +from scheduler.types import ConnectionType, FunctionReferenceType, Self + + +class InvalidJobOperation(Exception): + pass + + +class NoSuchJobError(Exception): + pass + + +def perform_job(job_model: JobModel, connection: ConnectionType) -> Any: # noqa + """The main execution method. Invokes the job function with the job arguments. + + :returns: The job's return value + """ + job_model.persist(connection=connection) + _job_stack.append(job_model) + + try: + result = job_model.func(*job_model.args, **job_model.kwargs) + if asyncio.iscoroutine(result): + loop = asyncio.new_event_loop() + coro_result = loop.run_until_complete(result) + result = coro_result + if job_model.success_callback: + job_model.success_callback(job_model, connection, result) # type: ignore + return result + except: + if job_model.failure_callback: + job_model.failure_callback(job_model, connection, *sys.exc_info()) # type: ignore + raise + finally: + assert job_model is _job_stack.pop() + + +_job_stack = [] + + +class Queue: + REGISTRIES = dict( + finished="finished_job_registry", + failed="failed_job_registry", + scheduled="scheduled_job_registry", + active="active_job_registry", + canceled="canceled_job_registry", + queued="queued_job_registry", + ) + + def __init__(self, connection: Optional[ConnectionType], name: str, is_async: bool = True) -> None: + """Initializes a Queue object. 
+ + :param name: The queue name + :param connection: Broker connection + :param is_async: Whether jobs should run "async" (using the worker). + """ + self.connection = connection + self.name = name + self._is_async = is_async + self.queued_job_registry = QueuedJobRegistry(connection=self.connection, name=self.name) + self.active_job_registry = ActiveJobRegistry(connection=self.connection, name=self.name) + self.failed_job_registry = FailedJobRegistry(connection=self.connection, name=self.name) + self.finished_job_registry = FinishedJobRegistry(connection=self.connection, name=self.name) + self.scheduled_job_registry = ScheduledJobRegistry(connection=self.connection, name=self.name) + self.canceled_job_registry = CanceledJobRegistry(connection=self.connection, name=self.name) + + def __len__(self): + return self.count + + @property + def scheduler_pid(self) -> int: + lock = SchedulerLock(self.name) + pid = lock.value(self.connection) + return int(pid.decode()) if pid is not None else None + + def clean_registries(self, timestamp: Optional[float] = None) -> None: + """Remove abandoned jobs from registry and add them to FailedJobRegistry. + + Removes jobs with an expiry time earlier than current_timestamp, specified as seconds since the Unix epoch. + Removed jobs are added to the global failed job queue. 
+ """ + before_score = timestamp or current_timestamp() + self.queued_job_registry.compact() + started_jobs: List[Tuple[str, float]] = self.active_job_registry.get_job_names_before( + self.connection, before_score + ) + + with self.connection.pipeline() as pipeline: + for job_name, job_score in started_jobs: + job = JobModel.get(job_name, connection=self.connection) + if job is None or job.failure_callback is None or job_score + job.timeout > before_score: + continue + + logger.debug(f"Running failure callbacks for {job.name}") + try: + job.failure_callback(job, self.connection, traceback.extract_stack()) + except Exception: # noqa + logger.exception(f"Job {self.name}: error while executing failure callback") + raise + + else: + logger.warning( + f"Queue cleanup: Moving job to {self.failed_job_registry.key} (due to AbandonedJobError)" + ) + exc_string = ( + f"Moved to {self.failed_job_registry.key}, due to AbandonedJobError, at {datetime.now()}" + ) + job.status = JobStatus.FAILED + score = current_timestamp() + SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL + Result.create( + connection=pipeline, + job_name=job.name, + worker_name=job.worker_name, + _type=ResultType.FAILED, + ttl=SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL, + exc_string=exc_string, + ) + self.failed_job_registry.add(pipeline, job.name, score) + job.expire(connection=pipeline, ttl=SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL) + job.save(connection=pipeline) + + for registry in self.REGISTRIES.values(): + getattr(self, registry).cleanup(connection=self.connection, timestamp=before_score) + pipeline.execute() + + def first_queued_job_name(self) -> Optional[str]: + return self.queued_job_registry.get_first() + + @property + def count(self) -> int: + """Returns a count of all messages in the queue.""" + res = 0 + for registry in self.REGISTRIES.values(): + res += getattr(self, registry).count(connection=self.connection) + return res + + def get_registry(self, name: str) -> Union[None, JobNamesRegistry]: + name = name.lower() + 
if name in Queue.REGISTRIES: + return getattr(self, Queue.REGISTRIES[name]) + return None + + def get_all_job_names(self) -> List[str]: + res = list() + res.extend(self.queued_job_registry.all()) + res.extend(self.finished_job_registry.all()) + res.extend(self.active_job_registry.all()) + res.extend(self.failed_job_registry.all()) + res.extend(self.scheduled_job_registry.all()) + res.extend(self.canceled_job_registry.all()) + return res + + def get_all_jobs(self) -> List[JobModel]: + job_names = self.get_all_job_names() + return JobModel.get_many(job_names, connection=self.connection) + + def create_and_enqueue_job( + self, + func: FunctionReferenceType, + args: Union[Tuple, List, None] = None, + kwargs: Optional[Dict] = None, + when: Optional[datetime] = None, + timeout: Optional[int] = None, + result_ttl: Optional[int] = None, + job_info_ttl: Optional[int] = None, + description: Optional[str] = None, + name: Optional[str] = None, + at_front: bool = False, + meta: Optional[Dict] = None, + on_success: Optional[Callback] = None, + on_failure: Optional[Callback] = None, + on_stopped: Optional[Callback] = None, + task_type: Optional[str] = None, + scheduled_task_id: Optional[int] = None, + pipeline: Optional[ConnectionType] = None, + ) -> JobModel: + """Creates a job to represent the delayed function call and enqueues it. 
+ :param when: When to schedule the job (None to enqueue immediately) + :param func: The reference to the function + :param args: The `*args` to pass to the function + :param kwargs: The `**kwargs` to pass to the function + :param timeout: Function timeout + :param result_ttl: Result time to live + :param job_info_ttl: Time to live + :param description: The job description + :param name: The job name + :param at_front: Whether to enqueue the job at the front + :param meta: Metadata to attach to the job + :param on_success: Callback for on success + :param on_failure: Callback for on failure + :param on_stopped: Callback for on stopped + :param task_type: The task type + :param scheduled_task_id: The scheduled task id + :param pipeline: The Broker Pipeline + :returns: The enqueued Job + """ + status = JobStatus.QUEUED if when is None else JobStatus.SCHEDULED + job_model = JobModel.create( + connection=self.connection, + func=func, + args=args, + kwargs=kwargs, + result_ttl=result_ttl, + job_info_ttl=job_info_ttl, + description=description, + name=name, + meta=meta, + status=status, + timeout=timeout, + on_success=on_success, + on_failure=on_failure, + on_stopped=on_stopped, + queue_name=self.name, + task_type=task_type, + scheduled_task_id=scheduled_task_id, + ) + if when is None: + job_model = self.enqueue_job(job_model, connection=pipeline, at_front=at_front) + elif isinstance(when, datetime): + job_model.save(connection=self.connection) + self.scheduled_job_registry.schedule(self.connection, job_model.name, when) + else: + raise TypeError(f"Invalid type for when=`{when}`") + return job_model + + def job_handle_success( + self, job: JobModel, result: Any, job_info_ttl: int, result_ttl: int, connection: ConnectionType + ): + """Saves and cleanup job after successful execution""" + job.after_execution( + job_info_ttl, + JobStatus.FINISHED, + prev_registry=self.active_job_registry, + new_registry=self.finished_job_registry, + connection=connection, + ) + 
Result.create( + connection, + job_name=job.name, + worker_name=job.worker_name, + _type=ResultType.SUCCESSFUL, + return_value=result, + ttl=result_ttl, + ) + + def job_handle_failure(self, status: JobStatus, job: JobModel, exc_string: str, connection: ConnectionType): + # Does not set job status since the job might be stopped + job.after_execution( + SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL, + status, + prev_registry=self.active_job_registry, + new_registry=self.failed_job_registry, + connection=connection, + ) + Result.create( + connection, + job.name, + job.worker_name, + ResultType.FAILED, + SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL, + exc_string=exc_string, + ) + + def run_sync(self, job: JobModel) -> JobModel: + """Run a job synchronously, meaning on the same process the method was called.""" + job.prepare_for_execution("sync", self.active_job_registry, self.connection) + try: + result = perform_job(job, self.connection) + + with self.connection.pipeline() as pipeline: + self.job_handle_success( + job, result=result, job_info_ttl=job.job_info_ttl, result_ttl=job.success_ttl, connection=pipeline + ) + + pipeline.execute() + except Exception as e: # noqa + logger.warning(f"Job {job.name} failed with exception: {e}") + with self.connection.pipeline() as pipeline: + exc_string = "".join(traceback.format_exception(*sys.exc_info())) + self.job_handle_failure(JobStatus.FAILED, job, exc_string, pipeline) + pipeline.execute() + return job + + @classmethod + def dequeue_any( + cls, + queues: List[Self], + timeout: Optional[int], + connection: Optional[ConnectionType] = None, + ) -> Tuple[Optional[JobModel], Optional[Self]]: + """Class method returning a Job instance at the front of the given set of Queues, where the order of the queues + is important. + + When all the Queues are empty, depending on the `timeout` argument, either blocks execution of this function + for the duration of the timeout or until new messages arrive on any of the queues, or returns None. 
+ + :param queues: List of Queue objects + :param timeout: Timeout for the pop operation + :param connection: Broker Connection + :returns: Tuple of Job, Queue + """ + + while True: + registries = [q.queued_job_registry for q in queues] + for registry in registries: + registry.compact() + + registry_key, job_name = QueuedJobRegistry.pop(connection, registries, timeout) + if job_name is None: + return None, None + + queue = next(filter(lambda q: q.queued_job_registry.key == registry_key, queues), None) + if queue is None: + logger.warning(f"Could not find queue for registry key {registry_key} in queues") + return None, None + + job = JobModel.get(job_name, connection=connection) + if job is None: + continue + return job, queue + return None, None + + def __repr__(self) -> str: + return f"{self.__class__.__name__}({self.name!r})" + + def __str__(self) -> str: + return f"<{self.__class__.__name__} {self.name}>" + + def _remove_from_registries(self, job_name: str, connection: ConnectionType) -> None: + """Removes the job from all registries besides failed_job_registry""" + self.finished_job_registry.delete(connection=connection, job_name=job_name) + self.scheduled_job_registry.delete(connection=connection, job_name=job_name) + self.active_job_registry.delete(connection=connection, job_name=job_name) + self.canceled_job_registry.delete(connection=connection, job_name=job_name) + self.queued_job_registry.delete(connection=connection, job_name=job_name) + + def cancel_job(self, job_name: str) -> None: + """Cancels the given job, which will prevent the job from ever running (or inspected). + + This method merely exists as a high-level API call to cancel jobs without worrying about the internals required + to implement job cancellation. + + :param job_name: The job name to cancel. + :raises NoSuchJobError: If the job does not exist. + :raises InvalidJobOperation: If the job has already been canceled. 
+ """ + job = JobModel.get(job_name, connection=self.connection) + if job is None: + raise NoSuchJobError(f"No such job: {job_name}") + if job.status == JobStatus.CANCELED: + raise InvalidJobOperation(f"Cannot cancel already canceled job: {job.name}") + + pipe = self.connection.pipeline() + new_status = JobStatus.CANCELED if job.status == JobStatus.QUEUED else JobStatus.STOPPED + + while True: + try: + job.set_field("status", new_status, connection=pipe) + self._remove_from_registries(job_name, connection=pipe) + pipe.execute() + if new_status == JobStatus.CANCELED: + self.canceled_job_registry.add(pipe, job_name, 0) + else: + self.finished_job_registry.add( + pipe, job_name, current_timestamp() + SCHEDULER_CONFIG.DEFAULT_FAILURE_TTL + ) + pipe.execute() + break + except WatchError: + # if the pipeline comes from the caller, we re-raise the exception as it is the responsibility of the + # caller to handle it + raise + + def delete_job(self, job_name: str, expire_job_model: bool = True) -> None: + """Deletes the given job from the queue and all its registries""" + pipe = self.connection.pipeline() + + while True: + try: + self._remove_from_registries(job_name, connection=pipe) + self.failed_job_registry.delete(connection=pipe, job_name=job_name) + if expire_job_model: + job_model = JobModel.get(job_name, connection=self.connection) + if job_model is not None: + job_model.expire(ttl=job_model.job_info_ttl, connection=pipe) + pipe.execute() + break + except WatchError: + pass + + def enqueue_job( + self, job_model: JobModel, connection: Optional[ConnectionType] = None, at_front: bool = False + ) -> JobModel: + """Enqueues a job for delayed execution without checking dependencies. + + If Queue is instantiated with is_async=False, job is executed immediately. 
+ :param job_model: The job redis model + :param connection: The Redis Pipeline + :param at_front: Whether to enqueue the job at the front + + :returns: The enqueued JobModel + """ + + pipe = connection if connection is not None else self.connection.pipeline() + job_model.started_at = None + job_model.ended_at = None + job_model.status = JobStatus.QUEUED + job_model.enqueued_at = utcnow() + job_model.save(connection=pipe) + + if self._is_async: + if at_front: + score = current_timestamp() + else: + score = self.queued_job_registry.get_last_timestamp() or current_timestamp() + self.scheduled_job_registry.delete(connection=pipe, job_name=job_model.name) + self.queued_job_registry.add(connection=pipe, score=score, job_name=job_model.name) + pipe.execute() + logger.debug(f"Pushed job {job_model.name} into {self.name} queued-jobs registry") + else: # sync mode + pipe.execute() + job_model = self.run_sync(job_model) + job_model.expire(ttl=job_model.job_info_ttl, connection=pipe) + pipe.execute() + + return job_model diff --git a/scheduler/helpers/utils.py b/scheduler/helpers/utils.py new file mode 100644 index 0000000..dae312c --- /dev/null +++ b/scheduler/helpers/utils.py @@ -0,0 +1,23 @@ +import datetime +import importlib +import time +from typing import Callable + + +def current_timestamp() -> int: + """Returns current UTC timestamp in secs""" + return int(time.time()) + + +def utcnow() -> datetime.datetime: + """Return now in UTC""" + return datetime.datetime.now(datetime.timezone.utc) + + +def callable_func(callable_str: str) -> Callable: + path = callable_str.split(".") + module = importlib.import_module(".".join(path[:-1])) + func = getattr(module, path[-1]) + if callable(func) is False: + raise TypeError(f"'{callable_str}' is not callable") + return func diff --git a/scheduler/management/commands/delete_failed_executions.py b/scheduler/management/commands/delete_failed_executions.py index 01224e0..6f41980 100644 --- 
a/scheduler/management/commands/delete_failed_executions.py +++ b/scheduler/management/commands/delete_failed_executions.py @@ -1,8 +1,8 @@ import click from django.core.management.base import BaseCommand -from scheduler.queues import get_queue -from scheduler.rq_classes import JobExecution +from scheduler.helpers.queues import get_queue +from scheduler.redis_models import JobModel class Command(BaseCommand): @@ -15,15 +15,15 @@ def add_arguments(self, parser): def handle(self, *args, **options): queue = get_queue(options.get("queue", "default")) - job_ids = queue.failed_job_registry.get_job_ids() - jobs = JobExecution.fetch_many(job_ids, connection=queue.connection) + job_names = queue.failed_job_registry.all() + jobs = JobModel.get_many(job_names, connection=queue.connection) func_name = options.get("func", None) if func_name is not None: jobs = [job for job in jobs if job.func_name == func_name] dry_run = options.get("dry_run", False) click.echo(f"Found {len(jobs)} failed jobs") - for job in jobs: - click.echo(f"Deleting {job.id}") + for job in job_names: + click.echo(f"Deleting {job}") if not dry_run: - job.delete() + queue.delete_job(job) click.echo(f"Deleted {len(jobs)} failed jobs") diff --git a/scheduler/management/commands/export.py b/scheduler/management/commands/export.py index bb2b249..85c3c9d 100644 --- a/scheduler/management/commands/export.py +++ b/scheduler/management/commands/export.py @@ -1,16 +1,13 @@ import sys import click -from django.apps import apps from django.core.management.base import BaseCommand -from scheduler.tools import MODEL_NAMES +from scheduler.models import Task class Command(BaseCommand): - """ - Export all scheduled jobs - """ + """Export all scheduled jobs""" help = __doc__ @@ -43,13 +40,12 @@ def add_arguments(self, parser): def handle(self, *args, **options): file = open(options.get("filename"), "w") if options.get("filename") else sys.stdout res = list() - for model_name in MODEL_NAMES: - model = 
apps.get_model(app_label="scheduler", model_name=model_name) - jobs = model.objects.all() - if options.get("enabled"): - jobs = jobs.filter(enabled=True) - for job in jobs: - res.append(job.to_dict()) + + tasks = Task.objects.all() + if options.get("enabled"): + tasks = tasks.filter(enabled=True) + for task in tasks: + res.append(task.to_dict()) if options.get("format") == "json": import json diff --git a/scheduler/management/commands/import.py b/scheduler/management/commands/import.py index 8171781..28007a2 100644 --- a/scheduler/management/commands/import.py +++ b/scheduler/management/commands/import.py @@ -2,15 +2,13 @@ from typing import Dict, Any, Optional import click -from django.apps import apps from django.conf import settings from django.contrib.contenttypes.models import ContentType from django.core.management.base import BaseCommand from django.utils import timezone -from scheduler.models.task import TaskArg, TaskKwarg, Task -from scheduler.models.task import TaskType -from scheduler.tools import MODEL_NAMES +from scheduler.models import TaskArg, TaskKwarg, Task +from scheduler.models import TaskType def job_model_str(model_str: str) -> str: @@ -139,9 +137,7 @@ def handle(self, *args, **options): jobs = yaml.load(file, yaml.SafeLoader) if options.get("reset"): - for model_name in MODEL_NAMES: - model = apps.get_model(app_label="scheduler", model_name=model_name) - model.objects.all().delete() + Task.objects.all().delete() for job in jobs: create_task_from_dict(job, update=options.get("update")) diff --git a/scheduler/management/commands/run_job.py b/scheduler/management/commands/run_job.py index 48c7458..2420c87 100644 --- a/scheduler/management/commands/run_job.py +++ b/scheduler/management/commands/run_job.py @@ -1,7 +1,7 @@ import click from django.core.management.base import BaseCommand -from scheduler.queues import get_queue +from scheduler.helpers.queues import get_queue class Command(BaseCommand): @@ -32,6 +32,6 @@ def handle(self, **options): 
queue = get_queue(options.get("queue")) func = options.get("callable") args = options.get("args") - job = queue.enqueue_call(func, args=args, timeout=timeout, result_ttl=result_ttl) + job = queue.create_and_enqueue_job(func, args=args, timeout=timeout, result_ttl=result_ttl, when=None) if verbosity: - click.echo(f"Job {job.id} created") + click.echo(f"Job {job.name} created") diff --git a/scheduler/management/commands/rqstats.py b/scheduler/management/commands/scheduler_stats.py similarity index 79% rename from scheduler/management/commands/rqstats.py rename to scheduler/management/commands/scheduler_stats.py index 13a7de8..a52e646 100644 --- a/scheduler/management/commands/rqstats.py +++ b/scheduler/management/commands/scheduler_stats.py @@ -9,13 +9,11 @@ ANSI_LIGHT_WHITE = "\033[1;37m" ANSI_RESET = "\033[0m" -KEYS = ("jobs", "started_jobs", "deferred_jobs", "finished_jobs", "canceled_jobs", "workers") +KEYS = ("queued_jobs", "started_jobs", "finished_jobs", "canceled_jobs", "workers") class Command(BaseCommand): - """ - Print statistics - """ + """Print statistics""" help = __doc__ @@ -52,18 +50,20 @@ def add_arguments(self, parser): def _print_separator(self): click.echo("-" * self.table_width) - def _print_stats_dashboard(self, statistics, prev_stats=None): + def _print_stats_dashboard(self, statistics, prev_stats=None, with_color: bool = True): if self.interval: click.clear() click.echo() click.echo("Django-Scheduler CLI Dashboard") click.echo() self._print_separator() - click.echo(f"| {'Name':<16} | Queued | Active | Deferred | Finished | Canceled | Workers |") + click.echo(f"| {'Name':<16} | Queued | Active | Finished | Canceled | Workers |") self._print_separator() for ind, queue in enumerate(statistics["queues"]): vals = list((queue[k] for k in KEYS)) # Deal with colors + if not with_color: + colors = ["" for _ in KEYS] if prev_stats and len(prev_stats["queues"]) > ind: prev = prev_stats["queues"][ind] prev_vals = tuple(prev[k] for k in KEYS) @@ -73,7 
+73,7 @@ def _print_stats_dashboard(self, statistics, prev_stats=None): else: colors = [ANSI_LIGHT_WHITE for _ in range(len(vals))] to_print = " | ".join([f"{colors[i]}{vals[i]:9}{ANSI_RESET}" for i in range(len(vals))]) - click.echo(f"| {queue['name']:<16} | {to_print} |", color=True) + click.echo(f"| {queue['name']:<16} | {to_print} |", color=with_color) self._print_separator() @@ -82,6 +82,9 @@ def _print_stats_dashboard(self, statistics, prev_stats=None): click.echo("Press 'Ctrl+c' to quit") def handle(self, *args, **options): + if options.get("json") and options.get("yaml"): + click.secho("Aborting. Cannot output as both json and yaml", err=True, fg="red") + exit(1) if options.get("json"): import json @@ -97,22 +100,20 @@ def handle(self, *args, **options): click.secho("Aborting. yaml not supported", err=True, fg="red") return - click.secho( - yaml.dump(get_statistics(), default_flow_style=False), - ) + click.secho(yaml.dump(get_statistics(), default_flow_style=False)) return self.interval = options.get("interval") if not self.interval or self.interval < 0: - self._print_stats_dashboard(get_statistics()) + self._print_stats_dashboard(get_statistics(), with_color=not options.get("no_color")) return try: prev = None while True: statistics = get_statistics() - self._print_stats_dashboard(statistics, prev) + self._print_stats_dashboard(statistics, prev, with_color=not options.get("no_color")) prev = statistics time.sleep(self.interval) except KeyboardInterrupt: diff --git a/scheduler/management/commands/rqworker.py b/scheduler/management/commands/scheduler_worker.py similarity index 69% rename from scheduler/management/commands/rqworker.py rename to scheduler/management/commands/scheduler_worker.py index ce6201b..ab122d6 100644 --- a/scheduler/management/commands/rqworker.py +++ b/scheduler/management/commands/scheduler_worker.py @@ -5,11 +5,10 @@ import click from django.core.management.base import BaseCommand from django.db import connections -from rq.logutils 
import setup_loghandlers -from scheduler.broker_types import ConnectionErrorTypes -from scheduler.rq_classes import register_sentry -from scheduler.tools import create_worker +from scheduler.types import ConnectionErrorTypes +from scheduler.worker import create_worker +from scheduler.settings import logger VERBOSITY_TO_LOG_LEVEL = { 0: logging.CRITICAL, @@ -19,21 +18,16 @@ } WORKER_ARGUMENTS = { + "queues", "name", - "default_result_ttl", "connection", - "exc_handler", - "exception_handlers", - "default_worker_ttl", "maintenance_interval", - "job_class", - "queue_class", - "log_job_description", "job_monitoring_interval", + "dequeue_strategy", "disable_default_exception_handler", - "prepare_for_work", - "serializer", - "work_horse_killed_handler", + "fork_job_execution", + "with_scheduler", + "burst", } @@ -42,23 +36,63 @@ def reset_db_connections(): c.close() +def register_sentry(sentry_dsn, **opts): + try: + import sentry_sdk + from sentry_sdk.integrations.rq import RqIntegration + except ImportError: + logger.error("Sentry SDK not installed. Skipping Sentry Integration") + return + + sentry_sdk.init(sentry_dsn, integrations=[RqIntegration()], **opts) + + class Command(BaseCommand): - """ - Runs RQ workers on specified queues. Note that all queues passed into a - single rqworker command must share the same connection. + """Runs scheduler workers on specified queues. + Note that all queues passed into a single scheduler_worker command must share the same connection. 
Example usage: - python manage.py rqworker high medium low + python manage.py scheduler_worker high medium low """ args = "" - def add_arguments(self, parser): + def _add_sentry_args(self, parser): + parser.add_argument("--sentry-dsn", action="store", dest="sentry_dsn", help="Sentry DSN to use") + parser.add_argument("--sentry-debug", action="store_true", dest="sentry_debug", help="Enable Sentry debug mode") + parser.add_argument("--sentry-ca-certs", action="store", dest="sentry_ca_certs", help="Path to CA certs file") + + def _add_work_args(self, parser): parser.add_argument( - "--pid", action="store", dest="pidfile", default=None, help="file to write the worker`s pid into" + "--burst", action="store_true", dest="burst", default=False, help="Run worker in burst mode" ) parser.add_argument( - "--burst", action="store_true", dest="burst", default=False, help="Run worker in burst mode" + "--max-jobs", + action="store", + default=None, + dest="max_jobs", + type=int, + help="Maximum number of jobs to execute before terminating worker", + ) + parser.add_argument( + "--max-idle-time", + action="store", + default=None, + dest="max_idle_time", + type=int, + help="Maximum number of seconds to wait for new job before terminating worker", + ) + parser.add_argument( + "--without-scheduler", + action="store_false", + default=True, + dest="with_scheduler", + help="Run worker without scheduler, default to with scheduler", + ) + + def add_arguments(self, parser): + parser.add_argument( + "--pid", action="store", dest="pidfile", default=None, help="file to write the worker`s pid into" ) parser.add_argument("--name", action="store", dest="name", default=None, help="Name of the worker") parser.add_argument( @@ -69,14 +103,6 @@ def add_arguments(self, parser): default=420, help="Default worker timeout to be used", ) - parser.add_argument( - "--max-jobs", - action="store", - default=None, - dest="max_jobs", - type=int, - help="Maximum number of jobs to execute before terminating 
worker", - ) parser.add_argument( "--fork-job-execution", action="store", @@ -85,16 +111,14 @@ def add_arguments(self, parser): type=bool, help="Fork job execution to another process", ) - parser.add_argument("--job-class", action="store", dest="job_class", help="Jobs class to use") parser.add_argument( "queues", nargs="*", type=str, help="The queues to work on, separated by space, all queues should be using the same redis", ) - parser.add_argument("--sentry-dsn", action="store", dest="sentry_dsn", help="Sentry DSN to use") - parser.add_argument("--sentry-debug", action="store_true", dest="sentry_debug", help="Enable Sentry debug mode") - parser.add_argument("--sentry-ca-certs", action="store", dest="sentry_ca_certs", help="Path to CA certs file") + self._add_sentry_args(parser) + self._add_work_args(parser) def handle(self, **options): queues = options.pop("queues", []) @@ -109,9 +133,9 @@ def handle(self, **options): fp.write(str(os.getpid())) # Verbosity is defined by default in BaseCommand for all commands - verbosity = options.pop("verbosity", 1) + verbosity = options.pop("verbosity", 3) log_level = VERBOSITY_TO_LOG_LEVEL.get(verbosity, logging.INFO) - setup_loghandlers(log_level) + logger.setLevel(log_level) init_options = {k: v for k, v in options.items() if k in WORKER_ARGUMENTS} @@ -128,9 +152,8 @@ def handle(self, **options): register_sentry(options.get("sentry_dsn"), **sentry_opts) w.work( - burst=options.get("burst", False), - logging_level=log_level, max_jobs=options["max_jobs"], + max_idle_time=options.get("max_idle_time", None), ) except ConnectionErrorTypes as e: click.echo(str(e), err=True) diff --git a/scheduler/migrations/0021_remove_task_job_id_task_job_name.py b/scheduler/migrations/0021_remove_task_job_id_task_job_name.py new file mode 100644 index 0000000..3c03f51 --- /dev/null +++ b/scheduler/migrations/0021_remove_task_job_id_task_job_name.py @@ -0,0 +1,22 @@ +# Generated by Django 5.1.7 on 2025-03-24 14:30 + +from django.db import 
migrations, models + + +class Migration(migrations.Migration): + + dependencies = [ + ('scheduler', '0020_remove_repeatabletask_new_task_id_and_more'), + ] + + operations = [ + migrations.RemoveField( + model_name='task', + name='job_id', + ), + migrations.AddField( + model_name='task', + name='job_name', + field=models.CharField(blank=True, editable=False, help_text='Current job_name on queue', max_length=128, null=True, verbose_name='job name'), + ), + ] diff --git a/scheduler/models/__init__.py b/scheduler/models/__init__.py index e69de29..1b4625a 100644 --- a/scheduler/models/__init__.py +++ b/scheduler/models/__init__.py @@ -0,0 +1,12 @@ +__all__ = [ + "Task", + "TaskType", + "TaskArg", + "TaskKwarg", + "get_scheduled_task", + "run_task", + "get_next_cron_time", +] + +from .args import TaskArg, TaskKwarg +from .task import TaskType, Task, get_scheduled_task, run_task, get_next_cron_time diff --git a/scheduler/models/args.py b/scheduler/models/args.py index f7cd57b..ac2d700 100644 --- a/scheduler/models/args.py +++ b/scheduler/models/args.py @@ -7,7 +7,7 @@ from django.db import models from django.utils.translation import gettext_lazy as _ -from scheduler import tools +from scheduler.helpers import utils ARG_TYPE_TYPES_DICT = { "str": str, @@ -48,7 +48,7 @@ def clean(self): ) try: if self.arg_type == "callable": - tools.callable_func(self.val) + utils.callable_func(self.val) elif self.arg_type == "datetime": datetime.fromisoformat(self.val) elif self.arg_type == "bool": @@ -71,7 +71,7 @@ def delete(self, **kwargs): def value(self): if self.arg_type == "callable": - res = tools.callable_func(self.val)() + res = utils.callable_func(self.val)() elif self.arg_type == "datetime": res = datetime.fromisoformat(self.val) elif self.arg_type == "bool": diff --git a/scheduler/models/ephemeral_models.py b/scheduler/models/ephemeral_models.py new file mode 100644 index 0000000..ca24a73 --- /dev/null +++ b/scheduler/models/ephemeral_models.py @@ -0,0 +1,21 @@ +from django.db 
import models + + +class Queue(models.Model): + """Placeholder model with no database table, but with django admin page and contenttype permission""" + + class Meta: + managed = False # not in Django's database + default_permissions = () + permissions = [["view", "Access admin page"]] + verbose_name_plural = " Queues" + + +class Worker(models.Model): + """Placeholder model with no database table, but with django admin page and contenttype permission""" + + class Meta: + managed = False # not in Django's database + default_permissions = () + permissions = [["view", "Access admin page"]] + verbose_name_plural = " Workers" diff --git a/scheduler/models/queue.py b/scheduler/models/queue.py deleted file mode 100644 index 84b6c06..0000000 --- a/scheduler/models/queue.py +++ /dev/null @@ -1,12 +0,0 @@ -from django.db import models - - -class Queue(models.Model): - """Placeholder model with no database table, but with django admin page - and contenttype permission""" - - class Meta: - managed = False # not in Django's database - default_permissions = () - permissions = [["view", "Access admin page"]] - verbose_name_plural = " Queues" diff --git a/scheduler/models/task.py b/scheduler/models/task.py index f5bcb4c..352f1f0 100644 --- a/scheduler/models/task.py +++ b/scheduler/models/task.py @@ -1,10 +1,8 @@ import math -import uuid from datetime import timedelta, datetime -from typing import Dict +from typing import Dict, Any, Optional import croniter -from django.apps import apps from django.conf import settings as django_settings from django.contrib import admin from django.contrib.contenttypes.fields import GenericRelation @@ -18,58 +16,58 @@ from django.utils.translation import gettext_lazy as _ from scheduler import settings -from scheduler import tools -from scheduler.models.args import TaskArg, TaskKwarg -from scheduler.queues import get_queue -from scheduler.rq_classes import DjangoQueue -from scheduler.settings import QUEUES -from scheduler.settings import logger 
-from scheduler.tools import TaskType +from scheduler.helpers.callback import Callback +from scheduler.helpers.queues import Queue +from scheduler.helpers.queues import get_queue +from scheduler.redis_models import JobModel +from scheduler.settings import logger, get_queue_names +from scheduler.types import ConnectionType, TASK_TYPES +from .args import TaskArg, TaskKwarg +from ..helpers import utils -SCHEDULER_INTERVAL = settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL +def _get_task_for_job(job: JobModel) -> Optional["Task"]: + if job.task_type is None or job.scheduled_task_id is None: + return None + task = Task.objects.filter(id=job.scheduled_task_id).first() + return task -def failure_callback(job, connection, result, *args, **kwargs): - task_type = job.meta.get("task_type", None) - if task_type is None: - return - task = Task.objects.filter(job_id=job.id).first() + +def failure_callback(job: JobModel, connection, result, *args, **kwargs): + task = _get_task_for_job(job) if task is None: - logger.warn(f"Could not find task for job {job.id}") + logger.warn(f"Could not find task for job {job.name}") return mail_admins( f"Task {task.id}/{task.name} has failed", "See django-admin for logs", ) - task.job_id = None + task.job_name = None task.failed_runs += 1 task.last_failed_run = timezone.now() - task.save(schedule_job=True) + task.save(schedule_job=True, clean=False) -def success_callback(job, connection, result, *args, **kwargs): - task_type = job.meta.get("task_type", None) - if task_type is None: - return - - task = Task.objects.filter(job_id=job.id).first() +def success_callback(job: JobModel, connection: ConnectionType, result: Any, *args, **kwargs): + task = _get_task_for_job(job) if task is None: - try: - model = apps.get_model(app_label="scheduler", model_name=task_type) - task = model.objects.filter(job_id=job.id).first() - except LookupError: - pass - if task is None: - logger.warn(f"Could not find task for job {task_type}/{job.id}") + logger.warn(f"Could 
not find task for job {job.name}") return - task.job_id = None + task.job_name = None task.successful_runs += 1 task.last_successful_run = timezone.now() - task.save(schedule_job=True) + task.save(schedule_job=True, clean=False) def get_queue_choices(): - return [(queue, queue) for queue in QUEUES.keys()] + queue_names = get_queue_names() + return [(queue, queue) for queue in queue_names] + + +class TaskType(models.TextChoices): + CRON = "CronTaskType", _("Cron Task") + REPEATABLE = "RepeatableTaskType", _("Repeatable Task") + ONCE = "OnceTaskType", _("Run once") class Task(models.Model): @@ -95,8 +93,8 @@ class TimeUnits(models.TextChoices): ), ) queue = models.CharField(_("queue"), max_length=255, choices=get_queue_choices, help_text=_("Queue name")) - job_id = models.CharField( - _("job id"), max_length=128, editable=False, blank=True, null=True, help_text=_("Current job_id on queue") + job_name = models.CharField( + _("job name"), max_length=128, editable=False, blank=True, null=True, help_text=_("Current job_name on queue") ) at_front = models.BooleanField( _("At front"), @@ -180,22 +178,23 @@ class TimeUnits(models.TextChoices): def callable_func(self): """Translate callable string to callable""" - return tools.callable_func(self.callable) + return utils.callable_func(self.callable) @admin.display(boolean=True, description=_("is scheduled?")) def is_scheduled(self) -> bool: """Check whether a next job for this task is queued/scheduled to be executed""" - if self.job_id is None: # no job_id => is not scheduled + if self.job_name is None: # no job_id => is not scheduled return False # check whether job_id is in scheduled/queued/active jobs - scheduled_jobs = self.rqueue.scheduled_job_registry.get_job_ids() - enqueued_jobs = self.rqueue.get_job_ids() - active_jobs = self.rqueue.started_job_registry.get_job_ids() - res = (self.job_id in scheduled_jobs) or (self.job_id in enqueued_jobs) or (self.job_id in active_jobs) + res = ( + (self.job_name in 
self.rqueue.scheduled_job_registry.all()) + or (self.job_name in self.rqueue.queued_job_registry.all()) + or (self.job_name in self.rqueue.active_job_registry.all()) + ) # If the job_id is not scheduled/queued/started, # update the job_id to None. (The job_id belongs to a previous run which is completed) if not res: - self.job_id = None + self.job_name = None super(Task, self).save() return res @@ -218,32 +217,29 @@ def parse_kwargs(self): return dict([kwarg.value() for kwarg in kwargs]) def _next_job_id(self): - addition = uuid.uuid4().hex[-10:] - name = self.name.replace("/", ".") - return f"{self.queue}:{name}:{addition}" + addition = timezone.now().strftime("%Y%m%d%H%M%S%f") + return f"{self.queue}:{self.id}:{addition}" def _enqueue_args(self) -> Dict: - """Args for DjangoQueue.enqueue. - Set all arguments for DjangoQueue.enqueue/enqueue_at. - Particularly: + """Args for Queue.enqueue_call. + Set all arguments for Queue.enqueue. Particularly: - set job timeout and ttl - ensure a callback to reschedule the job next iteration. 
- Set job-id to proper format - set job meta """ res = dict( - meta=dict( - task_type=self.task_type, - scheduled_task_id=self.id, - ), - on_success=success_callback, - on_failure=failure_callback, - job_id=self._next_job_id(), + meta=dict(), + task_type=self.task_type, + scheduled_task_id=self.id, + on_success=Callback(success_callback), + on_failure=Callback(failure_callback), + name=self._next_job_id(), ) if self.at_front: res["at_front"] = self.at_front if self.timeout: - res["job_timeout"] = self.timeout + res["timeout"] = self.timeout if self.result_ttl is not None: res["result_ttl"] = self.result_ttl if self.task_type == TaskType.REPEATABLE: @@ -252,20 +248,14 @@ def _enqueue_args(self) -> Dict: return res @property - def rqueue(self) -> DjangoQueue: + def rqueue(self) -> Queue: """Returns django-queue for job""" return get_queue(self.queue) def enqueue_to_run(self) -> bool: - """Enqueue task to run now.""" + """Enqueue task to run now as a different instance from the scheduled task.""" kwargs = self._enqueue_args() - job = self.rqueue.enqueue( - tools.run_task, - args=(self.task_type, self.id), - **kwargs, - ) - self.job_id = job.id - self.save(schedule_job=False) + self.rqueue.create_and_enqueue_job(run_task, args=(self.task_type, self.id), when=None, **kwargs) return True def unschedule(self) -> bool: @@ -273,18 +263,15 @@ def unschedule(self) -> bool: If a job is queued to be executed or scheduled to be executed, it will remove it. 
""" - queue = self.rqueue - if self.job_id is None: - return True - queue.remove(self.job_id) - queue.scheduled_job_registry.remove(self.job_id) - self.job_id = None - self.save(schedule_job=False) + if self.job_name is not None: + self.rqueue.delete_job(self.job_name) + self.job_name = None + self.save(schedule_job=False, clean=False) return True def _schedule_time(self) -> datetime: if self.task_type == TaskType.CRON: - self.scheduled_time = tools.get_next_cron_time(self.cron_string) + self.scheduled_time = get_next_cron_time(self.cron_string) elif self.task_type == TaskType.REPEATABLE: _now = timezone.now() if self.scheduled_time >= _now: @@ -358,21 +345,24 @@ def _schedule(self) -> bool: if not self.enabled: logger.debug(f"Task {str(self)} disabled, enable task before scheduling") return False - if self.task_type in {TaskType.REPEATABLE, TaskType.ONCE} and self._schedule_time() < timezone.now(): - return False schedule_time = self._schedule_time() + if self.task_type in {TaskType.REPEATABLE, TaskType.ONCE} and schedule_time < timezone.now(): + logger.debug(f"Task {str(self)} scheduled time is in the past, not scheduling") + return False kwargs = self._enqueue_args() - job = self.rqueue.enqueue_at( - schedule_time, - tools.run_task, + job = self.rqueue.create_and_enqueue_job( + run_task, args=(self.task_type, self.id), + when=schedule_time, **kwargs, ) - self.job_id = job.id - super(Task, self).save() + self.job_name = job.name return True def save(self, **kwargs): + should_clean = kwargs.pop("clean", True) + if should_clean: + self.clean() schedule_job = kwargs.pop("schedule_job", True) update_fields = kwargs.get("update_fields", None) if update_fields is not None: @@ -394,35 +384,30 @@ def interval_seconds(self): def clean_callable(self): try: - tools.callable_func(self.callable) + utils.callable_func(self.callable) except Exception: raise ValidationError( {"callable": ValidationError(_("Invalid callable, must be importable"), code="invalid")} ) def 
clean_queue(self): - queue_keys = settings.QUEUES.keys() - if self.queue not in queue_keys: + queue_names = get_queue_names() + if self.queue not in queue_names: raise ValidationError( { "queue": ValidationError( - _("Invalid queue, must be one of: {}".format(", ".join(queue_keys))), code="invalid" + "Invalid queue, must be one of: {}".format(", ".join(queue_names)), code="invalid" ) } ) def clean_interval_unit(self): - if SCHEDULER_INTERVAL > self.interval_seconds(): + config = settings.SCHEDULER_CONFIG + if config.SCHEDULER_INTERVAL > self.interval_seconds(): raise ValidationError( _("Job interval is set lower than %(queue)r queue's interval. minimum interval is %(interval)"), code="invalid", - params={"queue": self.queue, "interval": SCHEDULER_INTERVAL}, - ) - if self.interval_seconds() % SCHEDULER_INTERVAL: - raise ValidationError( - _("Job interval is not a multiple of rq_scheduler's interval frequency: %(interval)ss"), - code="invalid", - params={"interval": SCHEDULER_INTERVAL}, + params={"queue": self.queue, "interval": config.SCHEDULER_INTERVAL}, ) def clean_result_ttl(self) -> None: @@ -446,6 +431,10 @@ def clean_cron_string(self): raise ValidationError({"cron_string": ValidationError(_(str(e)), code="invalid")}) def clean(self): + if self.task_type not in TaskType.values: + raise ValidationError( + {"task_type": ValidationError(_("Invalid task type"), code="invalid")}, + ) self.clean_queue() self.clean_callable() if self.task_type == TaskType.CRON: @@ -453,3 +442,47 @@ def clean(self): if self.task_type == TaskType.REPEATABLE: self.clean_interval_unit() self.clean_result_ttl() + if self.task_type == TaskType.REPEATABLE and self.scheduled_time is None: + self.scheduled_time = timezone.now() + timedelta(seconds=2) + if self.task_type == TaskType.ONCE and self.scheduled_time is None: + raise ValidationError({"scheduled_time": ValidationError(_("Scheduled time is required"), code="invalid")}) + if self.task_type == TaskType.ONCE and self.scheduled_time < 
timezone.now(): + raise ValidationError( + {"scheduled_time": ValidationError(_("Scheduled time must be in the future"), code="invalid")} + ) + + +def get_next_cron_time(cron_string: Optional[str]) -> Optional[timezone.datetime]: + """Calculate the next scheduled time by creating a crontab object with a cron string""" + if cron_string is None: + return None + now = timezone.now() + itr = croniter.croniter(cron_string, now) + next_itr = itr.get_next(timezone.datetime) + return next_itr + + +def get_scheduled_task(task_type_str: str, task_id: int) -> Task: + # Try with new model names + if task_type_str in TASK_TYPES: + try: + task_type = TaskType(task_type_str) + task = Task.objects.filter(task_type=task_type, id=task_id).first() + if task is None: + raise ValueError(f"Job {task_type}:{task_id} does not exit") + return task + except ValueError: + raise ValueError(f"Invalid task type {task_type_str}") + raise ValueError(f"Job Model {task_type_str} does not exist, choices are {TASK_TYPES}") + + +def run_task(task_model: str, task_id: int) -> Any: + """Run a scheduled job""" + if isinstance(task_id, str): + task_id = int(task_id) + scheduled_task = get_scheduled_task(task_model, task_id) + logger.debug(f"Running task {str(scheduled_task)}") + args = scheduled_task.parse_args() + kwargs = scheduled_task.parse_kwargs() + res = scheduled_task.callable_func()(*args, **kwargs) + return res diff --git a/scheduler/models/worker.py b/scheduler/models/worker.py deleted file mode 100644 index f8ee7af..0000000 --- a/scheduler/models/worker.py +++ /dev/null @@ -1,12 +0,0 @@ -from django.db import models - - -class Worker(models.Model): - """Placeholder model with no database table, but with django admin page - and contenttype permission""" - - class Meta: - managed = False # not in Django's database - default_permissions = () - permissions = [["view", "Access admin page"]] - verbose_name_plural = " Workers" diff --git a/scheduler/queues.py b/scheduler/queues.py deleted file mode 
100644 index f7796db..0000000 --- a/scheduler/queues.py +++ /dev/null @@ -1,150 +0,0 @@ -from typing import List, Dict, Set - -from .broker_types import ConnectionErrorTypes, BrokerMetaData -from .rq_classes import JobExecution, DjangoQueue, DjangoWorker -from .settings import SCHEDULER_CONFIG -from .settings import logger, Broker - -_CONNECTION_PARAMS = { - "URL", - "DB", - "USE_REDIS_CACHE", - "UNIX_SOCKET_PATH", - "HOST", - "PORT", - "PASSWORD", - "SENTINELS", - "MASTER_NAME", - "SOCKET_TIMEOUT", - "SSL", - "CONNECTION_KWARGS", -} - - -class QueueNotFoundError(Exception): - pass - - -def _get_broker_connection(config, use_strict_broker=False): - """ - Returns a redis connection from a connection config - """ - if SCHEDULER_CONFIG.BROKER == Broker.FAKEREDIS: - import fakeredis - - broker_cls = fakeredis.FakeRedis if not use_strict_broker else fakeredis.FakeStrictRedis - else: - broker_cls = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].connection_type - logger.debug(f"Getting connection for {config}") - if "URL" in config: - ssl_url_protocol = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].ssl_prefix - if config.get("SSL") or config.get("URL").startswith(f"{ssl_url_protocol}://"): - return broker_cls.from_url( - config["URL"], - db=config.get("DB"), - ssl_cert_reqs=config.get("SSL_CERT_REQS", "required"), - ) - else: - return broker_cls.from_url( - config["URL"], - db=config.get("DB"), - ) - if "UNIX_SOCKET_PATH" in config: - return broker_cls(unix_socket_path=config["UNIX_SOCKET_PATH"], db=config["DB"]) - - if "SENTINELS" in config: - connection_kwargs = { - "db": config.get("DB"), - "password": config.get("PASSWORD"), - "username": config.get("USERNAME"), - "socket_timeout": config.get("SOCKET_TIMEOUT"), - } - connection_kwargs.update(config.get("CONNECTION_KWARGS", {})) - sentinel_kwargs = config.get("SENTINEL_KWARGS", {}) - SentinelClass = BrokerMetaData[(SCHEDULER_CONFIG.BROKER, use_strict_broker)].sentinel_type - sentinel = 
SentinelClass(config["SENTINELS"], sentinel_kwargs=sentinel_kwargs, **connection_kwargs) - return sentinel.master_for( - service_name=config["MASTER_NAME"], - redis_class=broker_cls, - ) - - return broker_cls( - host=config["HOST"], - port=config["PORT"], - db=config.get("DB", 0), - username=config.get("USERNAME", None), - password=config.get("PASSWORD"), - ssl=config.get("SSL", False), - ssl_cert_reqs=config.get("SSL_CERT_REQS", "required"), - **config.get("CLIENT_KWARGS", {}), - ) - - -def get_connection(queue_settings, use_strict_redis=False): - """Returns a Broker connection to use based on parameters in SCHEDULER_QUEUES""" - return _get_broker_connection(queue_settings, use_strict_redis) - - -def get_queue( - name="default", default_timeout=None, is_async=None, autocommit=None, connection=None, **kwargs -) -> DjangoQueue: - """Returns an DjangoQueue using parameters defined in `SCHEDULER_QUEUES`""" - from .settings import QUEUES - - if name not in QUEUES: - raise QueueNotFoundError(f"Queue {name} not found, queues={QUEUES.keys()}") - queue_settings = QUEUES[name] - if is_async is None: - is_async = queue_settings.get("ASYNC", True) - - if default_timeout is None: - default_timeout = queue_settings.get("DEFAULT_TIMEOUT") - if connection is None: - connection = get_connection(queue_settings) - return DjangoQueue( - name, default_timeout=default_timeout, connection=connection, is_async=is_async, autocommit=autocommit, **kwargs - ) - - -def get_all_workers() -> Set[DjangoWorker]: - from .settings import QUEUES - - workers_set: Set[DjangoWorker] = set() - for queue_name in QUEUES: - connection = get_connection(QUEUES[queue_name]) - try: - curr_workers: Set[DjangoWorker] = set(DjangoWorker.all(connection=connection)) - workers_set.update(curr_workers) - except ConnectionErrorTypes as e: - logger.error(f"Could not connect for queue {queue_name}: {e}") - return workers_set - - -def _queues_share_connection_params(q1_params: Dict, q2_params: Dict): - """Check that both 
queues share the same connection parameters""" - return all( - ((p not in q1_params and p not in q2_params) or (q1_params.get(p, None) == q2_params.get(p, None))) - for p in _CONNECTION_PARAMS - ) - - -def get_queues(*queue_names, **kwargs) -> List[DjangoQueue]: - """Return queue instances from specified queue names. - All instances must use the same Redis connection. - """ - from .settings import QUEUES - - kwargs["job_class"] = JobExecution - queue_params = QUEUES[queue_names[0]] - queues = [get_queue(queue_names[0], **kwargs)] - # perform consistency checks while building return list - for name in queue_names[1:]: - if not _queues_share_connection_params(queue_params, QUEUES[name]): - raise ValueError( - f'Queues must have the same broker connection. "{name}" and' - f' "{queue_names[0]}" have different connections' - ) - queue = get_queue(name, **kwargs) - queues.append(queue) - - return queues diff --git a/scheduler/redis_models/__init__.py b/scheduler/redis_models/__init__.py new file mode 100644 index 0000000..2c1e269 --- /dev/null +++ b/scheduler/redis_models/__init__.py @@ -0,0 +1,33 @@ +__all__ = [ + "Result", + "ResultType", + "as_str", + "SchedulerLock", + "WorkerModel", + "DequeueTimeout", + "KvLock", + "JobStatus", + "JobModel", + "JobNamesRegistry", + "FinishedJobRegistry", + "ActiveJobRegistry", + "FailedJobRegistry", + "CanceledJobRegistry", + "ScheduledJobRegistry", + "QueuedJobRegistry", +] + +from .base import as_str +from .job import JobStatus, JobModel +from .lock import SchedulerLock, KvLock +from .registry.base_registry import DequeueTimeout, JobNamesRegistry +from .registry.queue_registries import ( + FinishedJobRegistry, + ActiveJobRegistry, + FailedJobRegistry, + CanceledJobRegistry, + ScheduledJobRegistry, + QueuedJobRegistry, +) +from .result import Result, ResultType +from .worker import WorkerModel diff --git a/scheduler/redis_models/base.py b/scheduler/redis_models/base.py new file mode 100644 index 0000000..9ff0f39 --- /dev/null +++ 
b/scheduler/redis_models/base.py @@ -0,0 +1,250 @@ +import dataclasses +import json +from collections.abc import Sequence +from datetime import datetime, timezone +from enum import Enum +from typing import List, Optional, Union, Dict, Collection, Any, ClassVar, Set, Type + +from redis import Redis + +from scheduler.settings import logger +from scheduler.types import ConnectionType, Self + +MAX_KEYS = 1000 + + +def as_str(v: Union[bytes, str]) -> Optional[str]: + """Converts a `bytes` value to a string using `utf-8`. + + :param v: The value (None/bytes/str) + :raises: ValueError: If the value is not `bytes` or `str` + :returns: Either the decoded string or None + """ + if v is None or isinstance(v, str): + return v + if isinstance(v, bytes): + return v.decode("utf-8") + raise ValueError(f"Unknown type {type(v)} for `{v}`.") + + +def decode_dict(d: Dict[bytes, bytes], exclude_keys: Set[str]) -> Dict[str, str]: + return {k.decode(): v.decode() for (k, v) in d.items() if k.decode() not in exclude_keys} + + +def _serialize(value: Any) -> Optional[Any]: + if value is None: + return None + if isinstance(value, bool): + value = int(value) + elif isinstance(value, Enum): + value = value.value + elif isinstance(value, datetime): + value = value.isoformat() + elif isinstance(value, dict): + value = json.dumps(value) + elif isinstance(value, (int, float)): + return value + elif isinstance(value, (list, set, tuple)): + return json.dumps(value, default=str) + return str(value) + + +def _deserialize(value: str, _type: Type) -> Any: + if value is None: + return None + try: + if _type is str or _type == Optional[str]: + return as_str(value) + if _type is datetime or _type == Optional[datetime]: + return datetime.fromisoformat(as_str(value)) + elif _type is bool: + return bool(int(value)) + elif _type is int or _type == Optional[int]: + return int(value) + elif _type is float or _type == Optional[float]: + return float(value) + elif _type in {List[str], Dict[str, str]}: + return 
json.loads(value) + elif _type == Optional[Any]: + return json.loads(value) + elif issubclass(_type, Enum): + return _type(as_str(value)) + except (ValueError, TypeError) as e: + logger.warning(f"Failed to deserialize {value} as {_type}: {e}") + return value + + +@dataclasses.dataclass(slots=True, kw_only=True) +class BaseModel: + name: str + _element_key_template: ClassVar[str] = ":element:{}" + # fields that are not serializable using method above and should be dealt with in the subclass + # e.g. args/kwargs for a job + _non_serializable_fields: ClassVar[Set[str]] = set() + + @classmethod + def key_for(cls, name: str) -> str: + return cls._element_key_template.format(name) + + @property + def _key(self) -> str: + return self._element_key_template.format(self.name) + + def serialize(self, with_nones: bool = False) -> Dict[str, str]: + data = dataclasses.asdict( + self, dict_factory=lambda fields: {key: value for (key, value) in fields if not key.startswith("_")} + ) + if not with_nones: + data = {k: v for k, v in data.items() if v is not None and k not in self._non_serializable_fields} + for k in data: + data[k] = _serialize(data[k]) + return data + + @classmethod + def deserialize(cls, data: Dict[str, Any]) -> Self: + types = {f.name: f.type for f in dataclasses.fields(cls) if f.name not in cls._non_serializable_fields} + for k in data: + if k in cls._non_serializable_fields: + continue + if k not in types: + logger.warning(f"Unknown field {k} in {cls.__name__}") + continue + data[k] = _deserialize(data[k], types[k]) + return cls(**data) + + +@dataclasses.dataclass(slots=True, kw_only=True) +class HashModel(BaseModel): + created_at: Optional[datetime] = None + parent: Optional[str] = None + _dirty_fields: Set[str] = dataclasses.field(default_factory=set) # fields that were changed + _save_all: bool = True # Save all fields to broker, after init, or after delete + _list_key: ClassVar[str] = ":list_all:" + _children_key_template: ClassVar[str] = ":children:{}:" + + 
def __post_init__(self): + self._dirty_fields = set() + self._save_all = True + + def __setattr__(self, key, value): + if key != "_dirty_fields" and hasattr(self, "_dirty_fields"): + self._dirty_fields.add(key) + super(HashModel, self).__setattr__(key, value) + + @property + def _parent_key(self) -> Optional[str]: + if self.parent is None: + return None + return self._children_key_template.format(self.parent) + + @classmethod + def all_names(cls, connection: Redis, parent: Optional[str] = None) -> Collection[str]: + collection_key = cls._children_key_template.format(parent) if parent else cls._list_key + collection_members = connection.smembers(collection_key) + return [r.decode() for r in collection_members] + + @classmethod + def all(cls, connection: Redis, parent: Optional[str] = None) -> List[Self]: + keys = cls.all_names(connection, parent) + items = [cls.get(k, connection) for k in keys] + return [w for w in items if w is not None] + + @classmethod + def exists(cls, name: str, connection: ConnectionType) -> bool: + if name is None: + return False + return connection.exists(cls._element_key_template.format(name)) > 0 + + @classmethod + def delete_many(cls, names: List[str], connection: ConnectionType) -> None: + for name in names: + connection.delete(cls._element_key_template.format(name)) + + @classmethod + def get(cls, name: str, connection: ConnectionType) -> Optional[Self]: + res = connection.hgetall(cls._element_key_template.format(name)) + if not res: + return None + try: + return cls.deserialize(decode_dict(res, set())) + except Exception as e: + logger.warning(f"Failed to deserialize {name}: {e}") + return None + + @classmethod + def get_many(cls, names: Sequence[str], connection: ConnectionType) -> List[Self]: + pipeline = connection.pipeline() + for name in names: + pipeline.hgetall(cls._element_key_template.format(name)) + values = pipeline.execute() + return [(cls.deserialize(decode_dict(v, set())) if v else None) for v in values] + + def 
save(self, connection: ConnectionType) -> None: + connection.sadd(self._list_key, self.name) + if self._parent_key is not None: + connection.sadd(self._parent_key, self.name) + mapping = self.serialize(with_nones=True) + if not self._save_all and len(self._dirty_fields) > 0: + mapping = {k: v for k, v in mapping.items() if k in self._dirty_fields} + none_values = {k for k, v in mapping.items() if v is None} + if none_values: + connection.hdel(self._key, *none_values) + mapping = {k: v for k, v in mapping.items() if v is not None} + if mapping: + connection.hset(self._key, mapping=mapping) + self._dirty_fields = set() + self._save_all = False + + def delete(self, connection: ConnectionType) -> None: + connection.srem(self._list_key, self._key) + if self._parent_key is not None: + connection.srem(self._parent_key, 0, self._key) + connection.delete(self._key) + self._save_all = True + + @classmethod + def count(cls, connection: ConnectionType, parent: Optional[str] = None) -> int: + if parent is not None: + result = connection.scard(cls._children_key_template.format(parent)) + else: + result = connection.scard(cls._list_key) + return result + + def get_field(self, field: str, connection: ConnectionType) -> Any: + types = {f.name: f.type for f in dataclasses.fields(self)} + res = connection.hget(self._key, field) + return _deserialize(res, types[field]) + + def set_field(self, field: str, value: Any, connection: ConnectionType, set_attribute: bool = True) -> None: + if not hasattr(self, field): + raise AttributeError(f"Field {field} does not exist") + if set_attribute: + setattr(self, field, value) + if value is None: + connection.hdel(self._key, field) + return + value = _serialize(value) + connection.hset(self._key, field, value) + + +@dataclasses.dataclass(slots=True, kw_only=True) +class StreamModel(BaseModel): + _children_key_template: ClassVar[str] = ":children:{}:" + + def __init__(self, name: str, parent: str, created_at: Optional[datetime] = None): + self.name 
= name + self.created_at: datetime = created_at or datetime.now(timezone.utc) + self.parent: str = parent + + @property + def _parent_key(self) -> str: + return self._children_key_template.format(self.parent) + + @classmethod + def all(cls, connection: ConnectionType, parent: str) -> List[Self]: + results = connection.xrevrange(cls._children_key_template.format(parent), "+", "-") + return [cls.deserialize(decode_dict(result[1], exclude_keys=set())) for result in results] + + def save(self, connection: ConnectionType) -> bool: + result = connection.xadd(self._parent_key, self.serialize(), maxlen=10) + return bool(result) diff --git a/scheduler/redis_models/job.py b/scheduler/redis_models/job.py new file mode 100644 index 0000000..90c5302 --- /dev/null +++ b/scheduler/redis_models/job.py @@ -0,0 +1,328 @@ +import base64 +import dataclasses +import inspect +import numbers +import pickle +from datetime import datetime +from enum import Enum +from typing import ClassVar, Dict, Optional, List, Callable, Any, Union, Tuple + +from scheduler.helpers import utils +from scheduler.helpers.callback import Callback +from scheduler.redis_models.base import HashModel, as_str +from scheduler.settings import SCHEDULER_CONFIG, logger +from scheduler.types import ConnectionType, Self, FunctionReferenceType +from .registry.base_registry import JobNamesRegistry +from ..helpers.utils import current_timestamp + + +class TimeoutFormatError(Exception): + pass + + +class JobStatus(str, Enum): + """The Status of Job within its lifecycle at any given time.""" + + QUEUED = "queued" + FINISHED = "finished" + FAILED = "failed" + STARTED = "started" + SCHEDULED = "scheduled" + STOPPED = "stopped" + CANCELED = "canceled" + + +@dataclasses.dataclass(slots=True, kw_only=True) +class JobModel(HashModel): + _list_key: ClassVar[str] = ":jobs:ALL:" + _children_key_template: ClassVar[str] = ":{}:jobs:" + _element_key_template: ClassVar[str] = ":jobs:{}" + _non_serializable_fields = {"args", "kwargs"} + + 
# --- JobModel fields (class header on the previous line) ---
    args: List[Any]
    kwargs: Dict[str, str]

    queue_name: str
    description: str
    func_name: str

    timeout: int = SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT
    success_ttl: int = SCHEDULER_CONFIG.DEFAULT_SUCCESS_TTL
    job_info_ttl: int = SCHEDULER_CONFIG.DEFAULT_JOB_TTL
    status: JobStatus
    created_at: datetime
    meta: Dict[str, str]
    at_front: bool = False
    last_heartbeat: Optional[datetime] = None
    worker_name: Optional[str] = None
    started_at: Optional[datetime] = None
    enqueued_at: Optional[datetime] = None
    ended_at: Optional[datetime] = None
    success_callback_name: Optional[str] = None
    success_callback_timeout: int = SCHEDULER_CONFIG.CALLBACK_TIMEOUT
    failure_callback_name: Optional[str] = None
    failure_callback_timeout: int = SCHEDULER_CONFIG.CALLBACK_TIMEOUT
    stopped_callback_name: Optional[str] = None
    stopped_callback_timeout: int = SCHEDULER_CONFIG.CALLBACK_TIMEOUT
    task_type: Optional[str] = None
    scheduled_task_id: Optional[int] = None

    def __hash__(self):
        # Identity is the job name alone (matches __eq__).
        return hash(self.name)

    def __eq__(self, other):  # noqa
        return isinstance(other, self.__class__) and self.name == other.name

    def __str__(self):
        return f"{self.name}: {self.description}"

    def get_status(self, connection: ConnectionType) -> JobStatus:
        """Read the current status straight from the broker (not the cached field)."""
        return self.get_field("status", connection=connection)

    @property
    def is_queued(self) -> bool:
        return self.status == JobStatus.QUEUED

    @property
    def is_canceled(self) -> bool:
        return self.status == JobStatus.CANCELED

    @property
    def is_failed(self) -> bool:
        return self.status == JobStatus.FAILED

    @property
    def func(self) -> Callable[[Any], Any]:
        # Resolves func_name back to the callable via the helpers module.
        return utils.callable_func(self.func_name)

    @property
    def is_scheduled_task(self) -> bool:
        return self.scheduled_task_id is not None

    def expire(self, ttl: int, connection: ConnectionType) -> None:
        """Expire the Job Model if ttl >= 0

        ttl == 0 deletes the job immediately; ttl > 0 sets a broker TTL on the hash;
        negative ttl leaves the job untouched (persist forever).
        """
        if ttl == 0:
            self.delete(connection=connection)
        elif ttl > 0:
            connection.expire(self._key, ttl)

    def persist(self, connection: ConnectionType) -> None:
        """Remove any TTL from the job hash so it no longer expires."""
        connection.persist(self._key)

    def prepare_for_execution(self, worker_name: str, registry: JobNamesRegistry, connection: ConnectionType) -> None:
        """Prepares the job for execution, setting the worker name,
        heartbeat information, status and other metadata before execution begins.
        :param worker_name: The name of the worker
        :param registry: The registry to add the job to
        :param connection: The connection to the broker
        """
        self.worker_name = worker_name
        self.last_heartbeat = utils.utcnow()
        self.started_at = self.last_heartbeat
        self.status = JobStatus.STARTED
        # The registry score is the heartbeat timestamp (used for expiry sweeps).
        registry.add(connection, self.name, self.last_heartbeat.timestamp())
        self.save(connection=connection)

    def after_execution(
        self,
        job_info_ttl: int,
        status: JobStatus,
        connection: ConnectionType,
        prev_registry: Optional[JobNamesRegistry] = None,
        new_registry: Optional[JobNamesRegistry] = None,
    ) -> None:
        """After the job is executed, update the status, heartbeat, and other metadata.

        Moves the job name from prev_registry to new_registry; when job_info_ttl == 0
        the job is not added to new_registry at all.
        """
        self.status = status
        self.ended_at = utils.utcnow()
        self.last_heartbeat = self.ended_at
        if prev_registry is not None:
            prev_registry.delete(connection, self.name)
        if new_registry is not None and job_info_ttl != 0:
            new_registry.add(connection, self.name, current_timestamp() + job_info_ttl)
        self.save(connection=connection)

    @property
    def failure_callback(self) -> Optional[Callback]:
        """Callback to run on failure, or None when none is configured."""
        if self.failure_callback_name is None:
            return None
        logger.debug(f"Running failure callbacks for {self.name}")
        return Callback(self.failure_callback_name, self.failure_callback_timeout)

    @property
    def success_callback(self) -> Optional[Callable[..., Any]]:
        """Callback to run on success, or None when none is configured."""
        if self.success_callback_name is None:
            return None
        logger.debug(f"Running success callbacks for {self.name}")
        return Callback(self.success_callback_name, self.success_callback_timeout)

    @property
    def stopped_callback(self) -> Optional[Callable[..., Any]]:
        """Callback to run when the job is stopped, or None when none is configured."""
        if self.stopped_callback_name is None:
            return None
        logger.debug(f"Running stopped callbacks for {self.name}")
        return Callback(self.stopped_callback_name, self.stopped_callback_timeout)

    def get_call_string(self):
        """Human-readable `func(args, kwargs)` representation of this job's call."""
        return _get_call_string(self.func_name, self.args, self.kwargs)

    def serialize(self, with_nones: bool = False) -> Dict[str, str]:
        """Serialize the job model to a dictionary.

        args/kwargs are excluded from the generic path (_non_serializable_fields)
        and stored as base64-encoded pickles instead.
        """
        res = super(JobModel, self).serialize(with_nones=with_nones)
        res["args"] = base64.encodebytes(pickle.dumps(self.args)).decode("utf-8")
        res["kwargs"] = base64.encodebytes(pickle.dumps(self.kwargs)).decode("utf-8")
        return res

    @classmethod
    def deserialize(cls, data: Dict[str, Any]) -> Self:
        """Deserialize the job model from a dictionary.

        NOTE(review): data.get("args")/.get("kwargs") would raise AttributeError on
        a record missing those keys — presumably serialize() always writes them; confirm.
        """
        res = super(JobModel, cls).deserialize(data)
        res.args = pickle.loads(base64.decodebytes(data.get("args").encode("utf-8")))
        res.kwargs = pickle.loads(base64.decodebytes(data.get("kwargs").encode("utf-8")))
        return res

    @classmethod
    def create(
        cls,
        connection: ConnectionType,
        func: FunctionReferenceType,
        queue_name: str,
        args: Union[List[Any], Optional[Tuple]] = None,
        kwargs: Optional[Dict[str, Any]] = None,
        result_ttl: Optional[int] = None,
        job_info_ttl: Optional[int] = None,
        status: Optional[JobStatus] = None,
        description: Optional[str] = None,
        timeout: Optional[int] = None,
        name: Optional[str] = None,
        task_type: Optional[str] = None,
        scheduled_task_id: Optional[int] = None,
        meta: Optional[Dict[str, Any]] = None,
        *,
        on_success: Optional[Callback] = None,
        on_failure: Optional[Callback] = None,
        on_stopped: Optional[Callback] = None,
        at_front: Optional[bool] = None,
    ) -> Self:
        """Creates a new job-model for the given function, arguments, and keyword arguments.

        Validates inputs, derives a unique name when none is given, resolves the
        function reference to a dotted name, persists the model, and returns it.

        :raises ValueError: for a 0 timeout, non-Callback callbacks, or a duplicate name.
        :raises TypeError: for invalid args/kwargs containers or a non-callable func.
        :returns: A job-model instance.
        """
        args = args or []
        kwargs = kwargs or {}
        timeout = _parse_timeout(timeout) or SCHEDULER_CONFIG.DEFAULT_JOB_TIMEOUT
        if timeout == 0:
            raise ValueError("0 timeout is not allowed. Use -1 for infinite timeout")
        job_info_ttl = _parse_timeout(job_info_ttl if job_info_ttl is not None else SCHEDULER_CONFIG.DEFAULT_JOB_TTL)
        result_ttl = _parse_timeout(result_ttl)
        if not isinstance(args, (tuple, list)):
            raise TypeError(f"{args!r} is not a valid args list")
        if not isinstance(kwargs, dict):
            raise TypeError(f"{kwargs!r} is not a valid kwargs dict")
        if on_success and not isinstance(on_success, Callback):
            raise ValueError("on_success must be a Callback object")
        if on_failure and not isinstance(on_failure, Callback):
            raise ValueError("on_failure must be a Callback object")
        if on_stopped and not isinstance(on_stopped, Callback):
            raise ValueError("on_stopped must be a Callback object")
        if name is not None and JobModel.exists(name, connection=connection):
            raise ValueError(f"Job with name {name} already exists")
        if name is None:
            # Name is queue + scheduled-task id + microsecond timestamp.
            date_str = utils.utcnow().strftime("%Y%m%d%H%M%S%f")
            name = f"{queue_name}:{scheduled_task_id or ''}:{date_str}"

        if inspect.ismethod(func):
            _func_name = func.__name__

        elif inspect.isfunction(func) or inspect.isbuiltin(func):
            _func_name = f"{func.__module__}.{func.__qualname__}"
        elif isinstance(func, str):
            _func_name = as_str(func)
        elif not inspect.isclass(func) and hasattr(func, "__call__"):  # a callable class instance
            _func_name = "__call__"
        else:
            raise TypeError(f"Expected a callable or a string, but got: {func}")
        # NOTE(review): this passes the raw `func` object, not the `_func_name`
        # computed above, so a callable yields "<function ...>(...)" — confirm intent.
        description = description or _get_call_string(func, args or [], kwargs or {}, max_length=75)
        job_info_ttl = job_info_ttl if job_info_ttl is not None else SCHEDULER_CONFIG.DEFAULT_JOB_TTL
        model = JobModel(
            created_at=utils.utcnow(),
            name=name,
            queue_name=queue_name,
            description=description,
            func_name=_func_name,
            args=args or [],
            kwargs=kwargs or {},
            at_front=at_front,
            task_type=task_type,
            scheduled_task_id=scheduled_task_id,
            success_callback_name=on_success.name if on_success else None,
            # NOTE(review): these timeout kwargs pass None when no callback is given,
            # overriding the int-typed field defaults — verify downstream tolerates None.
            success_callback_timeout=on_success.timeout if on_success else None,
            failure_callback_name=on_failure.name if on_failure else None,
            failure_callback_timeout=on_failure.timeout if on_failure else None,
            stopped_callback_name=on_stopped.name if on_stopped else None,
            stopped_callback_timeout=on_stopped.timeout if on_stopped else None,
            success_ttl=result_ttl,
            job_info_ttl=job_info_ttl,
            timeout=timeout,
            status=status,
            last_heartbeat=None,
            meta=meta or {},
            worker_name=None,
            enqueued_at=None,
            started_at=None,
            ended_at=None,
        )
        model.save(connection=connection)
        return model


def _get_call_string(
    func_name: Optional[str], args: Any, kwargs: Dict[Any, Any], max_length: Optional[int] = None
) -> Optional[str]:
    """
    Returns a string representation of the call, formatted as a regular
    Python function invocation statement. If max_length is not None, truncate
    arguments with representation longer than max_length.

    :param func_name: The function name
    :param args: The function arguments
    :param kwargs: The function kwargs
    :param max_length: The max length of the return string
    :return: A string representation of the function call
    """
    if func_name is None:
        return None

    arg_list = [as_str(_truncate_long_string(repr(arg), max_length)) for arg in args]

    # kwargs are sorted so the representation is deterministic.
    list_kwargs = [f"{k}={as_str(_truncate_long_string(repr(v), max_length))}" for k, v in kwargs.items()]
    arg_list += sorted(list_kwargs)
    args = ", ".join(arg_list)

    return f"{func_name}({args})"


def _truncate_long_string(data: str, max_length: Optional[int] = None) -> str:
    """Truncate arguments with representation longer than max_length"""
    if max_length is None:
        return data
    return (data[:max_length] + "...") if len(data) > max_length else data


def _parse_timeout(timeout: Union[int, float, str]) -> int:
    """Transfer all kinds of timeout format to an integer representing seconds

    Accepts ints (returned as-is), numeric strings, and strings like "1h"/"23m"
    with units d/h/m/s. None passes through unchanged.
    :raises TimeoutFormatError: for an unparseable string.
    """
    if not isinstance(timeout, numbers.Integral) and timeout is not None:
        try:
            timeout = int(timeout)
        except ValueError:
            digit, unit = timeout[:-1], (timeout[-1:]).lower()
            unit_second = {"d": 86400, "h": 3600, "m": 60, "s": 1}
            try:
                timeout = int(digit) * unit_second[unit]
            except (ValueError, KeyError):
                raise TimeoutFormatError(
                    "Timeout must be an integer or a string representing an integer, or "
                    'a string with format: digits + unit, unit can be "d", "h", "m", "s", '
                    'such as "1h", "23m".'
                )

    return timeout
diff --git a/scheduler/redis_models/lock.py b/scheduler/redis_models/lock.py
new file mode 100644
index 0000000..aa060f0
--- /dev/null
+++ b/scheduler/redis_models/lock.py
@@ -0,0 +1,36 @@
from typing import Optional, Any

from scheduler.types import ConnectionType


class KvLock(object):
    """A simple broker-backed lock implemented as a SET NX key."""

    def __init__(self, name: str) -> None:
        self.name = name
        self.acquired = False

    @property
    def _locking_key(self) -> str:
        # Broker key guarding this lock.
        return f"_lock:{self.name}"

    def acquire(self, val: Any, connection: ConnectionType, expire: Optional[int] = None) -> bool:
        """Try to take the lock (SET NX), optionally with an expiry in seconds.

        NOTE(review): redis-py returns None (not False) when NX fails, so
        `self.acquired` may be None despite the bool annotation — confirm callers
        only use it truthily.
        """
        self.acquired = connection.set(self._locking_key, val, nx=True, ex=expire)
        return self.acquired

    def expire(self, connection: ConnectionType, expire: Optional[int] = None) -> bool:
        """Set/refresh the lock's TTL in seconds."""
        return connection.expire(self._locking_key, expire)

    def release(self, connection: ConnectionType):
        # Unconditional release: deletes the key regardless of who holds it.
        connection.delete(self._locking_key)

    def value(self, connection: ConnectionType) -> Any:
        """Return the value stored by the current lock holder (or None)."""
        return connection.get(self._locking_key)


class SchedulerLock(KvLock):
    """Lock guarding the scheduler for a given queue."""

    def __init__(self, queue_name: str) -> None:
        super().__init__(f"lock:scheduler:{queue_name}")


class QueueLock(KvLock):
    """Lock guarding a queue."""

    def __init__(self, queue_name: str) -> None:
        super().__init__(f"queue:{queue_name}")
diff --git a/scheduler/redis_models/registry/__init__.py b/scheduler/redis_models/registry/__init__.py
new file mode 100644
index 0000000..e69de29
diff --git a/scheduler/redis_models/registry/base_registry.py b/scheduler/redis_models/registry/base_registry.py
new file mode 100644
index 0000000..af9b211
--- /dev/null
+++ b/scheduler/redis_models/registry/base_registry.py
@@ -0,0 +1,118 @@
import dataclasses
from collections.abc import Sequence
from typing import ClassVar, Optional, List, Tuple, Any

from scheduler.helpers.utils import current_timestamp
from scheduler.redis_models.base import as_str, BaseModel
from scheduler.settings import logger
from scheduler.types import ConnectionType, Self


class
DequeueTimeout(Exception): + pass + + +@dataclasses.dataclass(slots=True, kw_only=True) +class ZSetModel(BaseModel): + def cleanup(self, connection: ConnectionType, timestamp: Optional[float] = None) -> None: + """Remove expired jobs from registry.""" + score = timestamp or current_timestamp() + connection.zremrangebyscore(self._key, 0, score) + + def count(self, connection: ConnectionType) -> int: + """Returns the number of jobs in this registry""" + self.cleanup(connection=connection) + return connection.zcard(self._key) + + def add(self, connection: ConnectionType, job_name: str, score: float, update_existing_only: bool = False) -> int: + return connection.zadd(self._key, {job_name: float(score)}, xx=update_existing_only) + + def delete(self, connection: ConnectionType, job_name: str) -> None: + connection.zrem(self._key, job_name) + + +class JobNamesRegistry(ZSetModel): + _element_key_template: ClassVar[str] = ":registry:{}" + + def __init__(self, connection: ConnectionType, name: str) -> None: + super().__init__(name=name) + self.connection = connection + + def __len__(self) -> int: + return self.count(self.connection) + + def __contains__(self, item: str) -> bool: + return self.connection.zrank(self._key, item) is not None + + def all(self, start: int = 0, end: int = -1) -> List[str]: + """Returns list of all job names. + + :param start: Start score/timestamp, default to 0. + :param end: End score/timestamp, default to -1 (i.e., no max score). + :returns: Returns list of all job names with timestamp from start to end + """ + self.cleanup(self.connection) + res = [as_str(job_name) for job_name in self.connection.zrange(self._key, start, end)] + logger.debug(f"Getting jobs for registry {self._key}: {len(res)} found.") + return res + + def all_with_timestamps(self, start: int = 0, end: int = -1) -> List[Tuple[str, float]]: + """Returns list of all job names with their timestamps. + + :param start: Start score/timestamp, default to 0. 
+ :param end: End score/timestamp, default to -1 (i.e., no max score). + :returns: Returns list of all job names with timestamp from start to end + """ + self.cleanup(self.connection) + res = self.connection.zrange(self._key, start, end, withscores=True) + logger.debug(f"Getting jobs for registry {self._key}: {len(res)} found.") + return [(as_str(job_name), timestamp) for job_name, timestamp in res] + + def get_first(self) -> Optional[str]: + """Returns the first job in the registry.""" + self.cleanup(self.connection) + first_job = self.connection.zrange(self._key, 0, 0) + return first_job[0].decode() if first_job else None + + def get_last_timestamp(self) -> Optional[float]: + """Returns the last timestamp in the registry.""" + self.cleanup(self.connection) + last_timestamp = self.connection.zrange(self._key, -1, -1, withscores=True) + return last_timestamp[0][1] if last_timestamp else None + + @property + def key(self) -> str: + return self._key + + @classmethod + def pop( + cls, connection: ConnectionType, registries: Sequence[Self], timeout: Optional[int] + ) -> Tuple[Optional[str], Optional[str]]: + """Helper method to abstract away from some Redis API details + + :param connection: Broker connection + :param registries: List of registries to pop from + :param timeout: Timeout in seconds + :raises ValueError: If timeout of 0 was passed + :raises DequeueTimeout: BLPOP Timeout + :returns: Tuple of registry key and job name + """ + if timeout == 0: + raise ValueError("Indefinite timeout not supported. 
Please pick a timeout value > 0") + registry_keys = [r.key for r in registries] + if timeout is not None: # blocking variant + colored_registries = ",".join(map(str, [str(registry) for registry in registry_keys])) + logger.debug(f"Starting BZMPOP operation for queues {colored_registries} with timeout of {timeout}") + result = connection.bzpopmin(registry_keys, timeout) + if not result: + logger.debug(f"BZMPOP timeout, no jobs found on queues {colored_registries}") + raise DequeueTimeout(timeout, registry_keys) + registry_key, job_name, timestamp = result + return as_str(registry_key), as_str(job_name) + else: # non-blocking variant + for registry_key in registry_keys: + results: List[Any] = connection.zpopmin(registry_key) + if results: + job_name, timestamp = results[0] + return as_str(registry_key), as_str(job_name) + return None, None diff --git a/scheduler/redis_models/registry/queue_registries.py b/scheduler/redis_models/registry/queue_registries.py new file mode 100644 index 0000000..9a0d87e --- /dev/null +++ b/scheduler/redis_models/registry/queue_registries.py @@ -0,0 +1,117 @@ +import time +from datetime import datetime, timedelta, timezone +from typing import ClassVar, Optional, List, Tuple + +from scheduler.helpers.utils import current_timestamp +from scheduler.types import ConnectionType +from .base_registry import JobNamesRegistry +from .. 
import as_str +from ..job import JobModel + + +class QueuedJobRegistry(JobNamesRegistry): + _element_key_template: ClassVar[str] = ":registry:{}:queued_jobs" + + def cleanup(self, connection: ConnectionType, timestamp: Optional[float] = None) -> None: + """This method is only here to prevent errors because this method is automatically called by `count()` + and `all()` methods implemented in JobIdsRegistry.""" + pass + + def compact(self) -> None: + """Removes all "dead" jobs from the queue by cycling through it, while guaranteeing FIFO semantics.""" + jobs_with_ts = self.all_with_timestamps() + for job_name, timestamp in jobs_with_ts: + if not JobModel.exists(job_name, self.connection): + self.delete(connection=self.connection, job_name=job_name) + + def empty(self) -> None: + queued_jobs_count = self.count(connection=self.connection) + with self.connection.pipeline() as pipe: + for offset in range(0, queued_jobs_count, 1000): + job_names = self.all(offset, 1000) + for job_name in job_names: + self.delete(connection=pipe, job_name=job_name) + JobModel.delete_many(job_names, connection=pipe) + pipe.execute() + + +class FinishedJobRegistry(JobNamesRegistry): + _element_key_template: ClassVar[str] = ":registry:{}:finished_jobs" + + +class FailedJobRegistry(JobNamesRegistry): + _element_key_template: ClassVar[str] = ":registry:{}:failed_jobs" + + +class CanceledJobRegistry(JobNamesRegistry): + _element_key_template: ClassVar[str] = ":registry:{}:canceled_jobs" + + def cleanup(self, connection: ConnectionType, timestamp: Optional[float] = None) -> None: + """This method is only here to prevent errors because this method is automatically called by `count()` + and `all()` methods implemented in JobIdsRegistry.""" + pass + + +class ScheduledJobRegistry(JobNamesRegistry): + _element_key_template: ClassVar[str] = ":registry:{}:scheduled_jobs" + + def cleanup(self, connection: ConnectionType, timestamp: Optional[float] = None) -> None: + """This method is only here to prevent 
errors because this method is automatically called by `count()` + and `all()` methods implemented in JobIdsRegistry.""" + pass + + def schedule(self, connection: ConnectionType, job_name: str, scheduled_datetime: datetime) -> int: + """Adds job_name to registry, scored by its execution time (in UTC). + If datetime has no tzinfo, it will assume localtimezone. + + :param connection: Broker connection + :param job_name: Job name to schedule + :param scheduled_datetime: datetime to schedule job + """ + # If datetime has no timezone, assume server's local timezone + if not scheduled_datetime.tzinfo: + tz = timezone(timedelta(seconds=-(time.timezone if time.daylight == 0 else time.altzone))) + scheduled_datetime = scheduled_datetime.replace(tzinfo=tz) + + timestamp = scheduled_datetime.timestamp() + return self.add(connection=connection, job_name=job_name, score=timestamp) + + def get_jobs_to_schedule(self, timestamp: int, chunk_size: int = 1000) -> List[str]: + """Gets a list of job names that should be scheduled. + + :param timestamp: timestamp/score of jobs in SortedSet. + :param chunk_size: Max results to return. + :returns: A list of job names + """ + jobs_to_schedule = self.connection.zrangebyscore(self._key, 0, max=timestamp, start=0, num=chunk_size) + return [as_str(job_name) for job_name in jobs_to_schedule] + + def get_scheduled_time(self, job_name: str) -> Optional[datetime]: + """Returns datetime (UTC) at which job is scheduled to be enqueued + + :param job_name: Job name + :returns: The scheduled time as datetime object, or None if job is not found + """ + + score: Optional[float] = self.connection.zscore(self._key, job_name) + if not score: + return None + + return datetime.fromtimestamp(score, tz=timezone.utc) + + +class ActiveJobRegistry(JobNamesRegistry): + """Registry of currently executing jobs. 
Each queue maintains a ActiveJobRegistry.""" + + _element_key_template: ClassVar[str] = ":registry:{}:active" + + def get_job_names_before(self, connection: ConnectionType, timestamp: Optional[float]) -> List[Tuple[str, float]]: + """Returns job names whose score is lower than a timestamp timestamp. + + Returns names for jobs with an expiry time earlier than timestamp, + specified as seconds since the Unix epoch. + timestamp defaults to calltime if unspecified. + """ + score = timestamp or current_timestamp() + jobs_before = connection.zrangebyscore(self._key, 0, score, withscores=True) + return [(as_str(job_name), score) for (job_name, score) in jobs_before] diff --git a/scheduler/redis_models/result.py b/scheduler/redis_models/result.py new file mode 100644 index 0000000..a89af18 --- /dev/null +++ b/scheduler/redis_models/result.py @@ -0,0 +1,67 @@ +import dataclasses +from datetime import datetime +from enum import Enum +from typing import Optional, Any, ClassVar, List + +from scheduler.helpers.utils import utcnow +from scheduler.redis_models.base import StreamModel, decode_dict +from scheduler.types import ConnectionType, Self + + +class ResultType(Enum): + SUCCESSFUL = "successful" + FAILED = "failed" + STOPPED = "stopped" + + +@dataclasses.dataclass(slots=True, kw_only=True) +class Result(StreamModel): + parent: str + type: ResultType + worker_name: str + ttl: Optional[int] = 0 + name: Optional[str] = None + created_at: datetime = dataclasses.field(default_factory=utcnow) + return_value: Optional[Any] = None + exc_string: Optional[str] = None + + _list_key: ClassVar[str] = ":job-results:" + _children_key_template: ClassVar[str] = ":job-results:{}:" + _element_key_template: ClassVar[str] = ":job-results:{}" + + @classmethod + def create( + cls, + connection: ConnectionType, + job_name: str, + worker_name: str, + _type: ResultType, + ttl: int, + return_value: Any = None, + exc_string: Optional[str] = None, + ) -> Self: + result = cls( + parent=job_name, + 
ttl=ttl, + type=_type, + return_value=return_value, + exc_string=exc_string, + worker_name=worker_name, + ) + result.save(connection) + return result + + @classmethod + def fetch_latest(cls, connection: ConnectionType, job_name: str) -> Optional["Result"]: + """Returns the latest result for given job_name. + + :param connection: Broker connection. + :param job_name: Job name. + :return: Result instance or None if no result is available. + """ + response: List[Any] = connection.xrevrange(cls._children_key_template.format(job_name), "+", "-", count=1) + if not response: + return None + result_id, payload = response[0] + res = cls.deserialize(decode_dict(payload, set())) + return res diff --git a/scheduler/redis_models/worker.py b/scheduler/redis_models/worker.py new file mode 100644 index 0000000..5d31600 --- /dev/null +++ b/scheduler/redis_models/worker.py @@ -0,0 +1,121 @@ +import dataclasses +from datetime import datetime +from enum import Enum +from typing import List, Optional, ClassVar, Any, Generator + +from scheduler.helpers.utils import utcnow +from scheduler.redis_models.base import HashModel, MAX_KEYS +from scheduler.settings import logger +from scheduler.types import ConnectionType, Self + +DEFAULT_WORKER_TTL = 420 + + +class WorkerStatus(str, Enum): + CREATED = "created" + STARTING = "starting" + STARTED = "started" + SUSPENDED = "suspended" + BUSY = "busy" + IDLE = "idle" + + +@dataclasses.dataclass(slots=True, kw_only=True) +class WorkerModel(HashModel): + name: str + queue_names: List[str] + pid: int + hostname: str + ip_address: str + version: str + python_version: str + state: WorkerStatus + job_execution_process_pid: int = 0 + successful_job_count: int = 0 + failed_job_count: int = 0 + completed_jobs: int = 0 + birth: Optional[datetime] = None + last_heartbeat: Optional[datetime] = None + is_suspended: bool = False + current_job_name: Optional[str] = None + stopped_job_name: Optional[str] = None + total_working_time_ms: float = 0.0 + 
current_job_working_time: float = 0 + last_cleaned_at: Optional[datetime] = None + shutdown_requested_date: Optional[datetime] = None + has_scheduler: bool = False + death: Optional[datetime] = None + + _list_key: ClassVar[str] = ":workers:ALL:" + _children_key_template: ClassVar[str] = ":queue-workers:{}:" + _element_key_template: ClassVar[str] = ":workers:{}" + + def save(self, connection: ConnectionType) -> None: + pipeline = connection.pipeline() + super(WorkerModel, self).save(pipeline) + for queue_name in self.queue_names: + pipeline.sadd(self._children_key_template.format(queue_name), self.name) + pipeline.expire(self._key, DEFAULT_WORKER_TTL + 60) + pipeline.execute() + + def delete(self, connection: ConnectionType) -> None: + logger.debug(f"Deleting worker {self.name}") + pipeline = connection.pipeline() + now = utcnow() + self.death = now + pipeline.hset(self._key, "death", now.isoformat()) + pipeline.expire(self._key, 60) + pipeline.srem(self._list_key, self.name) + for queue_name in self.queue_names: + pipeline.srem(self._children_key_template.format(queue_name), self.name) + pipeline.execute() + + def __eq__(self, other: Self) -> bool: + if not isinstance(other, self.__class__): + raise TypeError("Cannot compare workers to other types (of workers)") + return self._key == other._key + + def __hash__(self): + """The hash does not take the database/connection into account""" + return hash((self._key, ",".join(self.queue_names))) + + def set_current_job_working_time(self, job_execution_time: int, connection: ConnectionType) -> None: + self.set_field("current_job_working_time", job_execution_time, connection=connection) + + def heartbeat(self, connection: ConnectionType, timeout: Optional[int] = None) -> None: + timeout = timeout or DEFAULT_WORKER_TTL + 60 + connection.expire(self._key, timeout) + now = utcnow() + self.set_field("last_heartbeat", now, connection=connection) + logger.debug(f"Next heartbeat for worker {self._key} should arrive in {timeout} 
seconds.") + + @classmethod + def cleanup(cls, connection: ConnectionType, queue_name: Optional[str] = None): + worker_names = cls.all_names(connection, queue_name) + worker_keys = [cls.key_for(worker_name) for worker_name in worker_names] + with connection.pipeline() as pipeline: + for worker_key in worker_keys: + pipeline.exists(worker_key) + worker_exist = pipeline.execute() + invalid_workers = list() + for i, worker_name in enumerate(worker_names): + if not worker_exist[i]: + invalid_workers.append(worker_name) + if len(invalid_workers) == 0: + return + for invalid_subset in _split_list(invalid_workers, MAX_KEYS): + pipeline.srem(cls._list_key, *invalid_subset) + if queue_name: + pipeline.srem(cls._children_key_template.format(queue_name), *invalid_subset) + pipeline.execute() + + +def _split_list(a_list: List[str], segment_size: int) -> Generator[list[str], Any, None]: + """Splits a list into multiple smaller lists having size `segment_size` + + :param a_list: The list to split + :param segment_size: The segment size to split into + :returns: The list split into smaller lists + """ + for i in range(0, len(a_list), segment_size): + yield a_list[i : i + segment_size] diff --git a/scheduler/rq_classes.py b/scheduler/rq_classes.py deleted file mode 100644 index 2014ca3..0000000 --- a/scheduler/rq_classes.py +++ /dev/null @@ -1,279 +0,0 @@ -from typing import List, Optional, Union - -import django -from django.apps import apps -from rq import Worker -from rq.command import send_stop_job_command -from rq.decorators import job -from rq.exceptions import InvalidJobOperation -from rq.job import Job, JobStatus -from rq.job import get_current_job # noqa -from rq.queue import Queue, logger -from rq.registry import ( - DeferredJobRegistry, - FailedJobRegistry, - FinishedJobRegistry, - ScheduledJobRegistry, - StartedJobRegistry, - CanceledJobRegistry, - BaseRegistry, -) -from rq.scheduler import RQScheduler -from rq.worker import WorkerStatus - -from scheduler import 
settings -from scheduler.broker_types import PipelineType, ConnectionType - -MODEL_NAMES = ["Task"] -TASK_TYPES = ["OnceTaskType", "RepeatableTaskType", "CronTaskType"] - -rq_job_decorator = job -ExecutionStatus = JobStatus -InvalidJobOperation = InvalidJobOperation - - -def register_sentry(sentry_dsn, **opts): - from rq.contrib.sentry import register_sentry as rq_register_sentry - - rq_register_sentry(sentry_dsn, **opts) - - -def as_str(v: Union[bytes, str]) -> Optional[str]: - """Converts a `bytes` value to a string using `utf-8`. - - :param v: The value (None/bytes/str) - :raises: ValueError: If the value is not `bytes` or `str` - :returns: Either the decoded string or None - """ - if v is None: - return None - if isinstance(v, bytes): - return v.decode("utf-8") - if isinstance(v, str): - return v - raise ValueError("Unknown type %r" % type(v)) - - -class JobExecution(Job): - def __eq__(self, other) -> bool: - return isinstance(other, Job) and self.id == other.id - - @property - def is_scheduled_task(self) -> bool: - return self.meta.get("scheduled_task_id", None) is not None - - def is_execution_of(self, task: "Task") -> bool: # noqa: F821 - return ( - self.meta.get("task_type", None) == task.task_type and self.meta.get("scheduled_task_id", None) == task.id - ) - - def stop_execution(self, connection: ConnectionType): - send_stop_job_command(connection, self.id) - - -class DjangoWorker(Worker): - def __init__(self, *args, **kwargs): - self.fork_job_execution = kwargs.pop("fork_job_execution", True) - job_class = kwargs.get("job_class") or JobExecution - if not isinstance(job_class, type) or not issubclass(job_class, JobExecution): - raise ValueError("job_class must be a subclass of JobExecution") - - # Update kwargs with the potentially modified job_class - kwargs["job_class"] = job_class - kwargs["queue_class"] = DjangoQueue - super(DjangoWorker, self).__init__(*args, **kwargs) - - def __eq__(self, other): - return isinstance(other, Worker) and self.key == 
other.key and self.name == other.name - - def __hash__(self): - return hash((self.name, self.key, ",".join(self.queue_names()))) - - def __str__(self): - return f"{self.name}/{','.join(self.queue_names())}" - - def _start_scheduler( - self, - burst: bool = False, - logging_level: str = "INFO", - date_format: str = "%H:%M:%S", - log_format: str = "%(asctime)s %(message)s", - ) -> None: - """Starts the scheduler process. - This is specifically designed to be run by the worker when running the `work()` method. - Instantiates the DjangoScheduler and tries to acquire a lock. - If the lock is acquired, start scheduler. - If the worker is on burst mode, just enqueues scheduled jobs and quits, - otherwise, starts the scheduler in a separate process. - - :param burst (bool, optional): Whether to work on burst mode. Defaults to False. - :param logging_level (str, optional): Logging level to use. Defaults to "INFO". - :param date_format (str, optional): Date Format. Defaults to DEFAULT_LOGGING_DATE_FORMAT. - :param log_format (str, optional): Log Format. Defaults to DEFAULT_LOGGING_FORMAT. 
- """ - self.scheduler = DjangoScheduler( - self.queues, - connection=self.connection, - logging_level=logging_level, - date_format=date_format, - log_format=log_format, - serializer=self.serializer, - ) - self.scheduler.acquire_locks() - if self.scheduler.acquired_locks: - if burst: - self.scheduler.enqueue_scheduled_jobs() - self.scheduler.release_locks() - else: - proc = self.scheduler.start() - self._set_property("scheduler_pid", proc.pid) - - def execute_job(self, job: "Job", queue: "Queue") -> None: - if self.fork_job_execution: - super(DjangoWorker, self).execute_job(job, queue) - else: - self.set_state(WorkerStatus.BUSY) - self.perform_job(job, queue) - self.set_state(WorkerStatus.IDLE) - - def work(self, **kwargs) -> bool: - kwargs.setdefault("with_scheduler", True) - return super(DjangoWorker, self).work(**kwargs) - - def _set_property(self, prop_name: str, val, pipeline: Optional[PipelineType] = None) -> None: - connection = pipeline if pipeline is not None else self.connection - if val is None: - connection.hdel(self.key, prop_name) - else: - connection.hset(self.key, prop_name, val) - - def _get_property(self, prop_name: str, pipeline: Optional[PipelineType] = None) -> Optional[str]: - connection = pipeline if pipeline is not None else self.connection - res = connection.hget(self.key, prop_name) - return as_str(res) - - def scheduler_pid(self) -> Optional[int]: - if len(self.queues) == 0: - logger.warning("No queues to get scheduler pid from") - return None - pid = self.connection.get(DjangoScheduler.get_locking_key(self.queues[0].name)) - return int(pid.decode()) if pid is not None else None - - -class DjangoQueue(Queue): - """A subclass of RQ's QUEUE that allows jobs to be stored temporarily to be enqueued later at the end of Django's - request/response cycle.""" - - REGISTRIES = dict( - finished="finished_job_registry", - failed="failed_job_registry", - scheduled="scheduled_job_registry", - started="started_job_registry", - 
deferred="deferred_job_registry", - canceled="canceled_job_registry", - ) - - def __init__(self, *args, **kwargs) -> None: - kwargs["job_class"] = JobExecution - super(DjangoQueue, self).__init__(*args, **kwargs) - - def get_registry(self, name: str) -> Union[None, BaseRegistry, "DjangoQueue"]: - name = name.lower() - if name == "queued": - return self - elif name in DjangoQueue.REGISTRIES: - return getattr(self, DjangoQueue.REGISTRIES[name]) - return None - - @property - def finished_job_registry(self) -> FinishedJobRegistry: - return FinishedJobRegistry(self.name, self.connection) - - @property - def started_job_registry(self) -> StartedJobRegistry: - return StartedJobRegistry( - self.name, - self.connection, - job_class=JobExecution, - ) - - @property - def deferred_job_registry(self) -> DeferredJobRegistry: - return DeferredJobRegistry( - self.name, - self.connection, - job_class=JobExecution, - ) - - @property - def failed_job_registry(self) -> FailedJobRegistry: - return FailedJobRegistry( - self.name, - self.connection, - job_class=JobExecution, - ) - - @property - def scheduled_job_registry(self) -> ScheduledJobRegistry: - return ScheduledJobRegistry( - self.name, - self.connection, - job_class=JobExecution, - ) - - @property - def canceled_job_registry(self) -> CanceledJobRegistry: - return CanceledJobRegistry( - self.name, - self.connection, - job_class=JobExecution, - ) - - def get_all_job_ids(self) -> List[str]: - res = list() - res.extend(self.get_job_ids()) - res.extend(self.finished_job_registry.get_job_ids()) - res.extend(self.started_job_registry.get_job_ids()) - res.extend(self.deferred_job_registry.get_job_ids()) - res.extend(self.failed_job_registry.get_job_ids()) - res.extend(self.scheduled_job_registry.get_job_ids()) - res.extend(self.canceled_job_registry.get_job_ids()) - return res - - def get_all_jobs(self) -> List[JobExecution]: - job_ids = self.get_all_job_ids() - return list(filter(lambda j: j is not None, [self.fetch_job(job_id) for 
job_id in job_ids])) - - def clean_registries(self) -> None: - self.started_job_registry.cleanup() - self.failed_job_registry.cleanup() - self.finished_job_registry.cleanup() - - def remove_job_id(self, job_id: str) -> None: - self.connection.lrem(self.key, 0, job_id) - - def last_job_id(self) -> Optional[str]: - return self.connection.lindex(self.key, 0) - - -class DjangoScheduler(RQScheduler): - def __init__(self, *args, **kwargs) -> None: - kwargs.setdefault("interval", settings.SCHEDULER_CONFIG.SCHEDULER_INTERVAL) - super(DjangoScheduler, self).__init__(*args, **kwargs) - - @staticmethod - def reschedule_all_jobs(): - for model_name in MODEL_NAMES: - model = apps.get_model(app_label="scheduler", model_name=model_name) - enabled_jobs = model.objects.filter(enabled=True) - for item in enabled_jobs: - logger.debug(f"Rescheduling {str(item)}") - item.save() - - def work(self) -> None: - django.setup() - super(DjangoScheduler, self).work() - - def enqueue_scheduled_jobs(self) -> None: - self.reschedule_all_jobs() - super(DjangoScheduler, self).enqueue_scheduled_jobs() diff --git a/scheduler/settings.py b/scheduler/settings.py index db770be..405c5ab 100644 --- a/scheduler/settings.py +++ b/scheduler/settings.py @@ -1,66 +1,58 @@ import logging -from dataclasses import dataclass -from enum import Enum -from typing import Callable +from typing import List, Dict from django.conf import settings from django.core.exceptions import ImproperlyConfigured -logger = logging.getLogger(__package__) +from scheduler.types import SchedulerConfiguration, QueueConfiguration -QUEUES = dict() +logger = logging.getLogger("scheduler") +logging.basicConfig(level=logging.DEBUG) +_QUEUES: Dict[str, QueueConfiguration] = dict() +SCHEDULER_CONFIG: SchedulerConfiguration = SchedulerConfiguration() -class Broker(Enum): - REDIS = "redis" - FAKEREDIS = "fakeredis" - VALKEY = "valkey" - -@dataclass -class SchedulerConfig: - EXECUTIONS_IN_PAGE: int - DEFAULT_RESULT_TTL: int - DEFAULT_TIMEOUT: int - 
SCHEDULER_INTERVAL: int - BROKER: Broker - TOKEN_VALIDATION_METHOD: Callable[[str], bool] - - -def _token_validation(token: str) -> bool: - return False - - -SCHEDULER_CONFIG: SchedulerConfig = SchedulerConfig( - EXECUTIONS_IN_PAGE=20, - DEFAULT_RESULT_TTL=600, - DEFAULT_TIMEOUT=300, - SCHEDULER_INTERVAL=10, - BROKER=Broker.REDIS, - TOKEN_VALIDATION_METHOD=_token_validation, -) +class QueueNotFoundError(Exception): + pass def conf_settings(): - global QUEUES + global _QUEUES global SCHEDULER_CONFIG - QUEUES = getattr(settings, "SCHEDULER_QUEUES", None) - if QUEUES is None: - logger.warning("Configuration using RQ_QUEUES is deprecated. Use SCHEDULER_QUEUES instead") - QUEUES = getattr(settings, "RQ_QUEUES", None) - if QUEUES is None: - raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py") + app_queues = getattr(settings, "SCHEDULER_QUEUES", None) + if app_queues is None or not isinstance(app_queues, dict): + raise ImproperlyConfigured("You have to define SCHEDULER_QUEUES in settings.py as dict") + + for queue_name, queue_config in app_queues.items(): + if isinstance(queue_config, QueueConfiguration): + _QUEUES[queue_name] = queue_config + elif isinstance(queue_config, dict): + _QUEUES[queue_name] = QueueConfiguration(**queue_config) + else: + raise ImproperlyConfigured(f"Queue {queue_name} configuration should be a QueueConfiguration or dict") user_settings = getattr(settings, "SCHEDULER_CONFIG", {}) - if "FAKEREDIS" in user_settings: - logger.warning("Configuration using FAKEREDIS is deprecated. 
Use BROKER='fakeredis' instead") - user_settings["BROKER"] = Broker.FAKEREDIS if user_settings["FAKEREDIS"] else Broker.REDIS - user_settings.pop("FAKEREDIS") - for k in user_settings: + if isinstance(user_settings, SchedulerConfiguration): + SCHEDULER_CONFIG = user_settings # type: ignore + return + if not isinstance(user_settings, dict): + raise ImproperlyConfigured("SCHEDULER_CONFIG should be a SchedulerConfiguration or dict") + for k, v in user_settings.items(): if k not in SCHEDULER_CONFIG.__annotations__: raise ImproperlyConfigured(f"Unknown setting {k} in SCHEDULER_CONFIG") - setattr(SCHEDULER_CONFIG, k, user_settings[k]) + setattr(SCHEDULER_CONFIG, k, v) conf_settings() + + +def get_queue_names() -> List[str]: + return list(_QUEUES.keys()) + + +def get_queue_configuration(queue_name: str) -> QueueConfiguration: + if queue_name not in _QUEUES: + raise QueueNotFoundError(f"Queue {queue_name} not found, queues={_QUEUES.keys()}") + return _QUEUES[queue_name] diff --git a/scheduler/templates/admin/scheduler/confirm_action.html b/scheduler/templates/admin/scheduler/confirm_action.html index c61b8bf..69dd45c 100644 --- a/scheduler/templates/admin/scheduler/confirm_action.html +++ b/scheduler/templates/admin/scheduler/confirm_action.html @@ -22,7 +22,7 @@
    {% for job in jobs %}
  • - {{ job.id }} + {{ job.name }} {{ job | show_func_name }}
  • {% endfor %} @@ -31,7 +31,7 @@ {% csrf_token %}
    {% for job in jobs %} - + {% endfor %} diff --git a/scheduler/templates/admin/scheduler/job_detail.html b/scheduler/templates/admin/scheduler/job_detail.html index 6892844..5263c73 100644 --- a/scheduler/templates/admin/scheduler/job_detail.html +++ b/scheduler/templates/admin/scheduler/job_detail.html @@ -8,15 +8,15 @@ HomeQueues{{ queue.name }} › - {{ job.id }} + {{ job.name }}
    {% endblock %} {% block content_title %} -

    Job {{ job.id }} +

    Job {{ job.name }} {% if job.is_scheduled_task %} - Link to scheduled job + Link to scheduled job {% endif %}

    @@ -24,123 +24,136 @@

    Job {{ job.id }} {% block content %}
    -
    +
    -
    {{ job.origin }}
    +
    {{ job.queue_name }}
    +
    +
    +
    + +
    + {% if data_is_valid %} + {{ job.func_name }}( + {% if job.args %} + {% for arg in job.args %} + {{ arg|force_escape }}, + {% endfor %} + {% endif %} + {% for key, value in job.kwargs.items %} + {{ key }}={{ value|force_escape }}, + {% endfor %}) + {% else %} + Unpickling Error + {% endif %} +
    +
    +
    + +
    {{ job | show_func_name }}
    +
    -
    - -
    {{ job.timeout }}
    -
    - -
    - -
    {{ job.result_ttl }}
    -
    - -
    - -
    {{ job.created_at|date:"Y-m-d, H:i:s"|default:"-" }}
    -
    - - -
    - -
    {{ job.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }}
    -
    - -
    - -
    {{ job.started_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    + +
    + {% for k in job.meta %} +
    + +
    {{ job.meta | get_item:k }}
    +
    + {% endfor %} +
    +
    +
    -
    - -
    {{ job.ended_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    +
    +
    + +
    {{ job.timeout }}
    +
    +
    + +
    {{ job.success_ttl }}
    +
    +
    +
    +
    +

    Job queue interation

    +
    +
    +
    + +
    {{ job.status.value | capfirst }}
    +
    +
    + +
    {{ job.created_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    +
    + +
    {{ job.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    -
    - -
    {{ job.get_status }}
    -
    +
    + +
    {{ job.started_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    -
    - -
    - {% if data_is_valid %} - {{ job.func_name }}( - {% if job.args %} - {% for arg in job.args %} - {{ arg|force_escape }}, - {% endfor %} - {% endif %} - {% for key, value in job.kwargs.items %} - {{ key }}={{ value|force_escape }}, - {% endfor %}) - {% else %} - Unpickling Error - {% endif %} +
    + +
    {{ job.ended_at|date:"Y-m-d, H:i:s"|default:"-" }}
    +
    -
    - -
    {{ job | show_func_name }}
    -
    - -
    - -
    - {% for k in job.meta %} -
    - -
    {{ job.meta | get_item:k }}
    -
    - {% endfor %} +
    +
    +

    Last result

    +
    +
    + +
    {{ last_result.created_at|date:"Y-m-d, H:i:s" }}
    -
    - - -
    -
    - +
    +
    - {% if dependency_id %} - - {{ dependency_id }} - + {% if last_result.worker_name %} + {{ last_result.worker_name }} + {% else %} + - {% endif %}
    - {% if exc_info %} + + {% if last_result.exc_string %}
    -
    {% if job.exc_info %}{{ job.exc_info|linebreaks }}{% endif %}
    +
    {{ last_result.exc_string|default:"-"|linebreaks }}
    -
    {% endif %}
    - -
    {{ job.result | default:'-' }}
    + +
    +
    {{ last_result.return_value|default:'-'|linebreaks }}
    +
    - -
    {% if job.is_started %} - -
    - - {% for result in job.results %} -

    Result {{ result.id }}

    - + {% endif %}
    {% endfor %}
    -
    {% endblock %} diff --git a/scheduler/templates/admin/scheduler/jobs-list-with-tasks.partial.html b/scheduler/templates/admin/scheduler/jobs-list-with-tasks.partial.html new file mode 100644 index 0000000..8b5499a --- /dev/null +++ b/scheduler/templates/admin/scheduler/jobs-list-with-tasks.partial.html @@ -0,0 +1,76 @@ +{% load scheduler_tags i18n %} +{% if not add %} +
    +

    Job executions

    +
    + + + + + + + + + + + + + + + + {% for exec in executions %} + + + + + + + + + + + + {% endfor %} + +
    IDScheduled TaskSTATUSCreated atEnqueued atStarted atRan forWorker nameResult
    + {{ exec.name }} + + {% if exec.scheduled_task_id %} + + {{ exec|job_scheduled_task }} + + {% endif %} + + {{ exec|job_status }} + + {{ exec.created_at|date:"Y-m-d, H:i:s"|default:"-" }} + + {{ exec.enqueued_at|date:"Y-m-d, H:i:s"|default:"-" }} + + {{ exec.started_at|date:"Y-m-d, H:i:s"|default:"-" }} + + {{ exec|job_runtime }} + + {{ exec.worker_name|default:"-" }} + + {{ exec|job_result|default:"-" }} +
    +
    +

    + {% if pagination_required %} + {% for i in page_range %} + {% if i == executions.paginator.ELLIPSIS %} + {{ executions.paginator.ELLIPSIS }} + {% elif i == executions.number %} + {{ i }} + {% else %} + {{ i }} + {% endif %} + {% endfor %} + {{ executions.paginator.count }} {% blocktranslate count counter=executions.paginator.count %}entry + {% plural %}entries{% endblocktranslate %} + {% endif %} +

    +
    +{% endif %} \ No newline at end of file diff --git a/scheduler/templates/admin/scheduler/jobs-list.partial.html b/scheduler/templates/admin/scheduler/jobs-list.partial.html index 8186242..b3d7bb4 100644 --- a/scheduler/templates/admin/scheduler/jobs-list.partial.html +++ b/scheduler/templates/admin/scheduler/jobs-list.partial.html @@ -20,7 +20,12 @@

    Job executions

    {% for exec in executions %} - {{ exec.id }} + {{ exec.name }} + {% if exec.scheduled_task_id %} + + Go to scheduled task + + {% endif %} {{ exec|job_status }} diff --git a/scheduler/templates/admin/scheduler/jobs.html b/scheduler/templates/admin/scheduler/jobs.html index e83d96a..7fb9855 100644 --- a/scheduler/templates/admin/scheduler/jobs.html +++ b/scheduler/templates/admin/scheduler/jobs.html @@ -25,13 +25,13 @@
    -
    + {% csrf_token %}
    {% endblock %} -{% block content_title %}

    Workers in {{ queue.name }}

    {% endblock %} +{% block content_title %}

    Queue {{ queue.name }} workers

    {% endblock %} {% block content %} diff --git a/scheduler/templates/admin/scheduler/single_job_action.html b/scheduler/templates/admin/scheduler/single_job_action.html index 53f9089..b6adad5 100644 --- a/scheduler/templates/admin/scheduler/single_job_action.html +++ b/scheduler/templates/admin/scheduler/single_job_action.html @@ -6,7 +6,7 @@ HomeQueues{{ queue.name }} › - {{ job.id }} › + {{ job.name }} › Delete
    {% endblock %} @@ -18,8 +18,8 @@

    Are you sure you want to {{ action }} - - {{ job.id }} ({{ job|show_func_name }}) + + {{ job.name }} ({{ job|show_func_name }}) from {{ queue.name }}? diff --git a/scheduler/templates/admin/scheduler/stats.html b/scheduler/templates/admin/scheduler/stats.html index 369e3a5..bb7e41c 100644 --- a/scheduler/templates/admin/scheduler/stats.html +++ b/scheduler/templates/admin/scheduler/stats.html @@ -9,7 +9,7 @@ } {% endblock %} -{% block content_title %}

    RQ Queues

    {% endblock %} +{% block content_title %}

    Tasks Queues

    {% endblock %} {% block breadcrumbs %}