diff --git a/.devcontainer/Dockerfile b/.devcontainer/Dockerfile new file mode 100644 index 000000000..a06824113 --- /dev/null +++ b/.devcontainer/Dockerfile @@ -0,0 +1,17 @@ +FROM mcr.microsoft.com/devcontainers/python:3.13 + +USER root + +RUN sudo apt-get update \ + && apt-get install -y --no-install-recommends \ + bash-completion \ + postgresql-common \ + && /usr/share/postgresql-common/pgdg/apt.postgresql.org.sh -y \ + && apt-get install -y --no-install-recommends \ + postgresql-client \ + && rm -rf /var/lib/apt/lists/* + +USER vscode + +# Add /home/vscode/.local/bin to PATH for uv tool installs +ENV PATH="/home/vscode/.local/bin:${PATH}" diff --git a/.devcontainer/devcontainer.json b/.devcontainer/devcontainer.json new file mode 100644 index 000000000..a1440f9e2 --- /dev/null +++ b/.devcontainer/devcontainer.json @@ -0,0 +1,22 @@ +{ + "dockerComposeFile": "docker-compose.yml", + "features": { + "ghcr.io/devcontainers/features/node:1": {}, + "ghcr.io/va-h/devcontainers-features/uv:1": { + "shellautocompletion": true + } + }, + "postCreateCommand": ".devcontainer/post-create.sh", + "remoteEnv": { + "PATH": "${containerWorkspaceFolder}/.venv/bin:${containerWorkspaceFolder}/scripts:${containerEnv:PATH}", + "PGDATABASE": "procrastinate", + "PGHOST": "127.0.0.1", + "PGPASSWORD": "password", + "PGUSER": "postgres", + "PROCRASTINATE_APP": "procrastinate.demos.demo_async.app.app", + "UV_LINK_MODE": "copy", + "VIRTUAL_ENV": "${containerWorkspaceFolder}/.venv" + }, + "service": "app", + "workspaceFolder": "/workspaces/${localWorkspaceFolderBasename}" +} diff --git a/.devcontainer/docker-compose.yml b/.devcontainer/docker-compose.yml new file mode 100644 index 000000000..3882c6b02 --- /dev/null +++ b/.devcontainer/docker-compose.yml @@ -0,0 +1,21 @@ +services: + app: + build: + context: .. 
+ dockerfile: .devcontainer/Dockerfile + volumes: + - ../..:/workspaces:cached + command: sleep infinity + network_mode: service:db + db: + image: postgres:18 + restart: unless-stopped + volumes: + - postgres-data:/var/lib/postgresql + environment: + POSTGRES_USER: postgres + POSTGRES_DB: procrastinate + POSTGRES_PASSWORD: password + +volumes: + postgres-data: diff --git a/.devcontainer/post-create.sh b/.devcontainer/post-create.sh new file mode 100755 index 000000000..3ef8f249f --- /dev/null +++ b/.devcontainer/post-create.sh @@ -0,0 +1,27 @@ +#!/usr/bin/env bash + +scripts/bootstrap + +if ! pg_dump --schema-only --table=procrastinate_jobs 1>/dev/null 2>&1; then + echo "Applying migrations" + procrastinate schema --apply || exit 1 +fi + +echo "Migrations applied!" + +echo "" +echo "Welcome to the Procrastinate development container!" +echo "" +echo "You'll find the detailed instructions in the contributing documentation:" +echo " https://procrastinate.readthedocs.io/en/latest/contributing.html" +echo "" +echo "TL;DR: important commands:" +echo "- pytest: Launch the tests" +echo "- tox: Entrypoint for testing multiple python versions as well as docs, linters & formatters" +echo "- procrastinate: Test procrastinate locally." 
+echo "" +echo "We've gone ahead and set up a few additional commands for you:" +echo "- htmlcov: Opens the test coverage results in your browser" +echo "- htmldoc: Opens the locally built sphinx documentation in your browser" +echo "- lint: Run code formatters & linters" +echo "- docs: Build doc" diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 000000000..f85b3fd51 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1 @@ +github: [procrastinate-org] diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 03974a87a..d4addd809 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,26 +1,26 @@ { - "extends": [ - "config:recommended", - ":enablePreCommit", + extends: [ + 'config:recommended', + ':enablePreCommit', ], - "schedule": [ - "before 4am on Saturday" + schedule: [ + 'before 4am on Saturday', ], - "labels": [ - "PR type: dependencies 🤖", + labels: [ + 'PR type: dependencies 🤖', ], - "lockFileMaintenance": { - "enabled": true, - "automerge": true, + lockFileMaintenance: { + enabled: true, + automerge: true, }, - "packageRules": [ + packageRules: [ { - "groupName": "all dependencies", - "groupSlug": "all", - "matchPackagePatterns": [ - "*" - ] - } + groupName: 'all dependencies', + groupSlug: 'all', + matchPackageNames: [ + '*', + ], + }, ], - "automerge": true, + automerge: true, } diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index cc4e97b19..3b74dfae0 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -2,28 +2,38 @@ name: CI on: pull_request: + merge_group: push: branches: - "main" tags: - "*" +env: + UV_FROZEN: "true" + jobs: - build: + tests: strategy: matrix: - python-version: - - "3.9" - - "3.10" - - "3.11" - - "3.12" + include: + - python-version: "3.9" + postgres-version: "13" + - python-version: "3.10" + postgres-version: "14" + - python-version: "3.11" + postgres-version: "15" + - python-version: "3.12" + postgres-version: "16" + - python-version: "3.13" + 
postgres-version: "17" name: "py${{ matrix.python-version }}" runs-on: ubuntu-latest services: postgres: - image: postgres:17 + image: postgres:${{ matrix.postgres-version }} # Set health checks to wait until postgres has started env: POSTGRES_PASSWORD: postgres @@ -35,83 +45,113 @@ jobs: - 5432:5432 steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 - - - name: Install poetry - run: pipx install poetry - - - uses: actions/setup-python@v5 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 with: - python-version: "${{ matrix.python-version }}" - cache: "poetry" + fetch-depth: 0 # Fetch all history for the current branch, needed for checking migrations - - run: poetry install --all-extras + - name: Install the latest version of uv + uses: astral-sh/setup-uv@eb1897b8dc4b5d5bfe39a428a8f2304605e0983c # v7 + with: + python-version: ${{ matrix.python-version }} - name: Run tests - run: scripts/tests + run: uv run pytest --cov=procrastinate --cov-branch env: COVERAGE_FILE: ".coverage.${{ matrix.python-version }}" PGHOST: localhost PGUSER: postgres PGPASSWORD: postgres + PY_COLORS: 1 - name: Store coverage file - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: coverage-${{ matrix.python-version }} path: .coverage.${{ matrix.python-version }} include-hidden-files: true + acceptance-tests: + strategy: + matrix: + mode: + - "current_version_without_post_migration" + - "stable_version_without_post_migration" + + name: "e2e ${{ matrix.mode }}" + runs-on: ubuntu-latest + + services: + postgres: + image: postgres:18 + # Set health checks to wait until postgres has started + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + ports: + - 5432:5432 + + steps: + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 + + - name: Install the latest version of uv + uses: 
astral-sh/setup-uv@eb1897b8dc4b5d5bfe39a428a8f2304605e0983c # v7 + with: + python-version: "3.13" + + - name: Get latest tag + id: get-latest-tag + run: gh release list --limit 1 --json tagName --jq '"latest_tag="+.[0].tagName' >> $GITHUB_OUTPUT + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + + - name: Run tests + run: uvx nox -s ${{ matrix.mode }} + env: + PGHOST: localhost + PGUSER: postgres + PGPASSWORD: postgres + LATEST_TAG: ${{ steps.get-latest-tag.outputs.latest_tag }} + static-typing: name: Run Pyright runs-on: ubuntu-latest steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 - - name: Install poetry - run: pipx install poetry - - - uses: actions/setup-python@v5 + - name: Install the latest version of uv + uses: astral-sh/setup-uv@eb1897b8dc4b5d5bfe39a428a8f2304605e0983c # v7 with: - python-version: "3.8" - cache: "poetry" + python-version: "3.9" - name: Install dependencies - run: poetry install --all-extras --with=types + run: uv sync --all-extras --group=types - name: Activate virtualenv - run: echo "$(poetry env info --path)/bin" >> $GITHUB_PATH + run: echo ".venv/bin" >> $GITHUB_PATH - name: Extract pyright version from pre-commit id: pre-commit-pyright-version run: > yq '.repos | filter(.repo == "https://github.com/RobertCraigie/pyright-python").0.rev - | "pyright-version="+sub("^v", "")' + | "pyright-version="+sub("^v", "") | sub(".post\d+$"; "")' .pre-commit-config.yaml >> $GITHUB_OUTPUT - uses: jakebailey/pyright-action@v2 with: version: ${{ steps.pre-commit-pyright-version.outputs.pyright-version }} - report-status: - name: success - runs-on: ubuntu-latest - needs: - - build - - static-typing - steps: - - name: Report success - run: echo 'Success !' 
- coverage: name: Coverage runs-on: ubuntu-latest - needs: [build] + needs: [tests] steps: - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 - - uses: actions/download-artifact@v4 + - uses: actions/download-artifact@v6 with: pattern: coverage-* merge-multiple: true @@ -123,11 +163,56 @@ jobs: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - name: Store Pull Request comment to be posted - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: python-coverage-comment-action path: python-coverage-comment-action.txt + benchmark: + name: Benchmark + runs-on: ubuntu-latest + permissions: + deployments: write + contents: write + + services: + postgres: + image: postgres:18 + # Set health checks to wait until postgres has started + env: + POSTGRES_PASSWORD: postgres + options: >- + --health-cmd pg_isready + --health-interval 10s + --health-timeout 5s + ports: + - 5432:5432 + + steps: + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 + + - name: Install the latest version of uv + uses: astral-sh/setup-uv@eb1897b8dc4b5d5bfe39a428a8f2304605e0983c # v7 + with: + python-version: "3.13" + + - name: Run benchmarks + run: uv run pytest -m "benchmark" --benchmark-json output.json + env: + PGHOST: localhost + PGUSER: postgres + PGPASSWORD: postgres + + - name: Store benchmark result + if: github.event_name == 'push' && github.ref_type == 'branch' + uses: benchmark-action/github-action-benchmark@d48d326b4ca9ba73ca0cd0d59f108f9e02a381c7 # v1 + with: + name: Procrastinate Benchmarks + tool: "pytest" + output-file-path: output.json + github-token: ${{ secrets.GITHUB_TOKEN }} + auto-push: true + publish: name: Publish package to PyPI if: github.event_name == 'push' && github.ref_type == 'tag' @@ -136,18 +221,16 @@ jobs: permissions: id-token: write needs: - - build + - tests - static-typing steps: - - name: Install poetry - run: | - pipx install 
poetry - pipx inject poetry 'poetry-dynamic-versioning[plugin]' - - - uses: actions/checkout@11bd71901bbe5b1630ceea73d27597364c9af683 # v4 + - uses: actions/checkout@1af3b93b6815bc44a9784bd300feb67ff0d1eeb3 # v6 + with: + fetch-tags: true + ref: ${{ github.ref }} - - name: Build wheel and sdist - run: poetry build + - name: Build wheel & sdist + run: pipx run uv build - name: Publish package distributions to PyPI uses: pypa/gh-action-pypi-publish@release/v1 diff --git a/.gitignore b/.gitignore index 8d4d11a8a..70cb23511 100644 --- a/.gitignore +++ b/.gitignore @@ -12,3 +12,5 @@ dist docs/_build htmlcov VERSION.txt +.benchmarks +.python-version diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index e7892bafd..6552f4523 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -5,62 +5,112 @@ ci: # See https://pre-commit.com/hooks.html for more hooks repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v5.0.0 + rev: v6.0.0 hooks: # File names - id: check-case-conflict - # File formats + # File formats - id: pretty-format-json - id: check-json - id: check-toml - id: check-yaml - # Executables + # Executables - id: check-executables-have-shebangs exclude: dev-env - id: check-shebang-scripts-are-executable - # Git shenanigans + # Git shenanigans - id: check-merge-conflict - id: check-added-large-files - # Python issues + # Python issues - id: check-ast - id: debug-statements - # Whitespace + # Whitespace - id: end-of-file-fixer - id: trailing-whitespace - id: mixed-line-ending + - repo: https://github.com/RobertCraigie/pyright-python - rev: v1.1.390 + rev: v1.1.405 hooks: - id: pyright additional_dependencies: - aiopg==1.4.0 - - anyio==4.7.0 - - asgiref==3.8.1 - - attrs==24.2.0 - - contextlib2==21.6.0 - - croniter==5.0.1 - - django-stubs==5.1.1 - - django==5.1.4 + - alabaster==0.7.16 ; python_full_version < '3.10' + - alabaster==1.0.0 ; python_full_version >= '3.10' + - asgiref==3.9.1 + - async-timeout==4.0.3 + - attrs==25.3.0 + 
- babel==2.17.0 + - certifi==2025.8.3 + - charset-normalizer==3.4.3 + - colorama==0.4.6 ; sys_platform == 'win32' + - contextlib2==21.6.0 ; python_full_version < '3.10' + - croniter==6.0.0 + - django==4.2.24 ; python_full_version < '3.10' + - django==5.2.6 ; python_full_version >= '3.10' + - django-stubs==5.1.3 ; python_full_version < '3.10' + - django-stubs==5.2.2 ; python_full_version >= '3.10' + - django-stubs-ext==5.1.3 ; python_full_version < '3.10' + - django-stubs-ext==5.2.2 ; python_full_version >= '3.10' + - docutils==0.21.2 + - greenlet==3.2.4 ; (python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64') + - idna==3.10 + - imagesize==1.4.1 + - importlib-metadata==8.7.0 ; python_full_version < '3.10' + - jinja2==3.1.6 + - markupsafe==3.0.2 + - packaging==25.0 + - psycopg==3.2.9 + - psycopg-pool==3.2.6 - psycopg2-binary==2.9.10 - - psycopg[pool]==3.2.3 + - pygments==2.19.2 - python-dateutil==2.9.0.post0 - - sphinx==7.4.7 - - sqlalchemy==2.0.36 + - pytz==2025.2 + - requests==2.32.5 + - roman-numerals-py==3.1.0 ; python_full_version >= '3.11' + - six==1.17.0 + - snowballstemmer==3.0.1 + - sphinx==7.4.7 ; python_full_version < '3.10' + - sphinx==8.1.3 ; python_full_version == '3.10.*' + - sphinx==8.2.3 ; python_full_version >= '3.11' + - sphinxcontrib-applehelp==2.0.0 + - sphinxcontrib-devhelp==2.0.0 + - sphinxcontrib-htmlhelp==2.1.0 + - sphinxcontrib-jsmath==1.0.1 + - sphinxcontrib-qthelp==2.0.0 + - sphinxcontrib-serializinghtml==2.0.0 + - sqlalchemy==2.0.43 + - sqlparse==0.5.3 + - tomli==2.2.1 ; python_full_version < '3.11' + - types-pyyaml==6.0.12.20250822 + - 
typing-extensions==4.15.0 + - tzdata==2025.2 ; sys_platform == 'win32' + - urllib3==2.5.0 + - zipp==3.23.0 ; python_full_version < '3.10' - repo: https://github.com/astral-sh/ruff-pre-commit - rev: v0.8.2 + rev: v0.12.12 hooks: - id: ruff - args: [--fix, --unsafe-fixes] + args: [--fix, --unsafe-fixes, --show-fixes] - id: ruff-format + - repo: https://github.com/PyCQA/doc8 - rev: v1.1.2 + rev: v2.0.0 hooks: - id: doc8 - - repo: https://github.com/ewjoachim/poetry-to-pre-commit - rev: 2.2.0 + + - repo: https://github.com/adamchainz/django-upgrade + rev: 1.27.0 hooks: - - id: sync-repos - args: [--map=pyright-python=pyright, --map=ruff-pre-commit=ruff] + - id: django-upgrade + args: [--target-version, '4.2'] # Replace with Django version - - id: sync-hooks-additional-dependencies - args: ['--bind=pyright=main,types'] + - repo: https://github.com/ewjoachim/sync-pre-commit-with-uv + rev: 1.1.0 + hooks: + - id: sync + + - repo: https://github.com/astral-sh/uv-pre-commit + rev: 0.8.22 + hooks: + - id: uv-lock diff --git a/.readthedocs.yml b/.readthedocs.yml index 19de0a4c5..3a8fc3a0d 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -11,9 +11,9 @@ build: python: "latest" jobs: post_create_environment: - - python -m pip install poetry + - python -m pip install uv post_install: - - VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH python -m poetry install --with docs + - UV_PROJECT_ENVIRONMENT=$READTHEDOCS_VIRTUALENV_PATH python -m uv sync --all-extras --group docs sphinx: configuration: docs/conf.py diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index 3bbcc7938..90136f818 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -13,18 +13,31 @@ Of course, feel free to read the script before launching it. This script is intended to be a one-liner that sets up everything you need. It makes the following assumptions: -- You're using `MacOS` or `Linux`, and `bash` or `zsh`. 
-- You already have `python3` available -- You have `poetry` [installed](https://python-poetry.org/docs/#installation) -- Either you've already setup a PostgreSQL database and environment variables (`PG*`) - are set or you have `docker-compose` available and port 5432 is free. -- Either `psql` and other `libpq` executables are available in the `PATH` or they - are located in `usr/local/opt/libpq/bin` (`Homebrew`). +- You're using `MacOS` or `Linux`, and `bash` or `zsh`. +- You already have `python3` available +- Either: + - you already have `uv`, `pre-commit` and `nox` installed + - or you have `uv` installed and you're ok installing the 2 other tools with `uv` + - or you don't have `uv` installed but it's ok if we install it for you +- Either: + - you've already setup a PostgreSQL database and environment variables (`PG*`) + are set + - or you have `docker compose` available and port 5432 is free. +- Either: + - `psql` and other `libpq` executables are available in the `PATH` + - or they are located in `usr/local/opt/libpq/bin` (`Homebrew`). The `dev-env` script will add the `scripts` folder to your `$PATH` for the current shell, so in the following documentation, if you see `scripts/foo`, you're welcome to call `foo` directly. +### Development Container + +Alternatively, you can utilize our development container setup. In VSCode, select +`Dev Containers: Reopen in Container` from the command palette. This action sets up a +container preconfigured with all required dependencies and automatically provisions a +database. The virtual environment is created and activated seamlessly within the container. + ## Instructions for contribution ### Environment variables @@ -46,7 +59,7 @@ The PostgreSQL database we used is a fresh standard out-of-the-box database on the latest stable version. 
```console -$ docker-compose up -d postgres +$ docker compose up -d postgres ``` If you want to try out the project locally, it's useful to have `postgresql-client` @@ -67,26 +80,27 @@ $ /usr/local/opt/libpq/bin/createdb ### Set up your development environment -The development environment is managed by [poetry]. It's a tool that manages +The development environment is managed by [uv]. It's a tool that manages dependencies and virtual environments. We also use [pre-commit] to keep the code -clean. +clean and [nox] to run some tests. -If you don't already have `poetry` or `pre-commit` installed, you can +If you don't already have `uv`, `pre-commit` or `nox` installed, you can install them with: ```console $ scripts/bootstrap ``` -This will install [pipx] if necessary and use it to install `poetry` and +This will install [uv] if necessary and use it to install `nox` and `pre-commit`. Then, install Procrastinate with development dependencies in a virtual environment: ```console -$ poetry env use 3.{x} # Select the Python version you want to use (replace {x}) -$ poetry install -$ poetry shell # Activate the virtual environment +$ uv venv --python=3.{x} # Select the Python version you want to use (replace {x}) +$ uv sync # Install the project and its dependencies +$ uv run $SHELL # Activate the virtual environment +$ exit # Quit the virtual environment ``` You can check that your Python environment is properly activated: @@ -129,7 +143,6 @@ In addition, an [editorconfig] file will help your favorite editor to respect procrastinate coding style. It is automatically used by most famous IDEs, such as Pycharm and VS Code. - ### Write the documentation The documentation is written in `Markdown` and built with `Sphinx` and `MyST`. 
@@ -173,26 +186,30 @@ ALTER TABLE procrastinate_jobs ADD COLUMN extra TEXT; The name of migration scripts must follow a specific pattern: ``` -xx.yy.zz_ab_very_short_description_of_your_changes.sql +xx.yy.zz_ab_{pre|post}_very_short_description_of_your_changes.sql ``` -`xx.yy.zz` is the number of the latest released version of Procrastinate. (The latest -release is the one marked `Latest release` on the [Procrastinate releases] page.) -`xx`, `yy` and `zz` must be 2-digit numbers, with leading zeros if necessary. -`ab` is the 2-digit migration script's serial number, `01` being the first number in -the series. And, finally, `very_short_description_of_your_changes` is a very short -description of the changes (wow). It is important to use underscores between the -different parts, and between words in the short description. +`xx.yy.zz` is the number of the next minor version of Procrastinate. (The +latest release is the one marked `Latest release` on the [Procrastinate +releases] page.) `xx`, `yy` and `zz` must be 2-digit numbers, with leading +zeros if necessary. `ab` is the 2-digit migration script's serial number, the +first number for each release being `01` for pre-migrations and `50` for +post-migrations. `pre` is if the migration should be applied before upgrading +the code, `post` is if the migration should be applied after upgrading the +code. And, finally, `very_short_description_of_your_changes` is a very short +description of the changes (wow). It is important to use underscores between +the different parts, and between words in the short description. -For example, let's say the latest released version of Procrastinate is `1.0.1`, and -that the `migrations` directory already includes a migration script whose serial -number is `01` for that release number. In that case, if you need to add a migration -script, its name will start with `01.00.01_02_`. 
+For example, let's say the latest released version of Procrastinate is `3.0.4`,
+that the `migrations` directory already includes a post-migration script whose
+serial number is `50` for that release number and your migration should be
+applied after deploying the corresponding python code. In that case, if you
+need to add a migration script, its name will start with `03.01.00_51_post_`.
 
 ### Backward-compatibility
 
-As a Procrastinate developer, the changes that you make to the Procrastinate database
-schema must be compatible with the Python code of previous Procrastinate versions.
+As a Procrastinate developer, you must ensure you use pre-migrations and post-migrations
+to maintain backward compatibility with previous versions of Procrastinate.
 
 For example, let's say that the current Procrastinate database schema includes an SQL
 function
@@ -212,8 +229,8 @@ replace the old function by the new one, and add a migration script that removes
 function and adds the new one:
 
 ```sql
-DROP FUNCTION procrastinate_func(integer, text, timestamp);
-CREATE FUNCTION procrastinate_func(arg1 integer, arg2 text)
+DROP FUNCTION procrastinate_func_v3(integer, text, timestamp);
+CREATE FUNCTION procrastinate_func_v3(arg1 integer, arg2 text)
 RETURNS INT
 ...
 ```
@@ -227,57 +244,20 @@ So when you make changes to the Procrastinate database schema you must ensure th
 new schema still works with old versions of the Procrastinate Python code.
 
 Going back to our `procrastinate_func` example. Instead of replacing the old function
-by the new one in `schema.sql`, you will leave the old function, and just add the new
-one.
And your migration script will just involve adding the new version of the function: +by the new one in `schema.sql`, you add a new function in pre-migrations and remove the +old function in post-migrations: ```sql -CREATE FUNCTION procrastinate_func(arg1 integer, arg2 text) +-- xx_xx_xx_01_pre_add_new_version_procrastinate_func.sql +CREATE FUNCTION procrastinate_func_v4(arg1 integer, arg2 text) RETURNS INT ... -``` - -The question that comes next is: when can the old version of `procrastinate_func` be -removed? Or more generally, when can the SQL compatibility layer be removed? - -The answer is some time after the next major version of Procrastinate! -For example, if the current Procrastinate version is 1.5.0, the SQL compatibility layer -will be removed after 2.0.0 is released. The 2.0.0 release will be a pivot release, in -the sense that Procrastinate users who want to upgrade from, say, 1.5.0 to 2.5.0, will -need to upgrade from 1.5.0 to 2.0.0 first, and then from 2.0.0 to 2.5.0. And they will -always migrate the database schema before updating the code. - -The task of removing the SQL compatibility layer after the release of a major version -(e.g. 2.0.0) is the responsibility of Procrastinate maintainers. More specifically, for -the 2.1.0 release, Procrastinate maintainers will need to edit `schema.sql` and remove -the SQL compatibility layer. - -But, as a standard developer, when you make changes to the Procrastinate database schema -that involves leaving or adding SQL statements for compatibility reasons, it's a good -idea to add a migration script for the removal of the SQL compatibility layer. This will -greatly help the Procrastinate maintainers. - -For example, let's say the current released version of Procrastinate is 1.5.0, and you -want to change the signature of `procrastinate_func` as described above. You will add -a `1.5.0` migration script (e.g. 
-`01.05.00_01_add_new_version_procrastinate_func.sql`) that adds the new version of -the function, as already described above. And you will also add a `2.0.0` migration -script (e.g. `02.00.00_01_remove_old_version_procrastinate_func.sql`) that takes -care of removing the old version of the function: - -```sql +-- xx_xx_xx_50_post_remove_old_version_procrastinate_func.sql DROP FUNCTION procrastinate_func(integer, text, timestamp); -``` - -In this way, you provide the new SQL code, the compatibility layer, and the migration -for the removal of the compatibility layer. +... -:::{note} -The migration scripts that remove the SQL compatibility code are to be added to the -`future_migrations` directory instead of the `migrations` directory. And it will -be the responsibility of Procrastinate maintainers to move them to the -`migrations` directory after the next major release. -::: +``` ### Migration tests @@ -289,6 +269,29 @@ included in the normal test suite, but you can run them specifically with: (venv) $ pytest tests/migration ``` +We run the `acceptance` tests on 3 different configurations: + +- Without the post-migrations applied and with the last released version of + Procrastinate +- Without the post-migrations applied and with the current checked out code +- With all migrations applied and with the current checked out code (this is + just part of the normal test suite) + +This is to ensure that the migrations are backward-compatible and that the database +schema can be upgraded without downtime. We simulate all stages of the upgrade process: + +- (the initial situation being that Procrastinate is running with the last + released version of the code and all migrations of the last released + version have been applied) +- First, the user would apply pre-migrations while the old version of the + code is still running. +- Then, the user would upgrade the code to the new version. +- Finally, the user would apply post-migrations. 
+
+There are cases where new acceptance tests cannot work on the last released version.
+In that case, the tests can be skipped by adding `@pytest.mark.skip_before_version("x.y.z")`,
+where `x.y.z` is the version of Procrastinate where the test would start running.
+
 ## Try our demos
 
 See the demos page for instructions on how to run the demos ({doc}`demos`).
@@ -301,23 +304,23 @@ Python environment on the host system. Alternatively, they can be installed in a
 image, and Procrastinate and all the development tools can be run in Docker containers.
 Docker is useful when you can't, or don't want to, install system requirements.
 
-This section shows, through `docker-compose` command examples, how to test and run
+This section shows, through `docker compose` command examples, how to test and run
 Procrastinate in Docker.
 
 Build the `procrastinate` Docker image:
 
 ```console
 $ export UID GID
-$ docker-compose build procrastinate
+$ docker compose build procrastinate
 ```
 
 Run the automated tests:
 
 ```console
-$ docker-compose run --rm procrastinate pytest
+$ docker compose run --rm procrastinate pytest
 ```
 
-Docker Compose is configured (in `docker-compose.yml`) to mount the local directory on
+Docker Compose is configured (in `docker-compose.yml`) to mount the local directory on
 the host system onto `/src` in the container. This means that local changes made to the
 Procrastinate code are visible in Procrastinate containers.
 
@@ -326,7 +329,7 @@ container to be run with the current user id and group id. If not set or exporte
 Procrastinate container will run as root, and files owned by root may be created in the
 developer's working directory.
 
-In the definition of the `procrastinate` service in `docker-compose.yml` the
+In the definition of the `procrastinate` service in `docker-compose.yml` the
 `PROCRASTINATE_APP` variable is set to `procrastinate_demo.app.app` (the Procrastinate
 demo application).
So `procrastinate` commands run in Procrastinate containers are always run as if they were passed `--app procrastinate_demo.app.app`. @@ -334,55 +337,55 @@ containers are always run as if they were passed `--app procrastinate_demo.app.a Run the `procrastinate` command : ```console -$ docker-compose run --rm procrastinate procrastinate -h +$ docker compose run --rm procrastinate procrastinate -h ``` Apply the Procrastinate database schema: ```console -$ docker-compose run --rm procrastinate procrastinate schema --apply +$ docker compose run --rm procrastinate procrastinate schema --apply ``` Run the Procrastinate healthchecks: ```console -$ docker-compose run --rm procrastinate procrastinate healthchecks +$ docker compose run --rm procrastinate procrastinate healthchecks ``` Start a Procrastinate worker (`-d` used to start the container in detached mode): ```console -$ docker-compose up -d procrastinate +$ docker compose up -d procrastinate ``` Run a command (`bash` here) in the Procrastinate worker container just started: ```console -$ docker-compose exec procrastinate bash +$ docker compose exec procrastinate bash ``` Watch the Procrastinate worker logs: ```console -$ docker-compose logs -ft procrastinate +$ docker compose logs -ft procrastinate ``` Use the `procrastinate defer` command to create a job: ```console -$ docker-compose run --rm procrastinate procrastinate defer procrastinate_demo.tasks.sum '{"a":3, "b": 5}' +$ docker compose run --rm procrastinate procrastinate defer procrastinate_demo.tasks.sum '{"a":3, "b": 5}' ``` Or run the demo main file: ```console -$ docker-compose run --rm procrastinate python -m procrastinate_demo +$ docker compose run --rm procrastinate python -m procrastinate_demo ``` Stop and remove all the containers (including the `postgres` container): ```console -$ docker-compose down +$ docker compose down ``` ## Wait, there are `async` and `await` keywords everywhere!? 
@@ -395,8 +398,8 @@ When possible, we're trying to avoid duplicating code, with designs such as ## Dependencies management -Dependencies for the package are handled by Poetry in -[`pyproject.toml`](https://github.com/procrastinate-org/procrastinate/blob/main/pyproject.toml#L25). +Dependencies for the package are handled by uv in +[`pyproject.toml`](https://github.com/procrastinate-org/procrastinate/blob/main/pyproject.toml). Whenever possible, we avoid pinning or putting any kind of limits on the requirements. We'll typically only do that if we know that there's a known conflict with specific versions. Typically, even if we support a subset of @@ -405,28 +408,17 @@ and if users use procrastinate with unsupported Django version and it works for them, everyone is happy. Dependencies for the development environment are kept in -[`poetry.lock`](https://github.com/procrastinate-org/procrastinate/blob/main/poetry.lock). +[`uv.lock`](https://github.com/procrastinate-org/procrastinate/blob/main/uv.lock). Those are updated regularily by [Renovate](https://docs.renovatebot.com/) which merges their own PRs. -The versions in `pre-commit-config.yaml` are kept in sync with `poetry.lock` -by the `pre-commit` hook -[poetry-to-pre-commit](https://github.com/procrastinate-org/procrastinate/blob/main/.pre-commit-config.yaml#L61). +The versions in `pre-commit-config.yaml` are kept in sync with `uv.lock` +by a local `pre-commit` hook +[script](https://github.com/procrastinate-org/procrastinate/blob/main/scripts/sync-pre-commit.py). If you need to recompute the lockfile in your PR, you can use: ```console -$ # Update all the pinned dependencies in pyproject.toml & all versions in poetry.lock -$ # (there are actually no pinned dependencies in pyproject.toml, so this only updates the -$ # lockfile). -$ poetry update - -$ # Similarly, update dependencies in the lockfile. In procrastinate, it's equivalent -$ # to the command above -$ poetry lock - -$ # Recompute the lockfile (e.g. 
after the pyproject.toml was updated) without trying -$ # to update anything -$ poetry lock --no-update +$ uv lock ``` ## Core contributor additional documentation @@ -449,7 +441,7 @@ automated. This works with pre-release too. When creating the release, GitHub will save the release info and create a tag with the provided version. The new tag will be seen by GitHub Actions, which will then create a wheel (using the tag as version number, thanks to -`poetry-dynamic-versioning`), and push it to PyPI (using Trusted publishing). +`versioningit`), and push it to PyPI (using Trusted publishing). That tag should also trigger a ReadTheDocs build, which will read GitHub releases (thanks to our `changelog` extension) which will write the changelog in the published documentation (transformed from `Markdown` to @@ -467,8 +459,8 @@ also rebuild the stable and latest doc on [readthedocs](https://readthedocs.org/ [editorconfig]: https://editorconfig.org/ [libpq environment variables]: https://www.postgresql.org/docs/current/libpq-envars.html -[pipx]: https://pipx.pypa.io/stable/ -[poetry]: https://python-poetry.org/ +[uv]: https://docs.astral.sh/uv [pre-commit]: https://pre-commit.com/ [Procrastinate releases]: https://github.com/procrastinate-org/procrastinate/releases [Pytest]: https://docs.pytest.org/en/latest/ +[nox]: https://nox.thea.codes/en/stable/ diff --git a/Dockerfile b/Dockerfile index 636b36d40..328e4ca9a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,6 +1,6 @@ FROM python:3 -RUN pip install poetry +RUN pip install uv ARG UID=1000 ARG GID=1000 @@ -10,7 +10,7 @@ USER $UID:$GID ENV HOME="/src" COPY pyproject.toml ./ -COPY poetry.lock ./ -RUN poetry install -ENTRYPOINT ["poetry", "run"] +COPY uv.lock ./ +RUN uv sync +ENTRYPOINT ["uv", "run"] CMD ["procrastinate", "worker"] diff --git a/README.md b/README.md index 23caabf58..e177373cc 100644 --- a/README.md +++ b/README.md @@ -6,16 +6,16 @@ [![Continuous 
Integration](https://img.shields.io/github/actions/workflow/status/procrastinate-org/procrastinate/ci.yml?logo=github&branch=main)](https://github.com/procrastinate-org/procrastinate/actions?workflow=CI) [![Documentation](https://img.shields.io/readthedocs/procrastinate/stable?logo=read-the-docs&logoColor=white)](https://procrastinate.readthedocs.io/en/stable/badge=stable) [![Coverage badge](https://raw.githubusercontent.com/procrastinate-org/procrastinate/python-coverage-comment-action-data/badge.svg)](https://htmlpreview.github.io/?https://github.com/procrastinate-org/procrastinate/blob/python-coverage-comment-action-data/htmlcov/index.html) -[![MIT License](https://img.shields.io/github/license/procrastinate-org/procrastinate?logo=open-source-initiative&logoColor=white)](https://github.com/procrastinate-org/procrastinate/blob/main/LICENSE) +[![MIT License](https://img.shields.io/github/license/procrastinate-org/procrastinate?logo=open-source-initiative&logoColor=white)](https://github.com/procrastinate-org/procrastinate/blob/main/LICENSE.md) [![Contributor Covenant](https://img.shields.io/badge/Contributor%20Covenant-v1.4%20adopted-ff69b4.svg)](https://github.com/procrastinate-org/procrastinate/blob/main/CODE_OF_CONDUCT.md) [![Discord](https://img.shields.io/discord/1197292025725329549?logo=discord&logoColor=white&label=Discord&color=%237289da)](https://discord.gg/JWZeNq6P6Z) **Procrastinate is looking for** [additional maintainers!](https://github.com/procrastinate-org/procrastinate/discussions/748) -Procrastinate is an open-source Python 3.8+ distributed task processing -library, leveraging PostgreSQL to store task definitions, manage locks and +Procrastinate is an open-source Python 3.9+ distributed task processing +library, leveraging PostgreSQL 13+ to store task definitions, manage locks and dispatch tasks. It can be used within both sync and async code, -has [Django](howto/django/configuration) integration, and is easy to use with ASGI frameworks. 
+has [Django] integration, and is easy to use with ASGI frameworks. It supports periodic tasks, retries, arbitrary task locks etc. In other words, from your main code, you call specific functions (tasks) in a @@ -92,7 +92,7 @@ to the How-To sections for specific features. The Discussion section should hopefully answer your questions. Otherwise, feel free to open an [issue](https://github.com/procrastinate-org/procrastinate/issues). -*Note to my future self: add a quick note here on why this project is named* +_Note to my future self: add a quick note here on why this project is named_ "[Procrastinate]" ;) . todo + todo -- b --> doing + doing -- c --> succeeded + doing -- d --> todo + doing -- e --> failed + todo -- f --> cancelled + doing -- g --> aborted + classDef hidden display: none; +``` + +- **a**: The job was deferred by `my_task.defer()` (or the async equivalent) +- **b**: A worker fetched the job from the database and started processing it +- **c**: A worker finished processing a job successfully +- **d**: The job failed by raising an error but will be retried +- **e**: The job failed by raising an error and won't be retried +- **f**: The job was cancelled by calling `job_manager.cancel_job_by_id(job_id)` (or the async equivalent) before its processing was started +- **g**: The job was aborted during being processed by calling + `job_manager.cancel_job_by_id(job_id, abort=True)` (or the async equivalent). A sync job must also + handle the abort request by checking `context.should_abort()` and raising a + `JobAborted` exception. An async job handles it automatically by internally raising a + `CancelledError` exception. + ## Asynchronous operations & concurrency Here, asynchronous (or async) means "using the Python `async/await` keywords, to @@ -199,28 +234,30 @@ Having sub-workers wait for an available connection in the pool is suboptimal. 
Y resources will be better used with fewer sub-workers or a larger pool, but there are many factors to take into account when [sizing your pool](https://wiki.postgresql.org/wiki/Number_Of_Database_Connections). -### Mind the `worker_timeout` +### How polling works + +#### `fetch_job_polling_interval` -Even when the database doesn't notify workers regarding newly deferred jobs, idle -workers still poll the database every now and then, just in case. +Even when the database doesn't notify workers regarding newly deferred jobs, each worker still polls the database every now and then, just in case. There could be previously locked jobs that are now free, or scheduled jobs that have -reached the ETA. `fetch_job_polling_interval` is the {py:meth}`App.run_worker` parameter (or the +reached the ETA. `fetch_job_polling_interval` is the {py:meth}`App.run_worker` parameter (or the equivalent CLI flag) that sizes this "every now and then". -On a non-concurrent idle worker, a database poll is run every `<worker_timeout>` -seconds. On a concurrent worker, sub-workers poll the database every -`<worker_timeout>*<concurrency>` seconds. This ensures that, on average, the time -between each database poll is still `<worker_timeout>` seconds. +A worker will keep fetching new jobs as long as it has capacity to process them. +The polling interval starts from the moment the last attempt to fetch a new job yields no result. + +:::{note} +The polling interval was previously called `timeout` in pre-v3 versions of Procrastinate. It was renamed to `fetch_job_polling_interval` for clarity. +::: -The initial timeout for the first loop of each sub-worker is modified so that the -workers are initially spread across all the total length of the timeout, but the -randomness in job duration could create a situation where there is a long gap between -polls. If you find this to happen in reality, please open an issue, and lower your -`worker_timeout`.
+#### `abort_job_polling_interval` -Note that as long as jobs are regularly deferred, or there are enqueued jobs, -sub-workers will not wait and this will not be an issue. This is only about idle -workers taking time to notice that a previously unavailable job has become available. +Another polling interval is the `abort_job_polling_interval`. It defines how often the worker will poll the database for jobs to abort. +When `listen_notify=True`, the worker will likely be notified "instantly" of each abort request prior to polling the database. + +However, when `listen_notify=False` or the abort notification was missed, `abort_job_polling_interval` will represent the maximum delay before the worker reacts to an abort request. + +Note that the worker will only poll the database for abort requests when at least one job is running. ## Procrastinate's usage of PostgreSQL functions and procedures @@ -273,6 +310,12 @@ really ready for production. We'd love if you were to try out Procrastinate in a project of yours and provide us with feedback. +## Does Procrastinate provide any benchmarks? + +We run some very [basic benchmarks](https://github.com/procrastinate-org/procrastinate/tree/main/tests/benchmarks) +on every commit of the main branch to detect performance regressions. +The visualized results can be viewed on our [Benchmarks GitHub Page](https://procrastinate-org.github.io/procrastinate/dev/bench/). + ## Wasn't this project named "Cabbage" ? 
Yes, in early development, we planned to call this "cabbage" in reference to diff --git a/docs/howto/advanced.md b/docs/howto/advanced.md index dd810e5c3..51884d9b5 100644 --- a/docs/howto/advanced.md +++ b/docs/howto/advanced.md @@ -17,5 +17,6 @@ advanced/events advanced/sync_defer advanced/custom_json_encoder_decoder advanced/blueprints +advanced/shutdown advanced/sphinx ::: diff --git a/docs/howto/advanced/cancellation.md b/docs/howto/advanced/cancellation.md index 0743c21e8..1f193c485 100644 --- a/docs/howto/advanced/cancellation.md +++ b/docs/howto/advanced/cancellation.md @@ -24,11 +24,10 @@ app.job_manager.cancel_job_by_id(33, delete_job=True) await app.job_manager.cancel_job_by_id_async(33, delete_job=True) ``` -## Mark a currently being processed job for abortion +## Mark a running job for abortion If a worker has not picked up the job yet, the below command behaves like the -command without the `abort` option. But if a job is already in the middle of -being processed, the `abort` option marks this job for abortion (see below +command without the `abort` option. But if a job is already running, the `abort` option marks this job for abortion (see below how to handle this request). ```python @@ -38,10 +37,20 @@ app.job_manager.cancel_job_by_id(33, abort=True) await app.job_manager.cancel_job_by_id_async(33, abort=True) ``` -## Handle a abortion request inside the task +Behind the scenes, the worker receives a Postgres notification every time a job is requested to abort (unless `listen_notify=False`). -In our task, we can check (for example, periodically) if the task should be -aborted. If we want to respect that request (we don't have to), we raise a +The worker also polls (respecting `abort_job_polling_interval`) the database for abortion requests, as long as the worker is running at least one job (in the absence of a running job, there is nothing to abort).
+ +:::{note} +When a job is requested to abort and that job fails, it will not be retried (regardless of the retry strategy). +::: + +## Handle an abortion request inside the task + +### Sync tasks + +In a sync task, we can check (for example, periodically) if the task should be +aborted. If we want to respect that abortion request (we don't have to), we raise a `JobAborted` error. Any message passed to `JobAborted` (e.g. `raise JobAborted("custom message")`) will end up in the logs. @@ -54,24 +63,31 @@ def my_task(context): do_something_expensive() ``` -There is also an async API +### Async tasks + +Async tasks (coroutines) are cancelled via the [asyncio cancellation](https://docs.python.org/3/library/asyncio-task.html#task-cancellation) mechanism. ```python -@app.task(pass_context=True) -async def my_task(context): - for i in range(100): - if await context.should_abort_async(): - raise exceptions.JobAborted - do_something_expensive() +@app.task() +async def my_task(): + do_something_synchronous() + # if the job is aborted while it waits for do_something to complete, asyncio.CancelledError will be raised here + await do_something() ``` -:::{warning} -`context.should_abort()` and `context.should_abort_async()` does poll the -database and might flood the database. Ensure you do it only sometimes and -not from too many parallel tasks. -::: +If you want to have some custom behavior at cancellation time, use a combination of [shielding](https://docs.python.org/3/library/asyncio-task.html#shielding-from-cancellation) and capturing `except asyncio.CancelledError`. -:::{note} -When a task of a job that was requested to be aborted raises an error, the job -is marked as failed (regardless of the retry strategy).
-::: +```python +@app.task() +async def my_task(): + try: + important_task = asyncio.create_task(something_important()) + # shield something_important from being cancelled + await asyncio.shield(important_task) + except asyncio.CancelledError: + # capture the error and wait for something important to complete + await important_task + # raise if the job should be marked as aborted, or swallow CancelledError if the job should be + # marked as succeeded + raise +``` diff --git a/docs/howto/advanced/locks.md b/docs/howto/advanced/locks.md index 5fb1e089c..679a5a2ab 100644 --- a/docs/howto/advanced/locks.md +++ b/docs/howto/advanced/locks.md @@ -40,3 +40,11 @@ define the value when you register the task: def my_task(**kwargs): ... ``` + +## Locks and Priority + +When multiple jobs share the same lock, they are processed one at a time in a specific order: +- descending priority (higher priority first) +- ascending creation time (older job first) + +If any job with the same lock is running, all other jobs with that lock must wait. A high-priority job cannot jump ahead of a currently running job, regardless of the running job's priority. diff --git a/docs/howto/advanced/middleware.md b/docs/howto/advanced/middleware.md index f0717f98b..27e2eb941 100644 --- a/docs/howto/advanced/middleware.md +++ b/docs/howto/advanced/middleware.md @@ -6,7 +6,7 @@ your own decorator instead of `@app.task` and have this decorator implement the actions you need and delegate the rest to `@app.task`. It might look like this: -``` +```python import functools def task(original_func=None, **kwargs): diff --git a/docs/howto/advanced/retry.md b/docs/howto/advanced/retry.md index d8a5ad708..a8aa4cb6b 100644 --- a/docs/howto/advanced/retry.md +++ b/docs/howto/advanced/retry.md @@ -9,36 +9,36 @@ app / machine reboots.
## Simple strategies -- Retry 5 times (so 6 attempts total): - - ```python - @app.task(retry=5) - def flaky_task(): - if random.random() > 0.9: - raise Exception("Who could have seen this coming?") - print("Hello world") - ``` - -- Retry indefinitely: - - ```python - @app.task(retry=True) - def flaky_task(): - if random.random() > 0.9: - raise Exception("Who could have seen this coming?") - print("Hello world") - ``` +- Retry 5 times (so 6 attempts total): + + ```python + @app.task(retry=5) + def flaky_task(): + if random.random() > 0.9: + raise Exception("Who could have seen this coming?") + print("Hello world") + ``` + +- Retry indefinitely: + + ```python + @app.task(retry=True) + def flaky_task(): + if random.random() > 0.9: + raise Exception("Who could have seen this coming?") + print("Hello world") + ``` ## Advanced strategies Advanced strategies let you: -- define a maximum number of retries (if you don't, jobs will be retried indefinitely - until they pass) -- define the retry delay, with constant, linear and exponential backoff options (if - you don't, jobs will be retried immediately) -- define the exception types you want to retry on (if you don't, jobs will be retried - on any type of exceptions) +- define a maximum number of retries (if you don't, jobs will be retried indefinitely + until they pass) +- define the retry delay, with constant, linear and exponential backoff options (if + you don't, jobs will be retried immediately) +- define the exception types you want to retry on (if you don't, jobs will be retried + on any type of exceptions) Define your precise strategy using a {py:class}`RetryStrategy` instance: @@ -57,9 +57,9 @@ def my_other_task(): {py:class}`RetryStrategy` takes 3 parameters related to how long it will wait between retries: -- `wait=5` to wait 5 seconds before each retry -- `linear_wait=5` to wait 5 seconds then 10 then 15 and so on -- `exponential_wait=5` to wait 5 seconds then 25 then 125 and so on +- `wait=5` to wait 5 seconds 
before each retry +- `linear_wait=5` to wait 5 seconds then 10 then 15 and so on +- `exponential_wait=5` to wait 5 seconds then 25 then 125 and so on ## Implementing your own strategy @@ -73,28 +73,69 @@ The time to wait between retries can be specified with `retry_in` or alternative with `retry_at`. This is similar to how `schedule_in` and `schedule_at` are used when {doc}`scheduling a job in the future <schedule>`. - ```python - import random - from procrastinate import Job, RetryDecision +```python +import random +from procrastinate import Job, RetryDecision + +class RandomRetryStrategy(procrastinate.BaseRetryStrategy): + max_attempts = 3 + min = 1 + max = 10 + + def get_retry_decision(self, *, exception:Exception, job:Job) -> RetryDecision: + if job.attempts >= self.max_attempts: + return RetryDecision(should_retry=False) + + wait = random.uniform(self.min, self.max) + + return RetryDecision( + retry_in={"seconds": wait}, # or retry_at (a datetime object) + priority=job.priority + 1, # optional + queue="another_queue", # optional + lock="another_lock", # optional + ) +``` - class RandomRetryStrategy(procrastinate.BaseRetryStrategy): - max_attempts = 3 - min = 1 - max = 10 +There is also a legacy `get_schedule_in` method that is deprecated and will be +removed in a future version in favor of the above `get_retry_decision` method.
- def get_retry_decision(self, *, exception:Exception, job:Job) -> RetryDecision: - if job.attempts >= max_attempts: - return RetryDecision(should_retry=False) +## Knowing whether a job is on its last attempt - wait = random.uniform(self.min, self.max) +By using `pass_context=True`, and introspecting the task's retry strategy, +you can know whether a currently executing job is on its last attempt: - return RetryDecision( - retry_in={"seconds": wait}, # or retry_at (a datetime object) - priority=job.priority + 1, # optional - queue="another_queue", # optional - lock="another_lock", # optional - ) - ``` +```python +@app.task(retry=10, pass_context=True) +def my_task(job_context: procrastinate.JobContext) -> None: + job = job_context.job + task = job_context.task + if task.retry.get_retry_decision(exception=Exception(), job=job) is None: + print("Warning: last attempt!") + + if random.random() < 0.9: + raise Exception +``` -There is also a legacy `get_schedule_in` method that is deprecated an will be -removed in a future version in favor of the above `get_retry_decision` method. +# Retry a Job Manually + +Sometimes a manual retry, for instance, after we fix an integration's configuration, can be practical. +This is why the job_manager offers an API to do so. Retrying a `failed` job will set the status of the job +back to `todo` while keeping the history of events in place. The action of retrying a failed job +also records a new Event of type `retried`. + +## Retry a job programmatically + +```python +app.job_manager.retry_job_by_id( + job.id, utils.utcnow(), job.priority, job.queue_name, job.lock +) +# or by using the async method +await app.job_manager.retry_job_by_id_async( + job.id, utils.utcnow(), job.priority, job.queue_name, job.lock +) +``` + +## For Django users + +An admin action `Retry Failed Job` can also be invoked from the table view of the +Procrastinate Jobs.
diff --git a/docs/howto/advanced/shutdown.md b/docs/howto/advanced/shutdown.md new file mode 100644 index 000000000..6ce32b787 --- /dev/null +++ b/docs/howto/advanced/shutdown.md @@ -0,0 +1,95 @@ +# Shutdown a worker + +A worker will keep running until: +- it has the option `wait=False` (default is `True`) and there is no job left +- it has the option `install_signal_handlers=True` (default is `True`) and receives a `SIGINT/SIGTERM` signal +- [task.cancel](https://docs.python.org/3/library/asyncio-task.html#asyncio.Task.cancel) is called on the task created from `app.run_worker_async` + +When a worker is requested to stop, it will attempt to gracefully shut down by waiting for all running jobs to complete. +If a `shutdown_graceful_timeout` option is specified, the worker will attempt to abort all jobs that have not completed by that time. Cancelling the `run_worker_async` task a second time also results in the worker aborting running jobs. + +The worker will then wait for all jobs to complete. + + +:::{note} +The worker aborts its remaining jobs by: +- setting the context so that `JobContext.should_abort` returns `AbortReason.SHUTDOWN` +- calling [task.cancel](https://docs.python.org/3/library/asyncio-task.html#asyncio.Task.cancel) on the underlying asyncio task that runs the job when the job is asynchronous + +Jobs that do not respect the request to abort will prevent the worker from shutting down until they complete. In a way, it will remain a graceful shutdown for those jobs even after `shutdown_graceful_timeout`. + +For more information, see {doc}`./cancellation`. + +Currently, Procrastinate does not provide a built-in method to forcefully terminate a worker. This is something you would want to do with your process manager (e.g. systemd, Docker, Kubernetes), which typically offers options to control process termination. In that case, your jobs will be considered stale, see {doc}`../production/retry_stalled_jobs`. 
+::: + +## Examples + +### Run a worker until no job is left + +```python +async with app.open_async(): + await app.run_worker_async(wait=False) + # at this point, the worker has gracefully shut down +``` + +### Run a worker until receiving a stop signal + +```python +async with app.open_async(): + # give jobs up to 10 seconds to complete when a stop signal is received + # all jobs still running after 10 seconds are aborted + # In the absence of shutdown_graceful_timeout, the task will complete when all jobs have completed. + await app.run_worker_async(shutdown_graceful_timeout=10) +``` + +### Run a worker until its Task is cancelled + +```python +async with app.open_async(): + worker = asyncio.create_task(app.run_worker_async()) + # eventually + worker.cancel() + try: + await worker + except asyncio.CancelledError: + # wait until all remaining jobs have completed, however long they take + await worker +``` + +### Run a worker until its Task is cancelled with a shutdown timeout + +```python +async with app.open_async(): + worker = asyncio.create_task(app.run_worker_async(shutdown_graceful_timeout=10)) + # eventually + worker.cancel() + try: + await worker + except asyncio.CancelledError: + # at this point, the worker is shut down. + # Any job that took longer than 10 seconds to complete has been aborted + pass +``` + +### Cancel a worker Task and explicitly abort jobs after timeout + +```python +async with app.open_async(): + # Notice that shutdown_graceful_timeout is not specified + worker = asyncio.create_task(app.run_worker_async()) + + # eventually + worker.cancel() + + try: + # give the jobs 10 seconds to complete and abort remaining jobs + await asyncio.wait_for(worker, timeout=10) + except asyncio.CancelledError: + # all jobs have completed within 10 seconds + pass + except asyncio.TimeoutError: + # one or more jobs took longer than 10 seconds and have been aborted.
+ pass + +``` diff --git a/docs/howto/basics.md b/docs/howto/basics.md index dbe638497..35917576d 100644 --- a/docs/howto/basics.md +++ b/docs/howto/basics.md @@ -10,6 +10,7 @@ basics/connector basics/open_connection basics/tasks basics/defer +basics/batch_defer basics/worker basics/command_line basics/windows diff --git a/docs/howto/basics/batch_defer.md b/docs/howto/basics/batch_defer.md new file mode 100644 index 000000000..64d02c421 --- /dev/null +++ b/docs/howto/basics/batch_defer.md @@ -0,0 +1,69 @@ +# Batch defer multiple jobs + +Instead of deferring each job one by one, you can defer multiple jobs at once using the `batch_defer` or `batch_defer_async` method. This is useful when you want to defer a large number of jobs in a single call, which can be more efficient than deferring them one by one. + +Let's assume the following task: + +```python +@app.task(queue="some_queue") +def my_task(a: int, b:int): + pass +``` + +## The direct way + +By using the sync method: + +```python +my_task.batch_defer( + {"a": 1, "b": 2}, + {"a": 3, "b": 4}, + {"a": 5, "b": 6}, +) +``` + +Or the async method: + +```python +await my_task.batch_defer_async( + {"a": 1, "b": 2}, + {"a": 3, "b": 4}, + {"a": 5, "b": 6}, +) +``` + +(If you have an iterable of payloads, you can use `*`, e.g. `my_task.batch_defer(*payloads)`.)
+ +## With parameters + +Using the sync defer method: + +```python +my_task.configure( + lock="the name of my lock", + schedule_in={"hours": 1}, + queue="not_the_default_queue" +).batch_defer( + {"a": 1, "b": 2}, + {"a": 3, "b": 4}, + {"a": 5, "b": 6}, +) + +# or +await my_task.configure( + lock="the name of my lock", + schedule_in={"hours": 1}, + queue="not_the_default_queue" +).batch_defer_async( + {"a": 1, "b": 2}, + {"a": 3, "b": 4}, + {"a": 5, "b": 6}, +) +``` + +:::{warning} +Don't batch defer multiple jobs where the task has the same configured queuing lock, because that +would directly raise an `AlreadyEnqueued` exception and none of those jobs are deferred (the +database transaction will be fully rolled back). +See {doc}`queueing locks <../advanced/queueing_locks>` for more information. +::: diff --git a/docs/howto/basics/defer.md b/docs/howto/basics/defer.md index d090e5fca..565ad2d04 100644 --- a/docs/howto/basics/defer.md +++ b/docs/howto/basics/defer.md @@ -61,6 +61,8 @@ await pattern.defer_async(b=3) await pattern.defer_async(b=4) ``` +For a more efficient way to defer multiple jobs see {doc}`batch defer <batch_defer>`. + ## Defer a job if you can't access the task This is useful if the code that defers jobs is not in the same code base as the code diff --git a/docs/howto/basics/worker.md b/docs/howto/basics/worker.md index c5adb23a7..f26c6b484 100644 --- a/docs/howto/basics/worker.md +++ b/docs/howto/basics/worker.md @@ -19,7 +19,7 @@ Naming the worker is optional. :::{note} {py:meth}`App.run_worker` will take care of launching an event loop, opening the app, -running the worker, and when it exists, closing the app and the event loop. +running the worker, and when it exits, closing the app and the event loop. On the other hand, {py:meth}`App.run_worker_async` needs to run while the app is open. The CLI takes care of opening the app.
@@ -31,13 +31,7 @@ When running the worker inside a bigger application, you may want to use `install_signal_handlers=False` so that the worker doesn't interfere with your application's signal handlers. -:::{note} -When you run the worker as a task, at any point, you can call `task.cancel()` -to request the worker to gracefully stop at the next opportunity. -You may then wait for it to actually stop using `await task` if you're -ready to wait indefinitely, or `asyncio.wait_for(task, timeout)` if you -want to set a timeout. -::: +For more information about stopping the worker, see {doc}`../advanced/shutdown`. Here is an example FastAPI application that does this: diff --git a/docs/howto/django/configuration.md b/docs/howto/django/configuration.md index 50cbd31c4..e54377fa3 100644 --- a/docs/howto/django/configuration.md +++ b/docs/howto/django/configuration.md @@ -10,12 +10,6 @@ how. For each Python version supported by Procrastinate, Procrastinate is tested with the latest Django version supported by that Python version. -As of September 2024, this means Procrastinate is tested with Django 4.2 for -Python 3.8 and 3.9, and Django 5.1 for Python 3.10+. This paragraph is likely -to be outdated in the future, the best way to get up-to-date info is to have a -look at the `tool.poetry.group.django.dependencies` section of the [package -configuration](https://github.com/procrastinate-org/procrastinate/blob/pydjver/pyproject.toml#L79-L83) - ## Installation & configuration To start, install procrastinate with: diff --git a/docs/howto/django/migrations.md b/docs/howto/django/migrations.md index ea80166ec..ca32e5d86 100644 --- a/docs/howto/django/migrations.md +++ b/docs/howto/django/migrations.md @@ -4,14 +4,18 @@ Procrastinate comes with its own migrations so don't forget to run `./manage.py migrate`.
Procrastinate provides 2 kinds of migrations: -- The Django equivalent of the `procrastinate` normal migrations, which are - used to create all of the PostgreSQL DDL objects used by Procrastinate. -- Specific noop migrations used for Django to understand the Procrastinate - Models (see {doc}`models`). + +- The Django equivalent of the `procrastinate` normal migrations, which are + used to create all of the PostgreSQL DDL objects used by Procrastinate. +- Specific noop migrations used for Django to understand the Procrastinate + Models (see {doc}`models`). Procrastinate's Django migrations are always kept in sync with your current version of Procrastinate, it's always a good idea to check the release notes and read the migrations when upgrading so that you know what will be happening to the database. -See {doc}`../production/migrations` for more information on migrations. +See {doc}`../production/migrations` for more information on migrations, especially +around `pre` and `post` migrations: if you deploy while the code is running, you'll +want to ensure you run the `pre-` migrations before you deploy the code and the +`post-` migrations after. diff --git a/docs/howto/django/models.md b/docs/howto/django/models.md index eff0c64dc..cbcfd2972 100644 --- a/docs/howto/django/models.md +++ b/docs/howto/django/models.md @@ -14,6 +14,7 @@ from procrastinate.contrib.django.models import ( ProcrastinateJob, ProcrastinateEvent, ProcrastinatePeriodicDefer, + ProcrastinateWorker, ) ProcrastinateJob.objects.filter(task_name="mytask").count() @@ -29,7 +30,7 @@ or events through the ORM. ```{eval-rst} .. 
automodule:: procrastinate.contrib.django.models - :members: ProcrastinateJob, ProcrastinateEvent, ProcrastinatePeriodicDefer + :members: ProcrastinateJob, ProcrastinateEvent, ProcrastinatePeriodicDefer, ProcrastinateWorker ``` diff --git a/docs/howto/production/delete_finished_jobs.md b/docs/howto/production/delete_finished_jobs.md index 14a7502a7..7c8096632 100644 --- a/docs/howto/production/delete_finished_jobs.md +++ b/docs/howto/production/delete_finished_jobs.md @@ -13,9 +13,9 @@ app.run_worker(delete_jobs="always") With `always`, every finished job will be deleted on completion. Other options are: -- `successful` to only delete successful jobs and keep failed jobs in the database - until explicit deletion. -- `never` to keep every job in the database, this is the default. +- `successful` to only delete successful jobs and keep failed jobs in the database + until explicit deletion. +- `never` to keep every job in the database, this is the default. You can also do this from the CLI: @@ -87,7 +87,7 @@ async def remove_old_jobs(context, timestamp): return await builtin_tasks.remove_old_jobs( context, max_hours=72, - remove_error=True, + remove_failed=True, remove_cancelled=True, remove_aborted=True, ) diff --git a/docs/howto/production/deployment.md b/docs/howto/production/deployment.md index 35704cd00..2c68ee82a 100644 --- a/docs/howto/production/deployment.md +++ b/docs/howto/production/deployment.md @@ -1,6 +1,6 @@ # Deploy Procrastinate in a real environment -While we know Procrastinate has been succesfully deployed in production, -the lib authors where not really involved in the process. We'd love to +While we know Procrastinate has been successfully deployed in production, +the lib authors were not really involved in the process. We'd love to hear from you if you're using Procrastinate in production, how it's working out for you, and if you have some advice to share. 
diff --git a/docs/howto/production/logging.md b/docs/howto/production/logging.md index 086da5523..d6676b1dc 100644 --- a/docs/howto/production/logging.md +++ b/docs/howto/production/logging.md @@ -7,7 +7,7 @@ messages, they are added as [extra] elements to the logs themselves. This way, you can adapt the logs to whatever format suits your needs the most, using a log filter: -``` +```python import logging class ProcrastinateLogFilter(logging.Filter): @@ -27,8 +27,56 @@ to see them is to use a structured logging library such as [`structlog`]. If you want a minimal example of a logging setup that displays the extra attributes without using third party logging libraries, look at the -[Django demo] +[Django demo]. + +:::{note} +When using the `procrastinate` CLI, procrastinate sets up the logs for you, +but the only customization available is `--log-format` and `--log-format-style`. +If you want to customize the log format further, you will need to run your own +script that calls procrastinate's app methods. +::: + +## `structlog` + +[`structlog`](https://www.structlog.org/en/stable/index.html) needs to be +configured in order to have `procrastinate`'s logs be formatted uniformly +with the rest of your application. + +The `structlog` docs have a [how to](https://www.structlog.org/en/stable/standard-library.html#rendering-using-structlog-based-formatters-within-logging).
+ +A minimal configuration would look like: + +```python +shared_processors = [ + structlog.contextvars.merge_contextvars, + structlog.stdlib.add_logger_name, + structlog.stdlib.add_log_level, + structlog.processors.StackInfoRenderer(), + structlog.dev.set_exc_info, +] + +structlog.configure( + processors=shared_processors + [structlog.stdlib.ProcessorFormatter.wrap_for_formatter], + logger_factory=structlog.stdlib.LoggerFactory(), + cache_logger_on_first_use=True, +) + +formatter = structlog.stdlib.ProcessorFormatter( + foreign_pre_chain=shared_processors, + processors=[ + structlog.stdlib.ProcessorFormatter.remove_processors_meta, + structlog.dev.ConsoleRenderer(event_key="message"), + ], +) + +handler = logging.StreamHandler() +handler.setFormatter(formatter) + +root = logging.getLogger() +root.addHandler(handler) +root.setLevel(log_level) +``` [extra]: https://timber.io/blog/the-pythonic-guide-to-logging/#adding-context [`structlog`]: https://www.structlog.org/en/stable/ -[Django demo]: https://github.com/procrastinate-org/procrastinate/blob/main/procrastinate_demos/demo_django/project/settings.py#L151 +[Django demo]: https://github.com/procrastinate-org/procrastinate/blob/main/procrastinate/demos/demo_django/project/settings.py#L151 diff --git a/docs/howto/production/migrations.md b/docs/howto/production/migrations.md index 3543a7225..8db7fb280 100644 --- a/docs/howto/production/migrations.md +++ b/docs/howto/production/migrations.md @@ -1,5 +1,10 @@ # Migrate the Procrastinate schema +:::{warning} +v3 introduces a new way to handle migrations. Hopefully, easier both for users +and maintainers. Read about pre- and post-migrations below. +::: + When the Procrastinate database schema evolves in new Procrastinate releases, new migrations are released alongside. Look at the [Release notes](https://github.com/procrastinate-org/procrastinate/releases) @@ -31,17 +36,22 @@ on PyPI. 
A simple way to list all the migrations is to use the command: $ procrastinate schema --migrations-path /home/me/my_venv/lib/python3.x/lib/site-packages/procrastinate/sql/migrations ``` + It's your responsibility to keep track of which migrations have already been applied or not. Thankfully, the names of procrastinate migrations should help you: they follow a specific pattern: ``` -xx.yy.zz_ab_very_short_description_of_the_migration.sql +{xx.yy.zz}_{ab}_{pre|post}_very_short_description_of_the_migration.sql ``` -- `xx.yy.zz` is the version of Procrastinate the migration script can be applied to. -- `ab` is the migration script's serial number, `01` being the first number in the - series. +- `xx.yy.zz` is the version of Procrastinate the migration script can be applied to. +- `ab` is the migration script's serial number, `01` being the first number in the + series. +- `pre` / `post`: indicates whether the migration should be applied before + upgrading the code (`pre`) or after upgrading the code (`post`) in the context + of a blue-green deployment. On old migrations, if `pre` or `post` is not + specified, it's a `post` migration. :::{note} There is a [debate](https://github.com/procrastinate-org/procrastinate/issues/1040) @@ -50,51 +60,46 @@ directions for how to use classic ones (apart from Django), please feel free to and/or contribute code or documentation if you have an opinion on this. ::: -Let's say you are currently using Procrastinate 1.9.0, and you want to update to -Procrastinate 1.15.0. In that case, before upgrading the Procrastinate Python package -(from 1.9.0 to 1.15.0), you will need to apply all the migration scripts whose versions -are greater than or equal to 1.9.0, and lower than 1.15.0 (1.9.0 ≤ version \< 1.15.0). -And you will apply them in version order, and, for a version, in serial number order. -For example, you will apply the following migration scripts, in that order: - -1. `01.09.00_01_xxxxx.sql` -2. `01.10.00_01_xxxxx.sql` -3.
`01.11.00_01_xxxxx.sql` -4. `01.11.00_02_xxxxx.sql` -5. `01.12.00_01_xxxxx.sql` -6. `01.14.00_01_xxxxx.sql` -7. `01.14.00_02_xxxxx.sql` +## How to apply migrations -If you want to upgrade from one Procrastinate major version to another, say from -Procrastinate 1.6.0 to 3.2.0, there are two options, depending on whether you can -interrupt the service to do the migration or not. - -## The easier way, with service interruption +### The easier way, with service interruption 1. Shut down the services that use Procrastinate: both the services that defer tasks and the workers. -2. Apply all the migration scripts (1.6.0 ≤ version \< 3.2.0). -3. Upgrade your code to the new Procrastinate version (3.2.0). +2. Apply all the migration scripts (`pre` & `post`), e.g. with: + +```console +$ MIGRATION_TO_APPLY="02.00.00_01_pre_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql +$ MIGRATION_TO_APPLY="02.00.00_01_post_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql +$ ... +``` + +3. Upgrade your code to the new Procrastinate version. 4. Start all the services. This, as you've noticed, only works if you're able to stop the services. -## The safer way, without service interruption +### The safer way, without service interruption -:::{note} -This only applies starting at Procrastinate 0.17.0. For previous versions, -you will have to interrupt the service or write custom migrations. -::: +If you need to ensure service continuity, you'll need to make intermediate upgrades. +Basically, you'll need to stop at every version that provides migrations. + +```console +$ MIGRATION_TO_APPLY="02.01.00_01_pre_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql + +$ yoursystem/deploy procrastinate 2.1.0 -If you care about service continuity, you'll need to make intermediate upgrades. 
For -example, to upgrade from Procrastinate 1.6.0 to 3.2.0, here are the steps you will need -to follow: +$ MIGRATION_TO_APPLY="02.01.00_01_post_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql -1. Apply all the migration scripts between 1.6.0 and 2.0.0 (1.6.0 ≤ version \< 2.0.0). -2. Live-upgrade the Procrastinate version used in your services, from 1.6.0 to 2.0.0. -3. Apply all the migration scripts between 2.0.0 and 3.0.0 (2.0.0 ≤ version \< 3.0.0). -4. Live-upgrade the Procrastinate version used in your services, from 2.0.0 to 3.0.0. -5. Apply all the migration scripts between 3.0.0 and 3.2.0 (3.0.0 ≤ version \< 3.2.0). -6. Live-upgrade the Procrastinate version used in your services, from 3.0.0 and 3.2.0. +$ MIGRATION_TO_APPLY="02.02.00_01_pre_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql -Following this process you can go from 1.6.0 to 3.2.0 with no service discontinuity. +$ yoursystem/deploy procrastinate 2.2.0 + +$ MIGRATION_TO_APPLY="02.02.00_01_post_some_migration.sql" +$ cat $(procrastinate schema --migrations-path)/${MIGRATION_TO_APPLY} | psql +``` diff --git a/docs/howto/production/retry_stalled_jobs.md b/docs/howto/production/retry_stalled_jobs.md index c8018b041..9d06dfdfa 100644 --- a/docs/howto/production/retry_stalled_jobs.md +++ b/docs/howto/production/retry_stalled_jobs.md @@ -7,27 +7,42 @@ terminate immediately, possibly leaving jobs with the `doing` status in the queu And, if no specific action is taken, these *stalled* jobs will remain in the queue forever, and their execution will never resume. -To address that problem, Procrastinate offers functions that can be used in a periodic -task for retrying stalled jobs. Add the following in your code to enable automatic retry -of tasks after some time: +To address this problem, Procrastinate workers update heartbeats at a regular +interval (every 10 seconds by default). 
If a worker is terminated without a regular +shutdown, the heartbeat of that worker will not be updated, and the worker will be +considered stalled. Jobs in the `doing` state of such stalled workers are considered +stalled as well and can be fetched with the {py:meth}`JobManager.get_stalled_jobs` +method. + +:::{note} +Regular worker shutdowns delete the worker's heartbeat from the database. Heartbeats +of stalled workers are also pruned after a certain duration (30 seconds by default) to +avoid having too many heartbeats of old worker runs in the database, but stalled jobs +can still be detected. +::: + +Those stalled jobs can then be retried, for example, by a periodic task. To enable +this, add this task to your code: + +```python -# time in seconds for running jobs to be deemed as stalled -RUNNING_JOBS_MAX_TIME = 30 - @app.periodic(cron="*/10 * * * *") @app.task(queueing_lock="retry_stalled_jobs", pass_context=True) async def retry_stalled_jobs(context, timestamp): - stalled_jobs = await app.job_manager.get_stalled_jobs( - nb_seconds=RUNNING_JOBS_MAX_TIME - ) + stalled_jobs = await app.job_manager.get_stalled_jobs() for job in stalled_jobs: await app.job_manager.retry_job(job) ``` This defines a periodic task, configured to be deferred at every 10th minute. The task -retrieves all the jobs that have been in the `doing` status for more than -30 seconds, and restarts them (marking them with the `todo` status in the database). +retrieves all the jobs that have been in the `doing` status of workers that have not +received a heartbeat within the last 30 seconds (by default). This duration can be +configured with the `seconds_since_heartbeat` parameter of the `get_stalled_jobs` method. + +:::{note} +If you change the `seconds_since_heartbeat` parameter, make sure to also check the +`update_heartbeat_interval` and `stalled_worker_timeout` parameters of the worker +and adjust them accordingly.
+::: With this, if you have multiple workers, and, for some reason, one of them gets killed while running jobs, then one of the remaining workers will run the @@ -38,7 +53,10 @@ on specific parameters, or the duration before a task is considered stalled should depend on the task), you're free to make the periodic task function more complex and add your logic to it. See {py:meth}`JobManager.get_stalled_jobs` for details. -Also, note that if a task is considered stalled, it will be retried, but if it's -actually running, then you may have your task running twice. Make sure to only retry -a task when you're reasonably sure that it is not running anymore, so make sure your -stalled duration is sufficient. +:::{warning} +`get_stalled_jobs` also accepts a `nb_seconds` parameter, which, if set, fetches +stalled jobs that have been in the `doing` state for more than the specified seconds +without even considering the worker heartbeat. This parameter is deprecated and will be +removed in a future major release as it may lead to wrongly retrying jobs that are still +running. +::: diff --git a/docs/reference.rst b/docs/reference.rst index a0e9f60f3..b3883ab45 100644 --- a/docs/reference.rst +++ b/docs/reference.rst @@ -32,7 +32,7 @@ When tasks are created with argument ``pass_context``, they are provided a `JobContext` argument: .. autoclass:: procrastinate.JobContext - :members: app, worker_name, worker_queues, job, task + :members: app, worker_name, worker_queues, job, task, should_abort Blueprints ---------- @@ -80,7 +80,7 @@ Exceptions ..
automodule:: procrastinate.exceptions :members: ProcrastinateException, LoadFromPathError, ConnectorException, AlreadyEnqueued, AppNotOpen, TaskNotFound, - UnboundTaskError + UnboundTaskError, JobAborted Job statuses ------------ diff --git a/noxfile.py b/noxfile.py new file mode 100644 index 000000000..c94953286 --- /dev/null +++ b/noxfile.py @@ -0,0 +1,111 @@ +from __future__ import annotations + +import os +import pathlib +import shutil +import tempfile + +import nox # type: ignore +import packaging.version + + +def fetch_latest_tag(session: nox.Session) -> packaging.version.Version: + if "LATEST_TAG" in os.environ: + return packaging.version.Version(os.environ["LATEST_TAG"]) + + session.run("git", "fetch", "--tags", external=True) + out = session.run("git", "tag", "--list", external=True, silent=True) + assert out + return max(packaging.version.Version(tag) for tag in out.splitlines()) + + +def get_pre_migration(latest_tag: packaging.version.Version) -> str: + migrations_folder = ( + pathlib.Path(__file__).parent / "procrastinate" / "sql" / "migrations" + ) + migrations = sorted(migrations_folder.glob("*.sql")) + pre_migration: pathlib.Path | None = None + for migration in migrations: + mig_version = packaging.version.Version(migration.name.split("_")[0]) + if mig_version > latest_tag and "_post_" in migration.name: + break + + pre_migration = migration + + assert pre_migration is not None + return pre_migration.name + + +@nox.session +def current_version_with_post_migration(session: nox.Session): + session.run("uv", "sync", "--all-extras", external=True) + session.run("uv", "run", "pytest", *session.posargs, external=True) + + +@nox.session +def current_version_without_post_migration(session: nox.Session): + latest_tag = fetch_latest_tag(session) + pre_migration = get_pre_migration(latest_tag) + + session.run( + "uv", + "sync", + "--all-extras", + "--group", + "test", + external=True, + env={"UV_PROJECT_ENVIRONMENT": session.virtualenv.location}, + ) + 
session.run( + "pytest", + f"--migrate-until={pre_migration}", + "./tests/acceptance", + *session.posargs, + external=True, + ) + + +@nox.session +def stable_version_without_post_migration(session: nox.Session): + latest_tag = fetch_latest_tag(session) + pre_migration = get_pre_migration(latest_tag) + + with tempfile.TemporaryDirectory() as temp_dir: + session.chdir(temp_dir) + + temp_path = pathlib.Path(temp_dir) + base_path = pathlib.Path(__file__).parent + + # Install test dependencies and copy tests + shutil.copytree(base_path / "tests", temp_path / "tests") + shutil.copy(base_path / "pyproject.toml", temp_path / "pyproject.toml") + shutil.copy(base_path / "uv.lock", temp_path / "uv.lock") + session.run( + "uv", + "sync", + "--all-extras", + "--group", + "test", + "--no-install-project", + external=True, + env={ + "UV_PROJECT_ENVIRONMENT": session.virtualenv.location, + }, + ) + + # Install latest procrastinate from GitHub + # During a tag release, we have not yet published the new version to PyPI + # so we need to install it from GitHub + session.install( + f"procrastinate @ git+https://github.com/procrastinate-org/procrastinate.git@{latest_tag}" + ) + + session.run( + "pytest", + f"--migrate-until={pre_migration}", + f"--latest-version={latest_tag}", + "./tests/acceptance", + *session.posargs, + # This is necessary for pytest-django, due to not installing the project + env={"PYTHONPATH": temp_dir}, + ) diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 75584f562..000000000 --- a/poetry.lock +++ /dev/null @@ -1,1889 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.4 and should not be changed by hand. - -[[package]] -name = "aiopg" -version = "1.4.0" -description = "Postgres integration with asyncio." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "aiopg-1.4.0-py3-none-any.whl", hash = "sha256:aea46e8aff30b039cfa818e6db4752c97656e893fc75e5a5dc57355a9e9dedbd"}, - {file = "aiopg-1.4.0.tar.gz", hash = "sha256:116253bef86b4d954116716d181e9a0294037f266718b2e1c9766af995639d71"}, -] - -[package.dependencies] -async-timeout = ">=3.0,<5.0" -psycopg2-binary = ">=2.9.5" - -[package.extras] -sa = ["sqlalchemy[postgresql-psycopg2binary] (>=1.3,<1.5)"] - -[[package]] -name = "alabaster" -version = "0.7.16" -description = "A light, configurable Sphinx theme" -optional = false -python-versions = ">=3.9" -files = [ - {file = "alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92"}, - {file = "alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65"}, -] - -[[package]] -name = "anyio" -version = "4.7.0" -description = "High level compatibility layer for multiple asynchronous event loop implementations" -optional = false -python-versions = ">=3.9" -files = [ - {file = "anyio-4.7.0-py3-none-any.whl", hash = "sha256:ea60c3723ab42ba6fff7e8ccb0488c898ec538ff4df1f1d5e642c3601d07e352"}, - {file = "anyio-4.7.0.tar.gz", hash = "sha256:2f834749c602966b7d456a7567cafcb309f96482b5081d14ac93ccd457f9dd48"}, -] - -[package.dependencies] -exceptiongroup = {version = ">=1.0.2", markers = "python_version < \"3.11\""} -idna = ">=2.8" -sniffio = ">=1.1" -typing_extensions = {version = ">=4.5", markers = "python_version < \"3.13\""} - -[package.extras] -doc = ["Sphinx (>=7.4,<8.0)", "packaging", "sphinx-autodoc-typehints (>=1.2.0)", "sphinx_rtd_theme"] -test = ["anyio[trio]", "coverage[toml] (>=7)", "exceptiongroup (>=1.2.0)", "hypothesis (>=4.0)", "psutil (>=5.9)", "pytest (>=7.0)", "pytest-mock (>=3.6.1)", "trustme", "truststore (>=0.9.1)", "uvloop (>=0.21)"] -trio = ["trio (>=0.26.1)"] - -[[package]] -name = "asgiref" -version = "3.8.1" -description = "ASGI specs, 
helper code, and adapters" -optional = false -python-versions = ">=3.8" -files = [ - {file = "asgiref-3.8.1-py3-none-any.whl", hash = "sha256:3e1e3ecc849832fe52ccf2cb6686b7a55f82bb1d6aee72a58826471390335e47"}, - {file = "asgiref-3.8.1.tar.gz", hash = "sha256:c343bd80a0bec947a9860adb4c432ffa7db769836c64238fc34bdc3fec84d590"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4", markers = "python_version < \"3.11\""} - -[package.extras] -tests = ["mypy (>=0.800)", "pytest", "pytest-asyncio"] - -[[package]] -name = "async-timeout" -version = "4.0.3" -description = "Timeout context manager for asyncio programs" -optional = false -python-versions = ">=3.7" -files = [ - {file = "async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f"}, - {file = "async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028"}, -] - -[[package]] -name = "attrs" -version = "24.2.0" -description = "Classes Without Boilerplate" -optional = false -python-versions = ">=3.7" -files = [ - {file = "attrs-24.2.0-py3-none-any.whl", hash = "sha256:81921eb96de3191c8258c199618104dd27ac608d9366f5e35d011eae1867ede2"}, - {file = "attrs-24.2.0.tar.gz", hash = "sha256:5cfb1b9148b5b086569baec03f20d7b6bf3bcacc9a42bebf87ffaaca362f6346"}, -] - -[package.extras] -benchmark = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-codspeed", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -cov = ["cloudpickle", "coverage[toml] (>=5.3)", "hypothesis", "mypy (>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -dev = ["cloudpickle", "hypothesis", "mypy (>=1.11.1)", "pre-commit", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -docs = ["cogapp", "furo", "myst-parser", "sphinx", "sphinx-notfound-page", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -tests = ["cloudpickle", "hypothesis", "mypy 
(>=1.11.1)", "pympler", "pytest (>=4.3.0)", "pytest-mypy-plugins", "pytest-xdist[psutil]"] -tests-mypy = ["mypy (>=1.11.1)", "pytest-mypy-plugins"] - -[[package]] -name = "babel" -version = "2.16.0" -description = "Internationalization utilities" -optional = false -python-versions = ">=3.8" -files = [ - {file = "babel-2.16.0-py3-none-any.whl", hash = "sha256:368b5b98b37c06b7daf6696391c3240c938b37767d4584413e8438c5c435fa8b"}, - {file = "babel-2.16.0.tar.gz", hash = "sha256:d1f3554ca26605fe173f3de0c65f750f5a42f924499bf134de6423582298e316"}, -] - -[package.extras] -dev = ["freezegun (>=1.0,<2.0)", "pytest (>=6.0)", "pytest-cov"] - -[[package]] -name = "beautifulsoup4" -version = "4.12.3" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.3-py3-none-any.whl", hash = "sha256:b80878c9f40111313e55da8ba20bdba06d8fa3969fc68304167741bbf9e082ed"}, - {file = "beautifulsoup4-4.12.3.tar.gz", hash = "sha256:74e3d1928edc070d21748185c46e3fb33490f22f52a3addee9aee0f4f7781051"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -cchardet = ["cchardet"] -chardet = ["chardet"] -charset-normalizer = ["charset-normalizer"] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "certifi" -version = "2024.8.30" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2024.8.30-py3-none-any.whl", hash = "sha256:922820b53db7a7257ffbda3f597266d435245903d80737e34f8a45ff3e3230d8"}, - {file = "certifi-2024.8.30.tar.gz", hash = "sha256:bec941d2aa8195e248a60b31ff9f0558284cf01a52591ceda73ea9afffd69fd9"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.4.0" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." 
-optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:4f9fc98dad6c2eaa32fc3af1417d95b5e3d08aff968df0cd320066def971f9a6"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0de7b687289d3c1b3e8660d0741874abe7888100efe14bd0f9fd7141bcbda92b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5ed2e36c3e9b4f21dd9422f6893dec0abf2cca553af509b10cd630f878d3eb99"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:40d3ff7fc90b98c637bda91c89d51264a3dcf210cade3a2c6f838c7268d7a4ca"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1110e22af8ca26b90bd6364fe4c763329b0ebf1ee213ba32b68c73de5752323d"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:86f4e8cca779080f66ff4f191a685ced73d2f72d50216f7112185dc02b90b9b7"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f683ddc7eedd742e2889d2bfb96d69573fde1d92fcb811979cdb7165bb9c7d3"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:27623ba66c183eca01bf9ff833875b459cad267aeeb044477fedac35e19ba907"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:f606a1881d2663630ea5b8ce2efe2111740df4b687bd78b34a8131baa007f79b"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:0b309d1747110feb25d7ed6b01afdec269c647d382c857ef4663bbe6ad95a912"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:136815f06a3ae311fae551c3df1f998a1ebd01ddd424aa5603a4336997629e95"}, - {file = 
"charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:14215b71a762336254351b00ec720a8e85cada43b987da5a042e4ce3e82bd68e"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:79983512b108e4a164b9c8d34de3992f76d48cadc9554c9e60b43f308988aabe"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win32.whl", hash = "sha256:c94057af19bc953643a33581844649a7fdab902624d2eb739738a30e2b3e60fc"}, - {file = "charset_normalizer-3.4.0-cp310-cp310-win_amd64.whl", hash = "sha256:55f56e2ebd4e3bc50442fbc0888c9d8c94e4e06a933804e2af3e89e2f9c1c749"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:0d99dd8ff461990f12d6e42c7347fd9ab2532fb70e9621ba520f9e8637161d7c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c57516e58fd17d03ebe67e181a4e4e2ccab1168f8c2976c6a334d4f819fe5944"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:6dba5d19c4dfab08e58d5b36304b3f92f3bd5d42c1a3fa37b5ba5cdf6dfcbcee"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bf4475b82be41b07cc5e5ff94810e6a01f276e37c2d55571e3fe175e467a1a1c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ce031db0408e487fd2775d745ce30a7cd2923667cf3b69d48d219f1d8f5ddeb6"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8ff4e7cdfdb1ab5698e675ca622e72d58a6fa2a8aa58195de0c0061288e6e3ea"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3710a9751938947e6327ea9f3ea6332a09bf0ba0c09cae9cb1f250bd1f1549bc"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:82357d85de703176b5587dbe6ade8ff67f9f69a41c0733cf2425378b49954de5"}, 
- {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:47334db71978b23ebcf3c0f9f5ee98b8d65992b65c9c4f2d34c2eaf5bcaf0594"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8ce7fd6767a1cc5a92a639b391891bf1c268b03ec7e021c7d6d902285259685c"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:f1a2f519ae173b5b6a2c9d5fa3116ce16e48b3462c8b96dfdded11055e3d6365"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:63bc5c4ae26e4bc6be6469943b8253c0fd4e4186c43ad46e713ea61a0ba49129"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bcb4f8ea87d03bc51ad04add8ceaf9b0f085ac045ab4d74e73bbc2dc033f0236"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win32.whl", hash = "sha256:9ae4ef0b3f6b41bad6366fb0ea4fc1d7ed051528e113a60fa2a65a9abb5b1d99"}, - {file = "charset_normalizer-3.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:cee4373f4d3ad28f1ab6290684d8e2ebdb9e7a1b74fdc39e4c211995f77bec27"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0713f3adb9d03d49d365b70b84775d0a0d18e4ab08d12bc46baa6132ba78aaf6"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:de7376c29d95d6719048c194a9cf1a1b0393fbe8488a22008610b0361d834ecf"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4a51b48f42d9358460b78725283f04bddaf44a9358197b889657deba38f329db"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b295729485b06c1a0683af02a9e42d2caa9db04a373dc38a6a58cdd1e8abddf1"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ee803480535c44e7f5ad00788526da7d85525cfefaf8acf8ab9a310000be4b03"}, - {file = 
"charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3d59d125ffbd6d552765510e3f31ed75ebac2c7470c7274195b9161a32350284"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cda06946eac330cbe6598f77bb54e690b4ca93f593dee1568ad22b04f347c15"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:07afec21bbbbf8a5cc3651aa96b980afe2526e7f048fdfb7f1014d84acc8b6d8"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6b40e8d38afe634559e398cc32b1472f376a4099c75fe6299ae607e404c033b2"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:b8dcd239c743aa2f9c22ce674a145e0a25cb1566c495928440a181ca1ccf6719"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:84450ba661fb96e9fd67629b93d2941c871ca86fc38d835d19d4225ff946a631"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:44aeb140295a2f0659e113b31cfe92c9061622cadbc9e2a2f7b8ef6b1e29ef4b"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1db4e7fefefd0f548d73e2e2e041f9df5c59e178b4c72fbac4cc6f535cfb1565"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win32.whl", hash = "sha256:5726cf76c982532c1863fb64d8c6dd0e4c90b6ece9feb06c9f202417a31f7dd7"}, - {file = "charset_normalizer-3.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:b197e7094f232959f8f20541ead1d9862ac5ebea1d58e9849c1bf979255dfac9"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:dd4eda173a9fcccb5f2e2bd2a9f423d180194b1bf17cf59e3269899235b2a114"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e9e3c4c9e1ed40ea53acf11e2a386383c3304212c965773704e4603d589343ed"}, - {file = 
"charset_normalizer-3.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:92a7e36b000bf022ef3dbb9c46bfe2d52c047d5e3f3343f43204263c5addc250"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:54b6a92d009cbe2fb11054ba694bc9e284dad30a26757b1e372a1fdddaf21920"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ffd9493de4c922f2a38c2bf62b831dcec90ac673ed1ca182fe11b4d8e9f2a64"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:35c404d74c2926d0287fbd63ed5d27eb911eb9e4a3bb2c6d294f3cfd4a9e0c23"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4796efc4faf6b53a18e3d46343535caed491776a22af773f366534056c4e1fbc"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e7fdd52961feb4c96507aa649550ec2a0d527c086d284749b2f582f2d40a2e0d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:92db3c28b5b2a273346bebb24857fda45601aef6ae1c011c0a997106581e8a88"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ab973df98fc99ab39080bfb0eb3a925181454d7c3ac8a1e695fddfae696d9e90"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:4b67fdab07fdd3c10bb21edab3cbfe8cf5696f453afce75d815d9d7223fbe88b"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:aa41e526a5d4a9dfcfbab0716c7e8a1b215abd3f3df5a45cf18a12721d31cb5d"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ffc519621dce0c767e96b9c53f09c5d215578e10b02c285809f76509a3931482"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win32.whl", hash = 
"sha256:f19c1585933c82098c2a520f8ec1227f20e339e33aca8fa6f956f6691b784e67"}, - {file = "charset_normalizer-3.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:707b82d19e65c9bd28b81dde95249b07bf9f5b90ebe1ef17d9b57473f8a64b7b"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:dbe03226baf438ac4fda9e2d0715022fd579cb641c4cf639fa40d53b2fe6f3e2"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dd9a8bd8900e65504a305bf8ae6fa9fbc66de94178c420791d0293702fce2df7"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b8831399554b92b72af5932cdbbd4ddc55c55f631bb13ff8fe4e6536a06c5c51"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a14969b8691f7998e74663b77b4c36c0337cb1df552da83d5c9004a93afdb574"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dcaf7c1524c0542ee2fc82cc8ec337f7a9f7edee2532421ab200d2b920fc97cf"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:425c5f215d0eecee9a56cdb703203dda90423247421bf0d67125add85d0c4455"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:d5b054862739d276e09928de37c79ddeec42a6e1bfc55863be96a36ba22926f6"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_i686.whl", hash = "sha256:f3e73a4255342d4eb26ef6df01e3962e73aa29baa3124a8e824c5d3364a65748"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_ppc64le.whl", hash = "sha256:2f6c34da58ea9c1a9515621f4d9ac379871a8f21168ba1b5e09d74250de5ad62"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_s390x.whl", hash = "sha256:f09cb5a7bbe1ecae6e87901a2eb23e0256bb524a79ccc53eb0b7629fbe7677c4"}, - {file = 
"charset_normalizer-3.4.0-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:0099d79bdfcf5c1f0c2c72f91516702ebf8b0b8ddd8905f97a8aecf49712c621"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win32.whl", hash = "sha256:9c98230f5042f4945f957d006edccc2af1e03ed5e37ce7c373f00a5a4daa6149"}, - {file = "charset_normalizer-3.4.0-cp37-cp37m-win_amd64.whl", hash = "sha256:62f60aebecfc7f4b82e3f639a7d1433a20ec32824db2199a11ad4f5e146ef5ee"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:af73657b7a68211996527dbfeffbb0864e043d270580c5aef06dc4b659a4b578"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:cab5d0b79d987c67f3b9e9c53f54a61360422a5a0bc075f43cab5621d530c3b6"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:9289fd5dddcf57bab41d044f1756550f9e7cf0c8e373b8cdf0ce8773dc4bd417"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b493a043635eb376e50eedf7818f2f322eabbaa974e948bd8bdd29eb7ef2a51"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9fa2566ca27d67c86569e8c85297aaf413ffab85a8960500f12ea34ff98e4c41"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:a8e538f46104c815be19c975572d74afb53f29650ea2025bbfaef359d2de2f7f"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6fd30dc99682dc2c603c2b315bded2799019cea829f8bf57dc6b61efde6611c8"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2006769bd1640bdf4d5641c69a3d63b71b81445473cac5ded39740a226fa88ab"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:dc15e99b2d8a656f8e666854404f1ba54765871104e50c8e9813af8a7db07f12"}, - {file = 
"charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:ab2e5bef076f5a235c3774b4f4028a680432cded7cad37bba0fd90d64b187d19"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:4ec9dd88a5b71abfc74e9df5ebe7921c35cbb3b641181a531ca65cdb5e8e4dea"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_s390x.whl", hash = "sha256:43193c5cda5d612f247172016c4bb71251c784d7a4d9314677186a838ad34858"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aa693779a8b50cd97570e5a0f343538a8dbd3e496fa5dcb87e29406ad0299654"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win32.whl", hash = "sha256:7706f5850360ac01d80c89bcef1640683cc12ed87f42579dab6c5d3ed6888613"}, - {file = "charset_normalizer-3.4.0-cp38-cp38-win_amd64.whl", hash = "sha256:c3e446d253bd88f6377260d07c895816ebf33ffffd56c1c792b13bff9c3e1ade"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:980b4f289d1d90ca5efcf07958d3eb38ed9c0b7676bf2831a54d4f66f9c27dfa"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:f28f891ccd15c514a0981f3b9db9aa23d62fe1a99997512b0491d2ed323d229a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8aacce6e2e1edcb6ac625fb0f8c3a9570ccc7bfba1f63419b3769ccf6a00ed0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:bd7af3717683bea4c87acd8c0d3d5b44d56120b26fd3f8a692bdd2d5260c620a"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5ff2ed8194587faf56555927b3aa10e6fb69d931e33953943bc4f837dfee2242"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e91f541a85298cf35433bf66f3fab2a4a2cff05c127eeca4af174f6d497f0d4b"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash 
= "sha256:309a7de0a0ff3040acaebb35ec45d18db4b28232f21998851cfa709eeff49d62"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:285e96d9d53422efc0d7a17c60e59f37fbf3dfa942073f666db4ac71e8d726d0"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:5d447056e2ca60382d460a604b6302d8db69476fd2015c81e7c35417cfabe4cd"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:20587d20f557fe189b7947d8e7ec5afa110ccf72a3128d61a2a387c3313f46be"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:130272c698667a982a5d0e626851ceff662565379baf0ff2cc58067b81d4f11d"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:ab22fbd9765e6954bc0bcff24c25ff71dcbfdb185fcdaca49e81bac68fe724d3"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7782afc9b6b42200f7362858f9e73b1f8316afb276d316336c0ec3bd73312742"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win32.whl", hash = "sha256:2de62e8801ddfff069cd5c504ce3bc9672b23266597d4e4f50eda28846c322f2"}, - {file = "charset_normalizer-3.4.0-cp39-cp39-win_amd64.whl", hash = "sha256:95c3c157765b031331dd4db3c775e58deaee050a3042fcad72cbc4189d7c8dca"}, - {file = "charset_normalizer-3.4.0-py3-none-any.whl", hash = "sha256:fe9f97feb71aa9896b81973a7bbada8c49501dc73e58a10fcef6663af95e5079"}, - {file = "charset_normalizer-3.4.0.tar.gz", hash = "sha256:223217c3d4f82c3ac5e29032b3f1c2eb0fb591b72161f86d93f5719079dae93e"}, -] - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." 
-optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "contextlib2" -version = "21.6.0" -description = "Backports and enhancements for the contextlib module" -optional = false -python-versions = ">=3.6" -files = [ - {file = "contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f"}, - {file = "contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869"}, -] - -[[package]] -name = "coverage" -version = "7.6.9" -description = "Code coverage measurement for Python" -optional = false -python-versions = ">=3.9" -files = [ - {file = "coverage-7.6.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:85d9636f72e8991a1706b2b55b06c27545448baf9f6dbf51c4004609aacd7dcb"}, - {file = "coverage-7.6.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:608a7fd78c67bee8936378299a6cb9f5149bb80238c7a566fc3e6717a4e68710"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96d636c77af18b5cb664ddf12dab9b15a0cfe9c0bde715da38698c8cea748bfa"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d75cded8a3cff93da9edc31446872d2997e327921d8eed86641efafd350e1df1"}, - {file = "coverage-7.6.9-cp310-cp310-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f7b15f589593110ae767ce997775d645b47e5cbbf54fd322f8ebea6277466cec"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:44349150f6811b44b25574839b39ae35291f6496eb795b7366fef3bd3cf112d3"}, - 
{file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:d891c136b5b310d0e702e186d70cd16d1119ea8927347045124cb286b29297e5"}, - {file = "coverage-7.6.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:db1dab894cc139f67822a92910466531de5ea6034ddfd2b11c0d4c6257168073"}, - {file = "coverage-7.6.9-cp310-cp310-win32.whl", hash = "sha256:41ff7b0da5af71a51b53f501a3bac65fb0ec311ebed1632e58fc6107f03b9198"}, - {file = "coverage-7.6.9-cp310-cp310-win_amd64.whl", hash = "sha256:35371f8438028fdccfaf3570b31d98e8d9eda8bb1d6ab9473f5a390969e98717"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:932fc826442132dde42ee52cf66d941f581c685a6313feebed358411238f60f9"}, - {file = "coverage-7.6.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:085161be5f3b30fd9b3e7b9a8c301f935c8313dcf928a07b116324abea2c1c2c"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ccc660a77e1c2bf24ddbce969af9447a9474790160cfb23de6be4fa88e3951c7"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c69e42c892c018cd3c8d90da61d845f50a8243062b19d228189b0224150018a9"}, - {file = "coverage-7.6.9-cp311-cp311-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0824a28ec542a0be22f60c6ac36d679e0e262e5353203bea81d44ee81fe9c6d4"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4401ae5fc52ad8d26d2a5d8a7428b0f0c72431683f8e63e42e70606374c311a1"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:98caba4476a6c8d59ec1eb00c7dd862ba9beca34085642d46ed503cc2d440d4b"}, - {file = "coverage-7.6.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ee5defd1733fd6ec08b168bd4f5387d5b322f45ca9e0e6c817ea6c4cd36313e3"}, - {file = "coverage-7.6.9-cp311-cp311-win32.whl", hash = 
"sha256:f2d1ec60d6d256bdf298cb86b78dd715980828f50c46701abc3b0a2b3f8a0dc0"}, - {file = "coverage-7.6.9-cp311-cp311-win_amd64.whl", hash = "sha256:0d59fd927b1f04de57a2ba0137166d31c1a6dd9e764ad4af552912d70428c92b"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:99e266ae0b5d15f1ca8d278a668df6f51cc4b854513daab5cae695ed7b721cf8"}, - {file = "coverage-7.6.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:9901d36492009a0a9b94b20e52ebfc8453bf49bb2b27bca2c9706f8b4f5a554a"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:abd3e72dd5b97e3af4246cdada7738ef0e608168de952b837b8dd7e90341f015"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ff74026a461eb0660366fb01c650c1d00f833a086b336bdad7ab00cc952072b3"}, - {file = "coverage-7.6.9-cp312-cp312-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65dad5a248823a4996724a88eb51d4b31587aa7aa428562dbe459c684e5787ae"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:22be16571504c9ccea919fcedb459d5ab20d41172056206eb2994e2ff06118a4"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:0f957943bc718b87144ecaee70762bc2bc3f1a7a53c7b861103546d3a403f0a6"}, - {file = "coverage-7.6.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:0ae1387db4aecb1f485fb70a6c0148c6cdaebb6038f1d40089b1fc84a5db556f"}, - {file = "coverage-7.6.9-cp312-cp312-win32.whl", hash = "sha256:1a330812d9cc7ac2182586f6d41b4d0fadf9be9049f350e0efb275c8ee8eb692"}, - {file = "coverage-7.6.9-cp312-cp312-win_amd64.whl", hash = "sha256:b12c6b18269ca471eedd41c1b6a1065b2f7827508edb9a7ed5555e9a56dcfc97"}, - {file = "coverage-7.6.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:899b8cd4781c400454f2f64f7776a5d87bbd7b3e7f7bda0cb18f857bb1334664"}, - {file = 
"coverage-7.6.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:61f70dc68bd36810972e55bbbe83674ea073dd1dcc121040a08cdf3416c5349c"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8a289d23d4c46f1a82d5db4abeb40b9b5be91731ee19a379d15790e53031c014"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7e216d8044a356fc0337c7a2a0536d6de07888d7bcda76febcb8adc50bdbbd00"}, - {file = "coverage-7.6.9-cp313-cp313-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3c026eb44f744acaa2bda7493dad903aa5bf5fc4f2554293a798d5606710055d"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e77363e8425325384f9d49272c54045bbed2f478e9dd698dbc65dbc37860eb0a"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:777abfab476cf83b5177b84d7486497e034eb9eaea0d746ce0c1268c71652077"}, - {file = "coverage-7.6.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:447af20e25fdbe16f26e84eb714ba21d98868705cb138252d28bc400381f6ffb"}, - {file = "coverage-7.6.9-cp313-cp313-win32.whl", hash = "sha256:d872ec5aeb086cbea771c573600d47944eea2dcba8be5f3ee649bfe3cb8dc9ba"}, - {file = "coverage-7.6.9-cp313-cp313-win_amd64.whl", hash = "sha256:fd1213c86e48dfdc5a0cc676551db467495a95a662d2396ecd58e719191446e1"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:ba9e7484d286cd5a43744e5f47b0b3fb457865baf07bafc6bee91896364e1419"}, - {file = "coverage-7.6.9-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e5ea1cf0872ee455c03e5674b5bca5e3e68e159379c1af0903e89f5eba9ccc3a"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2d10e07aa2b91835d6abec555ec8b2733347956991901eea6ffac295f83a30e4"}, - {file = 
"coverage-7.6.9-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:13a9e2d3ee855db3dd6ea1ba5203316a1b1fd8eaeffc37c5b54987e61e4194ae"}, - {file = "coverage-7.6.9-cp313-cp313t-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c38bf15a40ccf5619fa2fe8f26106c7e8e080d7760aeccb3722664c8656b030"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d5275455b3e4627c8e7154feaf7ee0743c2e7af82f6e3b561967b1cca755a0be"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:8f8770dfc6e2c6a2d4569f411015c8d751c980d17a14b0530da2d7f27ffdd88e"}, - {file = "coverage-7.6.9-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8d2dfa71665a29b153a9681edb1c8d9c1ea50dfc2375fb4dac99ea7e21a0bcd9"}, - {file = "coverage-7.6.9-cp313-cp313t-win32.whl", hash = "sha256:5e6b86b5847a016d0fbd31ffe1001b63355ed309651851295315031ea7eb5a9b"}, - {file = "coverage-7.6.9-cp313-cp313t-win_amd64.whl", hash = "sha256:97ddc94d46088304772d21b060041c97fc16bdda13c6c7f9d8fcd8d5ae0d8611"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:adb697c0bd35100dc690de83154627fbab1f4f3c0386df266dded865fc50a902"}, - {file = "coverage-7.6.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:be57b6d56e49c2739cdf776839a92330e933dd5e5d929966fbbd380c77f060be"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1592791f8204ae9166de22ba7e6705fa4ebd02936c09436a1bb85aabca3e599"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4e12ae8cc979cf83d258acb5e1f1cf2f3f83524d1564a49d20b8bec14b637f08"}, - {file = "coverage-7.6.9-cp39-cp39-manylinux_2_5_x86_64.manylinux1_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bb5555cff66c4d3d6213a296b360f9e1a8e323e74e0426b6c10ed7f4d021e464"}, - {file = 
"coverage-7.6.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:b9389a429e0e5142e69d5bf4a435dd688c14478a19bb901735cdf75e57b13845"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:592ac539812e9b46046620341498caf09ca21023c41c893e1eb9dbda00a70cbf"}, - {file = "coverage-7.6.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:a27801adef24cc30871da98a105f77995e13a25a505a0161911f6aafbd66e678"}, - {file = "coverage-7.6.9-cp39-cp39-win32.whl", hash = "sha256:8e3c3e38930cfb729cb8137d7f055e5a473ddaf1217966aa6238c88bd9fd50e6"}, - {file = "coverage-7.6.9-cp39-cp39-win_amd64.whl", hash = "sha256:e28bf44afa2b187cc9f41749138a64435bf340adfcacb5b2290c070ce99839d4"}, - {file = "coverage-7.6.9-pp39.pp310-none-any.whl", hash = "sha256:f3ca78518bc6bc92828cd11867b121891d75cae4ea9e908d72030609b996db1b"}, - {file = "coverage-7.6.9.tar.gz", hash = "sha256:4a8d8977b0c6ef5aeadcb644da9e69ae0dcfe66ec7f368c89c72e058bd71164d"}, -] - -[package.dependencies] -tomli = {version = "*", optional = true, markers = "python_full_version <= \"3.11.0a6\" and extra == \"toml\""} - -[package.extras] -toml = ["tomli"] - -[[package]] -name = "croniter" -version = "5.0.1" -description = "croniter provides iteration for datetime object with cron like format" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,>=2.6" -files = [ - {file = "croniter-5.0.1-py2.py3-none-any.whl", hash = "sha256:eb28439742291f6c10b181df1a5ecf421208b1fc62ef44501daec1780a0b09e9"}, - {file = "croniter-5.0.1.tar.gz", hash = "sha256:7d9b1ef25b10eece48fdf29d8ac52f9b6252abff983ac614ade4f3276294019e"}, -] - -[package.dependencies] -python-dateutil = "*" -pytz = ">2021.1" - -[[package]] -name = "django" -version = "4.2.17" -description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Django-4.2.17-py3-none-any.whl", hash = "sha256:3a93350214ba25f178d4045c0786c61573e7dbfa3c509b3551374f1e11ba8de0"}, - {file = "Django-4.2.17.tar.gz", hash = "sha256:6b56d834cc94c8b21a8f4e775064896be3b4a4ca387f2612d4406a5927cd2fdc"}, -] - -[package.dependencies] -asgiref = ">=3.6.0,<4" -sqlparse = ">=0.3.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -argon2 = ["argon2-cffi (>=19.1.0)"] -bcrypt = ["bcrypt"] - -[[package]] -name = "django" -version = "5.1.4" -description = "A high-level Python web framework that encourages rapid development and clean, pragmatic design." -optional = false -python-versions = ">=3.10" -files = [ - {file = "Django-5.1.4-py3-none-any.whl", hash = "sha256:236e023f021f5ce7dee5779de7b286565fdea5f4ab86bae5338e3f7b69896cf0"}, - {file = "Django-5.1.4.tar.gz", hash = "sha256:de450c09e91879fa5a307f696e57c851955c910a438a35e6b4c895e86bedc82a"}, -] - -[package.dependencies] -asgiref = ">=3.8.1,<4" -sqlparse = ">=0.3.1" -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -argon2 = ["argon2-cffi (>=19.1.0)"] -bcrypt = ["bcrypt"] - -[[package]] -name = "django-stubs" -version = "5.1.1" -description = "Mypy stubs for Django" -optional = false -python-versions = ">=3.8" -files = [ - {file = "django_stubs-5.1.1-py3-none-any.whl", hash = "sha256:c4dc64260bd72e6d32b9e536e8dd0d9247922f0271f82d1d5132a18f24b388ac"}, - {file = "django_stubs-5.1.1.tar.gz", hash = "sha256:126d354bbdff4906c4e93e6361197f6fbfb6231c3df6def85a291dae6f9f577b"}, -] - -[package.dependencies] -asgiref = "*" -django = "*" -django-stubs-ext = ">=5.1.1" -tomli = {version = "*", markers = "python_version < \"3.11\""} -types-PyYAML = "*" -typing-extensions = ">=4.11.0" - -[package.extras] -compatible-mypy = ["mypy (>=1.12,<1.14)"] -oracle = ["oracledb"] -redis = ["redis"] - -[[package]] -name = "django-stubs-ext" -version = "5.1.1" -description = 
"Monkey-patching and extensions for django-stubs" -optional = false -python-versions = ">=3.8" -files = [ - {file = "django_stubs_ext-5.1.1-py3-none-any.whl", hash = "sha256:3907f99e178c93323e2ce908aef8352adb8c047605161f8d9e5e7b4efb5a6a9c"}, - {file = "django_stubs_ext-5.1.1.tar.gz", hash = "sha256:db7364e4f50ae7e5360993dbd58a3a57ea4b2e7e5bab0fbd525ccdb3e7975d1c"}, -] - -[package.dependencies] -django = "*" -typing-extensions = "*" - -[[package]] -name = "docutils" -version = "0.21.2" -description = "Docutils -- Python Documentation Utilities" -optional = false -python-versions = ">=3.9" -files = [ - {file = "docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2"}, - {file = "docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f"}, -] - -[[package]] -name = "dunamai" -version = "1.23.0" -description = "Dynamic version generation" -optional = false -python-versions = ">=3.5" -files = [ - {file = "dunamai-1.23.0-py3-none-any.whl", hash = "sha256:a0906d876e92441793c6a423e16a4802752e723e9c9a5aabdc5535df02dbe041"}, - {file = "dunamai-1.23.0.tar.gz", hash = "sha256:a163746de7ea5acb6dacdab3a6ad621ebc612ed1e528aaa8beedb8887fccd2c4"}, -] - -[package.dependencies] -packaging = ">=20.9" - -[[package]] -name = "exceptiongroup" -version = "1.2.2" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.2-py3-none-any.whl", hash = "sha256:3111b9d131c238bec2f8f516e123e14ba243563fb135d3fe885990585aa7795b"}, - {file = "exceptiongroup-1.2.2.tar.gz", hash = "sha256:47c2edf7c6738fafb49fd34290706d1a1a2f4d1c6df275526b62cbb4aa5393cc"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "furo" -version = "2024.8.6" -description = "A clean customisable Sphinx documentation theme." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "furo-2024.8.6-py3-none-any.whl", hash = "sha256:6cd97c58b47813d3619e63e9081169880fbe331f0ca883c871ff1f3f11814f5c"}, - {file = "furo-2024.8.6.tar.gz", hash = "sha256:b63e4cee8abfc3136d3bc03a3d45a76a850bada4d6374d24c1716b0e01394a01"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -pygments = ">=2.7" -sphinx = ">=6.0,<9.0" -sphinx-basic-ng = ">=1.0.0.beta2" - -[[package]] -name = "greenlet" -version = "3.1.1" -description = "Lightweight in-process concurrent programming" -optional = false -python-versions = ">=3.7" -files = [ - {file = "greenlet-3.1.1-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:0bbae94a29c9e5c7e4a2b7f0aae5c17e8e90acbfd3bf6270eeba60c39fce3563"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0fde093fb93f35ca72a556cf72c92ea3ebfda3d79fc35bb19fbe685853869a83"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:36b89d13c49216cadb828db8dfa6ce86bbbc476a82d3a6c397f0efae0525bdd0"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94b6150a85e1b33b40b1464a3f9988dcc5251d6ed06842abff82e42632fac120"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:93147c513fac16385d1036b7e5b102c7fbbdb163d556b791f0f11eada7ba65dc"}, - {file = "greenlet-3.1.1-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:da7a9bff22ce038e19bf62c4dd1ec8391062878710ded0a845bcf47cc0200617"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:b2795058c23988728eec1f36a4e5e4ebad22f8320c85f3587b539b9ac84128d7"}, - {file = "greenlet-3.1.1-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:ed10eac5830befbdd0c32f83e8aa6288361597550ba669b04c48f0f9a2c843c6"}, - {file = "greenlet-3.1.1-cp310-cp310-win_amd64.whl", hash = 
"sha256:77c386de38a60d1dfb8e55b8c1101d68c79dfdd25c7095d51fec2dd800892b80"}, - {file = "greenlet-3.1.1-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:e4d333e558953648ca09d64f13e6d8f0523fa705f51cae3f03b5983489958c70"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:09fc016b73c94e98e29af67ab7b9a879c307c6731a2c9da0db5a7d9b7edd1159"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d5e975ca70269d66d17dd995dafc06f1b06e8cb1ec1e9ed54c1d1e4a7c4cf26e"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3b2813dc3de8c1ee3f924e4d4227999285fd335d1bcc0d2be6dc3f1f6a318ec1"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e347b3bfcf985a05e8c0b7d462ba6f15b1ee1c909e2dcad795e49e91b152c383"}, - {file = "greenlet-3.1.1-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9e8f8c9cb53cdac7ba9793c276acd90168f416b9ce36799b9b885790f8ad6c0a"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:62ee94988d6b4722ce0028644418d93a52429e977d742ca2ccbe1c4f4a792511"}, - {file = "greenlet-3.1.1-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:1776fd7f989fc6b8d8c8cb8da1f6b82c5814957264d1f6cf818d475ec2bf6395"}, - {file = "greenlet-3.1.1-cp311-cp311-win_amd64.whl", hash = "sha256:48ca08c771c268a768087b408658e216133aecd835c0ded47ce955381105ba39"}, - {file = "greenlet-3.1.1-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:4afe7ea89de619adc868e087b4d2359282058479d7cfb94970adf4b55284574d"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f406b22b7c9a9b4f8aa9d2ab13d6ae0ac3e85c9a809bd590ad53fed2bf70dc79"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:c3a701fe5a9695b238503ce5bbe8218e03c3bcccf7e204e455e7462d770268aa"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2846930c65b47d70b9d178e89c7e1a69c95c1f68ea5aa0a58646b7a96df12441"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:99cfaa2110534e2cf3ba31a7abcac9d328d1d9f1b95beede58294a60348fba36"}, - {file = "greenlet-3.1.1-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1443279c19fca463fc33e65ef2a935a5b09bb90f978beab37729e1c3c6c25fe9"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:b7cede291382a78f7bb5f04a529cb18e068dd29e0fb27376074b6d0317bf4dd0"}, - {file = "greenlet-3.1.1-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:23f20bb60ae298d7d8656c6ec6db134bca379ecefadb0b19ce6f19d1f232a942"}, - {file = "greenlet-3.1.1-cp312-cp312-win_amd64.whl", hash = "sha256:7124e16b4c55d417577c2077be379514321916d5790fa287c9ed6f23bd2ffd01"}, - {file = "greenlet-3.1.1-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:05175c27cb459dcfc05d026c4232f9de8913ed006d42713cb8a5137bd49375f1"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:935e943ec47c4afab8965954bf49bfa639c05d4ccf9ef6e924188f762145c0ff"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:667a9706c970cb552ede35aee17339a18e8f2a87a51fba2ed39ceeeb1004798a"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b8a678974d1f3aa55f6cc34dc480169d58f2e6d8958895d68845fa4ab566509e"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:efc0f674aa41b92da8c49e0346318c6075d734994c3c4e4430b1c3f853e498e4"}, - {file = "greenlet-3.1.1-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:0153404a4bb921f0ff1abeb5ce8a5131da56b953eda6e14b88dc6bbc04d2049e"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:275f72decf9932639c1c6dd1013a1bc266438eb32710016a1c742df5da6e60a1"}, - {file = "greenlet-3.1.1-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:c4aab7f6381f38a4b42f269057aee279ab0fc7bf2e929e3d4abfae97b682a12c"}, - {file = "greenlet-3.1.1-cp313-cp313-win_amd64.whl", hash = "sha256:b42703b1cf69f2aa1df7d1030b9d77d3e584a70755674d60e710f0af570f3761"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f1695e76146579f8c06c1509c7ce4dfe0706f49c6831a817ac04eebb2fd02011"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:7876452af029456b3f3549b696bb36a06db7c90747740c5302f74a9e9fa14b13"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4ead44c85f8ab905852d3de8d86f6f8baf77109f9da589cb4fa142bd3b57b475"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8320f64b777d00dd7ccdade271eaf0cad6636343293a25074cc5566160e4de7b"}, - {file = "greenlet-3.1.1-cp313-cp313t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6510bf84a6b643dabba74d3049ead221257603a253d0a9873f55f6a59a65f822"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_aarch64.whl", hash = "sha256:04b013dc07c96f83134b1e99888e7a79979f1a247e2a9f59697fa14b5862ed01"}, - {file = "greenlet-3.1.1-cp313-cp313t-musllinux_1_1_x86_64.whl", hash = "sha256:411f015496fec93c1c8cd4e5238da364e1da7a124bcb293f085bf2860c32c6f6"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:47da355d8687fd65240c364c90a31569a133b7b60de111c255ef5b606f2ae291"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = 
"sha256:98884ecf2ffb7d7fe6bd517e8eb99d31ff7855a840fa6d0d63cd07c037f6a981"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f1d4aeb8891338e60d1ab6127af1fe45def5259def8094b9c7e34690c8858803"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db32b5348615a04b82240cc67983cb315309e88d444a288934ee6ceaebcad6cc"}, - {file = "greenlet-3.1.1-cp37-cp37m-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcc62f31eae24de7f8dce72134c8651c58000d3b1868e01392baea7c32c247de"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:1d3755bcb2e02de341c55b4fca7a745a24a9e7212ac953f6b3a48d117d7257aa"}, - {file = "greenlet-3.1.1-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:b8da394b34370874b4572676f36acabac172602abf054cbc4ac910219f3340af"}, - {file = "greenlet-3.1.1-cp37-cp37m-win32.whl", hash = "sha256:a0dfc6c143b519113354e780a50381508139b07d2177cb6ad6a08278ec655798"}, - {file = "greenlet-3.1.1-cp37-cp37m-win_amd64.whl", hash = "sha256:54558ea205654b50c438029505def3834e80f0869a70fb15b871c29b4575ddef"}, - {file = "greenlet-3.1.1-cp38-cp38-macosx_11_0_universal2.whl", hash = "sha256:346bed03fe47414091be4ad44786d1bd8bef0c3fcad6ed3dee074a032ab408a9"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:dfc59d69fc48664bc693842bd57acfdd490acafda1ab52c7836e3fc75c90a111"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d21e10da6ec19b457b82636209cbe2331ff4306b54d06fa04b7c138ba18c8a81"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:37b9de5a96111fc15418819ab4c4432e4f3c2ede61e660b1e33971eba26ef9ba"}, - {file = "greenlet-3.1.1-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6ef9ea3f137e5711f0dbe5f9263e8c009b7069d8a1acea822bd5e9dae0ae49c8"}, - {file = 
"greenlet-3.1.1-cp38-cp38-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:85f3ff71e2e60bd4b4932a043fbbe0f499e263c628390b285cb599154a3b03b1"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:95ffcf719966dd7c453f908e208e14cde192e09fde6c7186c8f1896ef778d8cd"}, - {file = "greenlet-3.1.1-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:03a088b9de532cbfe2ba2034b2b85e82df37874681e8c470d6fb2f8c04d7e4b7"}, - {file = "greenlet-3.1.1-cp38-cp38-win32.whl", hash = "sha256:8b8b36671f10ba80e159378df9c4f15c14098c4fd73a36b9ad715f057272fbef"}, - {file = "greenlet-3.1.1-cp38-cp38-win_amd64.whl", hash = "sha256:7017b2be767b9d43cc31416aba48aab0d2309ee31b4dbf10a1d38fb7972bdf9d"}, - {file = "greenlet-3.1.1-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:396979749bd95f018296af156201d6211240e7a23090f50a8d5d18c370084dc3"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ca9d0ff5ad43e785350894d97e13633a66e2b50000e8a183a50a88d834752d42"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f6ff3b14f2df4c41660a7dec01045a045653998784bf8cfcb5a525bdffffbc8f"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:94ebba31df2aa506d7b14866fed00ac141a867e63143fe5bca82a8e503b36437"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aaad12ac0ff500f62cebed98d8789198ea0e6f233421059fa68a5aa7220145"}, - {file = "greenlet-3.1.1-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63e4844797b975b9af3a3fb8f7866ff08775f5426925e1e0bbcfe7932059a12c"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:7939aa3ca7d2a1593596e7ac6d59391ff30281ef280d8632fa03d81f7c5f955e"}, - {file = "greenlet-3.1.1-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:d0028e725ee18175c6e422797c407874da24381ce0690d6b9396c204c7f7276e"}, - {file = "greenlet-3.1.1-cp39-cp39-win32.whl", hash = "sha256:5e06afd14cbaf9e00899fae69b24a32f2196c19de08fcb9f4779dd4f004e5e7c"}, - {file = "greenlet-3.1.1-cp39-cp39-win_amd64.whl", hash = "sha256:3319aa75e0e0639bc15ff54ca327e8dc7a6fe404003496e3c6925cd3142e0e22"}, - {file = "greenlet-3.1.1.tar.gz", hash = "sha256:4ce3ac6cdb6adf7946475d7ef31777c26d94bccc377e070a7986bd2d5c515467"}, -] - -[package.extras] -docs = ["Sphinx", "furo"] -test = ["objgraph", "psutil"] - -[[package]] -name = "idna" -version = "3.10" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.6" -files = [ - {file = "idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3"}, - {file = "idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9"}, -] - -[package.extras] -all = ["flake8 (>=7.1.1)", "mypy (>=1.11.2)", "pytest (>=8.3.2)", "ruff (>=0.6.2)"] - -[[package]] -name = "imagesize" -version = "1.4.1" -description = "Getting image size from png/jpeg/jpeg2000/gif file" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b"}, - {file = "imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a"}, -] - -[[package]] -name = "importlib-metadata" -version = "8.5.0" -description = "Read metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-8.5.0-py3-none-any.whl", hash = "sha256:45e54197d28b7a7f1559e60b95e7c567032b602131fbd588f1497f47880aa68b"}, - {file = "importlib_metadata-8.5.0.tar.gz", hash = "sha256:71522656f0abace1d072b9e5481a48f07c138e00f079c38c8f883823f9c26bd7"}, -] - -[package.dependencies] 
-zipp = ">=3.20" - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler (>=2.2)"] -perf = ["ipython"] -test = ["flufl.flake8", "importlib-resources (>=1.3)", "jaraco.test (>=5.4)", "packaging", "pyfakefs", "pytest (>=6,!=8.1.*)", "pytest-perf (>=0.9.2)"] -type = ["pytest-mypy"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "jinja2" -version = "3.1.4" -description = "A very fast and expressive template engine." -optional = false -python-versions = ">=3.7" -files = [ - {file = "jinja2-3.1.4-py3-none-any.whl", hash = "sha256:bc5dd2abb727a5319567b7a813e6a2e7318c39f4f487cfe6c89c6f9c7d25197d"}, - {file = "jinja2-3.1.4.tar.gz", hash = "sha256:4a3aee7acbbe7303aede8e9648d13b8bf88a429282aa6122a993f0ac800cb369"}, -] - -[package.dependencies] -MarkupSafe = ">=2.0" - -[package.extras] -i18n = ["Babel (>=2.7)"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markupsafe" -version = "3.0.2" -description = "Safely add untrusted strings to HTML/XML markup." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50"}, - {file = "MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93"}, - {file = 
"MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d"}, - {file = "MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8"}, - {file = 
"MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30"}, - {file = "MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = 
"sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1"}, - {file = "MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6"}, - {file = "MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", 
hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f"}, - {file = "MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a"}, - {file = "markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0"}, -] - -[[package]] -name = "mdit-py-plugins" -version = "0.4.2" -description = "Collection of plugins for markdown-it-py" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mdit_py_plugins-0.4.2-py3-none-any.whl", hash = 
"sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636"}, - {file = "mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5"}, -] - -[package.dependencies] -markdown-it-py = ">=1.0.0,<4.0.0" - -[package.extras] -code-style = ["pre-commit"] -rtd = ["myst-parser", "sphinx-book-theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "migra" -version = "3.0.1663481299" -description = "Like `diff` but for PostgreSQL schemas" -optional = false -python-versions = ">=3.7,<4" -files = [ - {file = "migra-3.0.1663481299-py3-none-any.whl", hash = "sha256:061643e9af63488e085d729f267ed4af4249789979732b703ddeb2c478ec9a93"}, - {file = "migra-3.0.1663481299.tar.gz", hash = "sha256:0cf0c125d553008d9ff5402663a51703ccc474bb65b5a4f4727906dbf58e217f"}, -] - -[package.dependencies] -schemainspect = ">=3.1.1663480743" -six = "*" -sqlbag = "*" - -[package.extras] -pg = ["psycopg2-binary"] - -[[package]] -name = "mypy" -version = "1.13.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.13.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:6607e0f1dd1fb7f0aca14d936d13fd19eba5e17e1cd2a14f808fa5f8f6d8f60a"}, - {file = "mypy-1.13.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:8a21be69bd26fa81b1f80a61ee7ab05b076c674d9b18fb56239d72e21d9f4c80"}, - {file = "mypy-1.13.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:7b2353a44d2179846a096e25691d54d59904559f4232519d420d64da6828a3a7"}, - {file = "mypy-1.13.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:0730d1c6a2739d4511dc4253f8274cdd140c55c32dfb0a4cf8b7a43f40abfa6f"}, - {file = "mypy-1.13.0-cp310-cp310-win_amd64.whl", hash = "sha256:c5fc54dbb712ff5e5a0fca797e6e0aa25726c7e72c6a5850cfd2adbc1eb0a372"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:581665e6f3a8a9078f28d5502f4c334c0c8d802ef55ea0e7276a6e409bc0d82d"}, - {file = "mypy-1.13.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:3ddb5b9bf82e05cc9a627e84707b528e5c7caaa1c55c69e175abb15a761cec2d"}, - {file = "mypy-1.13.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:20c7ee0bc0d5a9595c46f38beb04201f2620065a93755704e141fcac9f59db2b"}, - {file = "mypy-1.13.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:3790ded76f0b34bc9c8ba4def8f919dd6a46db0f5a6610fb994fe8efdd447f73"}, - {file = "mypy-1.13.0-cp311-cp311-win_amd64.whl", hash = "sha256:51f869f4b6b538229c1d1bcc1dd7d119817206e2bc54e8e374b3dfa202defcca"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5c7051a3461ae84dfb5dd15eff5094640c61c5f22257c8b766794e6dd85e72d5"}, - {file = "mypy-1.13.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:39bb21c69a5d6342f4ce526e4584bc5c197fd20a60d14a8624d8743fffb9472e"}, - {file = "mypy-1.13.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:164f28cb9d6367439031f4c81e84d3ccaa1e19232d9d05d37cb0bd880d3f93c2"}, - {file = "mypy-1.13.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:a4c1bfcdbce96ff5d96fc9b08e3831acb30dc44ab02671eca5953eadad07d6d0"}, - {file = "mypy-1.13.0-cp312-cp312-win_amd64.whl", hash = "sha256:a0affb3a79a256b4183ba09811e3577c5163ed06685e4d4b46429a271ba174d2"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = 
"sha256:a7b44178c9760ce1a43f544e595d35ed61ac2c3de306599fa59b38a6048e1aa7"}, - {file = "mypy-1.13.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:5d5092efb8516d08440e36626f0153b5006d4088c1d663d88bf79625af3d1d62"}, - {file = "mypy-1.13.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de2904956dac40ced10931ac967ae63c5089bd498542194b436eb097a9f77bc8"}, - {file = "mypy-1.13.0-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:7bfd8836970d33c2105562650656b6846149374dc8ed77d98424b40b09340ba7"}, - {file = "mypy-1.13.0-cp313-cp313-win_amd64.whl", hash = "sha256:9f73dba9ec77acb86457a8fc04b5239822df0c14a082564737833d2963677dbc"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:100fac22ce82925f676a734af0db922ecfea991e1d7ec0ceb1e115ebe501301a"}, - {file = "mypy-1.13.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:7bcb0bb7f42a978bb323a7c88f1081d1b5dee77ca86f4100735a6f541299d8fb"}, - {file = "mypy-1.13.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bde31fc887c213e223bbfc34328070996061b0833b0a4cfec53745ed61f3519b"}, - {file = "mypy-1.13.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:07de989f89786f62b937851295ed62e51774722e5444a27cecca993fc3f9cd74"}, - {file = "mypy-1.13.0-cp38-cp38-win_amd64.whl", hash = "sha256:4bde84334fbe19bad704b3f5b78c4abd35ff1026f8ba72b29de70dda0916beb6"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:0246bcb1b5de7f08f2826451abd947bf656945209b140d16ed317f65a17dc7dc"}, - {file = "mypy-1.13.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:7f5b7deae912cf8b77e990b9280f170381fdfbddf61b4ef80927edd813163732"}, - {file = "mypy-1.13.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7029881ec6ffb8bc233a4fa364736789582c738217b133f1b55967115288a2bc"}, - {file = "mypy-1.13.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = 
"sha256:3e38b980e5681f28f033f3be86b099a247b13c491f14bb8b1e1e134d23bb599d"}, - {file = "mypy-1.13.0-cp39-cp39-win_amd64.whl", hash = "sha256:a6789be98a2017c912ae6ccb77ea553bbaf13d27605d2ca20a76dfbced631b24"}, - {file = "mypy-1.13.0-py3-none-any.whl", hash = "sha256:9c250883f9fd81d212e0952c92dbfcc96fc237f4b7c92f56ac81fd48460b3e5a"}, - {file = "mypy-1.13.0.tar.gz", hash = "sha256:0291a61b6fbf3e6673e3405cfcc0e7650bebc7939659fdca2702958038bd835e"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.6.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -faster-cache = ["orjson"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." -optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "myst-parser" -version = "3.0.1" -description = "An extended [CommonMark](https://spec.commonmark.org/) compliant parser," -optional = false -python-versions = ">=3.8" -files = [ - {file = "myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1"}, - {file = "myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87"}, -] - -[package.dependencies] -docutils = ">=0.18,<0.22" -jinja2 = "*" -markdown-it-py = ">=3.0,<4.0" -mdit-py-plugins = ">=0.4,<1.0" -pyyaml = "*" -sphinx = ">=6,<8" - -[package.extras] -code-style = ["pre-commit (>=3.0,<4.0)"] -linkify = ["linkify-it-py (>=2.0,<3.0)"] -rtd = ["ipython", "sphinx (>=7)", 
"sphinx-autodoc2 (>=0.5.0,<0.6.0)", "sphinx-book-theme (>=1.1,<2.0)", "sphinx-copybutton", "sphinx-design", "sphinx-pyscript", "sphinx-tippy (>=0.4.3)", "sphinx-togglebutton", "sphinxext-opengraph (>=0.9.0,<0.10.0)", "sphinxext-rediraffe (>=0.2.7,<0.3.0)"] -testing = ["beautifulsoup4", "coverage[toml]", "defusedxml", "pytest (>=8,<9)", "pytest-cov", "pytest-param-files (>=0.6.0,<0.7.0)", "pytest-regressions", "sphinx-pytest"] -testing-docutils = ["pygments", "pytest (>=8,<9)", "pytest-param-files (>=0.6.0,<0.7.0)"] - -[[package]] -name = "packaging" -version = "24.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "packaging-24.2-py3-none-any.whl", hash = "sha256:09abb1bccd265c01f4a3aa3f7a7db064b36514d2cba19a2f694fe6150451a759"}, - {file = "packaging-24.2.tar.gz", hash = "sha256:c228a6dc5e932d346bc5739379109d49e8853dd8223571c7c5b55260edc0b97f"}, -] - -[[package]] -name = "pluggy" -version = "1.5.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.5.0-py3-none-any.whl", hash = "sha256:44e1ad92c8ca002de6377e165f3e0f1be63266ab4d554740532335b9d75ea669"}, - {file = "pluggy-1.5.0.tar.gz", hash = "sha256:2cffa88e94fdc978c4c574f15f9e59b7f4201d439195c3715ca9e2486f1d0cf1"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "psycopg" -version = "3.2.3" -description = "PostgreSQL database adapter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg-3.2.3-py3-none-any.whl", hash = "sha256:644d3973fe26908c73d4be746074f6e5224b03c1101d302d9a53bf565ad64907"}, - {file = "psycopg-3.2.3.tar.gz", hash = "sha256:a5764f67c27bec8bfac85764d23c534af2c27b893550377e37ce59c12aac47a2"}, -] - -[package.dependencies] -psycopg-binary = {version = "3.2.3", optional = true, markers = "implementation_name != \"pypy\" and extra == \"binary\""} 
-psycopg-pool = {version = "*", optional = true, markers = "extra == \"pool\""} -typing-extensions = {version = ">=4.6", markers = "python_version < \"3.13\""} -tzdata = {version = "*", markers = "sys_platform == \"win32\""} - -[package.extras] -binary = ["psycopg-binary (==3.2.3)"] -c = ["psycopg-c (==3.2.3)"] -dev = ["ast-comments (>=1.1.2)", "black (>=24.1.0)", "codespell (>=2.2)", "dnspython (>=2.1)", "flake8 (>=4.0)", "mypy (>=1.11)", "types-setuptools (>=57.4)", "wheel (>=0.37)"] -docs = ["Sphinx (>=5.0)", "furo (==2022.6.21)", "sphinx-autobuild (>=2021.3.14)", "sphinx-autodoc-typehints (>=1.12)"] -pool = ["psycopg-pool"] -test = ["anyio (>=4.0)", "mypy (>=1.11)", "pproxy (>=2.7)", "pytest (>=6.2.5)", "pytest-cov (>=3.0)", "pytest-randomly (>=3.5)"] - -[[package]] -name = "psycopg-binary" -version = "3.2.3" -description = "PostgreSQL database adapter for Python -- C optimisation distribution" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:965455eac8547f32b3181d5ec9ad8b9be500c10fe06193543efaaebe3e4ce70c"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:71adcc8bc80a65b776510bc39992edf942ace35b153ed7a9c6c573a6849ce308"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f73adc05452fb85e7a12ed3f69c81540a8875960739082e6ea5e28c373a30774"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8630943143c6d6ca9aefc88bbe5e76c90553f4e1a3b2dc339e67dc34aa86f7e"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3bffb61e198a91f712cc3d7f2d176a697cb05b284b2ad150fb8edb308eba9002"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dc4fa2240c9fceddaa815a58f29212826fafe43ce80ff666d38c4a03fb036955"}, - {file = 
"psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:192a5f8496e6e1243fdd9ac20e117e667c0712f148c5f9343483b84435854c78"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:64dc6e9ec64f592f19dc01a784e87267a64a743d34f68488924251253da3c818"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:79498df398970abcee3d326edd1d4655de7d77aa9aecd578154f8af35ce7bbd2"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:949551752930d5e478817e0b49956350d866b26578ced0042a61967e3fcccdea"}, - {file = "psycopg_binary-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:80a2337e2dfb26950894c8301358961430a0304f7bfe729d34cc036474e9c9b1"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:6d8f2144e0d5808c2e2aed40fbebe13869cd00c2ae745aca4b3b16a435edb056"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:94253be2b57ef2fea7ffe08996067aabf56a1eb9648342c9e3bad9e10c46e045"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fda0162b0dbfa5eaed6cdc708179fa27e148cb8490c7d62e5cf30713909658ea"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2c0419cdad8c70eaeb3116bb28e7b42d546f91baf5179d7556f230d40942dc78"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:74fbf5dd3ef09beafd3557631e282f00f8af4e7a78fbfce8ab06d9cd5a789aae"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7d784f614e4d53050cbe8abf2ae9d1aaacf8ed31ce57b42ce3bf2a48a66c3a5c"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:4e76ce2475ed4885fe13b8254058be710ec0de74ebd8ef8224cf44a9a3358e5f"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_i686.whl", hash = 
"sha256:5938b257b04c851c2d1e6cb2f8c18318f06017f35be9a5fe761ee1e2e344dfb7"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:257c4aea6f70a9aef39b2a77d0658a41bf05c243e2bf41895eb02220ac6306f3"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:06b5cc915e57621eebf2393f4173793ed7e3387295f07fed93ed3fb6a6ccf585"}, - {file = "psycopg_binary-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:09baa041856b35598d335b1a74e19a49da8500acedf78164600694c0ba8ce21b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:48f8ca6ee8939bab760225b2ab82934d54330eec10afe4394a92d3f2a0c37dd6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:5361ea13c241d4f0ec3f95e0bf976c15e2e451e9cc7ef2e5ccfc9d170b197a40"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cb987f14af7da7c24f803111dbc7392f5070fd350146af3345103f76ea82e339"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0463a11b1cace5a6aeffaf167920707b912b8986a9c7920341c75e3686277920"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8b7be9a6c06518967b641fb15032b1ed682fd3b0443f64078899c61034a0bca6"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:64a607e630d9f4b2797f641884e52b9f8e239d35943f51bef817a384ec1678fe"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:fa33ead69ed133210d96af0c63448b1385df48b9c0247eda735c5896b9e6dbbf"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:1f8b0d0e99d8e19923e6e07379fa00570be5182c201a8c0b5aaa9a4d4a4ea20b"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:709447bd7203b0b2debab1acec23123eb80b386f6c29e7604a5d4326a11e5bd6"}, - 
{file = "psycopg_binary-3.2.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5e37d5027e297a627da3551a1e962316d0f88ee4ada74c768f6c9234e26346d9"}, - {file = "psycopg_binary-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:261f0031ee6074765096a19b27ed0f75498a8338c3dcd7f4f0d831e38adf12d1"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:41fdec0182efac66b27478ac15ef54c9ebcecf0e26ed467eb7d6f262a913318b"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:07d019a786eb020c0f984691aa1b994cb79430061065a694cf6f94056c603d26"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4c57615791a337378fe5381143259a6c432cdcbb1d3e6428bfb7ce59fff3fb5c"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e8eb9a4e394926b93ad919cad1b0a918e9b4c846609e8c1cfb6b743683f64da0"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:5905729668ef1418bd36fbe876322dcb0f90b46811bba96d505af89e6fbdce2f"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd65774ed7d65101b314808b6893e1a75b7664f680c3ef18d2e5c84d570fa393"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:700679c02f9348a0d0a2adcd33a0275717cd0d0aee9d4482b47d935023629505"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:96334bb64d054e36fed346c50c4190bad9d7c586376204f50bede21a913bf942"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:9099e443d4cc24ac6872e6a05f93205ba1a231b1a8917317b07c9ef2b955f1f4"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1985ab05e9abebfbdf3163a16ebb37fbc5d49aff2bf5b3d7375ff0920bbb54cd"}, - {file = "psycopg_binary-3.2.3-cp313-cp313-win_amd64.whl", hash = 
"sha256:e90352d7b610b4693fad0feea48549d4315d10f1eba5605421c92bb834e90170"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:69320f05de8cdf4077ecd7fefdec223890eea232af0d58f2530cbda2871244a0"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4926ea5c46da30bec4a85907aa3f7e4ea6313145b2aa9469fdb861798daf1502"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c64c4cd0d50d5b2288ab1bcb26c7126c772bbdebdfadcd77225a77df01c4a57e"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:05a1bdce30356e70a05428928717765f4a9229999421013f41338d9680d03a63"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7ad357e426b0ea5c3043b8ec905546fa44b734bf11d33b3da3959f6e4447d350"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:967b47a0fd237aa17c2748fdb7425015c394a6fb57cdad1562e46a6eb070f96d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:71db8896b942770ed7ab4efa59b22eee5203be2dfdee3c5258d60e57605d688c"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:2773f850a778575dd7158a6dd072f7925b67f3ba305e2003538e8831fec77a1d"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:aeddf7b3b3f6e24ccf7d0edfe2d94094ea76b40e831c16eff5230e040ce3b76b"}, - {file = "psycopg_binary-3.2.3-cp38-cp38-win_amd64.whl", hash = "sha256:824c867a38521d61d62b60aca7db7ca013a2b479e428a0db47d25d8ca5067410"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:9994f7db390c17fc2bd4c09dca722fd792ff8a49bb3bdace0c50a83f22f1767d"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1303bf8347d6be7ad26d1362af2c38b3a90b8293e8d56244296488ee8591058e"}, - 
{file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:842da42a63ecb32612bb7f5b9e9f8617eab9bc23bd58679a441f4150fcc51c96"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2bb342a01c76f38a12432848e6013c57eb630103e7556cf79b705b53814c3949"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fd40af959173ea0d087b6b232b855cfeaa6738f47cb2a0fd10a7f4fa8b74293f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:9b60b465773a52c7d4705b0a751f7f1cdccf81dd12aee3b921b31a6e76b07b0e"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:fc6d87a1c44df8d493ef44988a3ded751e284e02cdf785f746c2d357e99782a6"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:f0b018e37608c3bfc6039a1dc4eb461e89334465a19916be0153c757a78ea426"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:2a29f5294b0b6360bfda69653697eff70aaf2908f58d1073b0acd6f6ab5b5a4f"}, - {file = "psycopg_binary-3.2.3-cp39-cp39-win_amd64.whl", hash = "sha256:e56b1fd529e5dde2d1452a7d72907b37ed1b4f07fdced5d8fb1e963acfff6749"}, -] - -[[package]] -name = "psycopg-pool" -version = "3.2.4" -description = "Connection Pool for Psycopg" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg_pool-3.2.4-py3-none-any.whl", hash = "sha256:f6a22cff0f21f06d72fb2f5cb48c618946777c49385358e0c88d062c59cbd224"}, - {file = "psycopg_pool-3.2.4.tar.gz", hash = "sha256:61774b5bbf23e8d22bedc7504707135aaf744679f8ef9b3fe29942920746a6ed"}, -] - -[package.dependencies] -typing-extensions = ">=4.6" - -[[package]] -name = "psycopg2-binary" -version = "2.9.10" -description = "psycopg2 - Python-PostgreSQL Database Adapter" -optional = false -python-versions = ">=3.8" -files = [ - {file = "psycopg2-binary-2.9.10.tar.gz", hash = 
"sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b"}, - {file = "psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = 
"sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392"}, - {file = "psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = 
"sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64"}, - {file = "psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = 
"sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1"}, - {file = "psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-macosx_12_0_x86_64.whl", hash = "sha256:eb09aa7f9cecb45027683bb55aebaaf45a0df8bf6de68801a6afdc7947bb09d4"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:b73d6d7f0ccdad7bc43e6d34273f70d587ef62f824d7261c4ae9b8b1b6af90e8"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ce5ab4bf46a211a8e924d307c1b1fcda82368586a19d0a24f8ae166f5c784864"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:056470c3dc57904bbf63d6f534988bafc4e970ffd50f6271fc4ee7daad9498a5"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:73aa0e31fa4bb82578f3a6c74a73c273367727de397a7a0f07bd83cbea696baa"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:8de718c0e1c4b982a54b41779667242bc630b2197948405b7bd8ce16bcecac92"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_i686.whl", hash = "sha256:5c370b1e4975df846b0277b4deba86419ca77dbc25047f535b0bb03d1a544d44"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_ppc64le.whl", hash = "sha256:ffe8ed017e4ed70f68b7b371d84b7d4a790368db9203dfc2d222febd3a9c8863"}, - {file = "psycopg2_binary-2.9.10-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:8aecc5e80c63f7459a1a2ab2c64df952051df196294d9f739933a9f6687e86b3"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = 
"sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8"}, - {file = "psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5"}, -] - -[[package]] -name = "pygments" -version = "2.18.0" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "pygments-2.18.0-py3-none-any.whl", hash = "sha256:b8e6aca0523f3ab76fee51799c488e38782ac06eafcf95e7ba832985c8e7b13a"}, - {file = "pygments-2.18.0.tar.gz", hash = "sha256:786ff802f32e91311bff3889f6e9a86e81505fe99f2735bb6d60ae0c5004f199"}, -] - -[package.extras] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pytest" -version = "8.3.4" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-8.3.4-py3-none-any.whl", hash = "sha256:50e16d954148559c9a74109af1eaf0c945ba2d8f30f0a3d3335edde19788b6f6"}, - {file = "pytest-8.3.4.tar.gz", hash = "sha256:965370d062bce11e73868e0335abac31b4d3de0e82f4007408d242b4f8610761"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=1.5,<2" -tomli = {version = ">=1", markers = "python_version < \"3.11\""} - -[package.extras] -dev = ["argcomplete", "attrs (>=19.2)", "hypothesis (>=3.56)", "mock", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "pytest-asyncio" -version = "0.24.0" -description = "Pytest support for asyncio" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_asyncio-0.24.0-py3-none-any.whl", hash = "sha256:a811296ed596b69bf0b6f3dc40f83bcaf341b155a269052d82efa2b25ac7037b"}, - {file = "pytest_asyncio-0.24.0.tar.gz", hash = "sha256:d081d828e576d85f875399194281e92bf8a68d60d72d1a2faf2feddb6c46b276"}, -] - -[package.dependencies] -pytest = ">=8.2,<9" - -[package.extras] -docs = ["sphinx (>=5.3)", "sphinx-rtd-theme (>=1.0)"] -testing = ["coverage (>=6.2)", "hypothesis (>=5.7.1)"] - -[[package]] -name = "pytest-cov" -version = "6.0.0" -description = "Pytest plugin for measuring coverage." 
-optional = false -python-versions = ">=3.9" -files = [ - {file = "pytest-cov-6.0.0.tar.gz", hash = "sha256:fde0b595ca248bb8e2d76f020b465f3b107c9632e6a1d1705f17834c89dcadc0"}, - {file = "pytest_cov-6.0.0-py3-none-any.whl", hash = "sha256:eee6f1b9e61008bd34975a4d5bab25801eb31898b032dd55addc93e96fcaaa35"}, -] - -[package.dependencies] -coverage = {version = ">=7.5", extras = ["toml"]} -pytest = ">=4.6" - -[package.extras] -testing = ["fields", "hunter", "process-tests", "pytest-xdist", "virtualenv"] - -[[package]] -name = "pytest-django" -version = "4.9.0" -description = "A Django plugin for pytest." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest_django-4.9.0-py3-none-any.whl", hash = "sha256:1d83692cb39188682dbb419ff0393867e9904094a549a7d38a3154d5731b2b99"}, - {file = "pytest_django-4.9.0.tar.gz", hash = "sha256:8bf7bc358c9ae6f6fc51b6cebb190fe20212196e6807121f11bd6a3b03428314"}, -] - -[package.dependencies] -pytest = ">=7.0.0" - -[package.extras] -docs = ["sphinx", "sphinx-rtd-theme"] -testing = ["Django", "django-configurations (>=2.0)"] - -[[package]] -name = "pytest-mock" -version = "3.14.0" -description = "Thin-wrapper around the mock package for easier use with pytest" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pytest-mock-3.14.0.tar.gz", hash = "sha256:2719255a1efeceadbc056d6bf3df3d1c5015530fb40cf347c0f9afac88410bd0"}, - {file = "pytest_mock-3.14.0-py3-none-any.whl", hash = "sha256:0b72c38033392a5f4621342fe11e9219ac11ec9d375f8e2a0c164539e0d70f6f"}, -] - -[package.dependencies] -pytest = ">=6.2.5" - -[package.extras] -dev = ["pre-commit", "pytest-asyncio", "tox"] - -[[package]] -name = "python-dateutil" -version = "2.9.0.post0" -description = "Extensions to the standard Python datetime module" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3"}, - {file = 
"python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427"}, -] - -[package.dependencies] -six = ">=1.5" - -[[package]] -name = "pytz" -version = "2024.2" -description = "World timezone definitions, modern and historical" -optional = false -python-versions = "*" -files = [ - {file = "pytz-2024.2-py2.py3-none-any.whl", hash = "sha256:31c7c1817eb7fae7ca4b8c7ee50c72f93aa2dd863de768e1ef4245d426aa0725"}, - {file = "pytz-2024.2.tar.gz", hash = "sha256:2aa355083c50a0f93fa581709deac0c9ad65cca8a9e9beac660adcbd493c798a"}, -] - -[[package]] -name = "pyyaml" -version = "6.0.2" -description = "YAML parser and emitter for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086"}, - {file = "PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b"}, - {file = "PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180"}, - {file = "PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68"}, - {file = "PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99"}, - {file = 
"PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774"}, - {file = "PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317"}, - {file = "PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4"}, - {file = "PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e"}, - {file = "PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5"}, - {file = "PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab"}, - {file = "PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5"}, - {file = 
"PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425"}, - {file = "PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48"}, - {file = "PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b"}, - {file = "PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4"}, - {file = "PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba"}, - {file = "PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484"}, - {file = "PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc"}, - {file = "PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652"}, - {file = 
"PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183"}, - {file = "PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563"}, - {file = "PyYAML-6.0.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:24471b829b3bf607e04e88d79542a9d48bb037c2267d7927a874e6c205ca7e9a"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d7fded462629cfa4b685c5416b949ebad6cec74af5e2d42905d41e257e0869f5"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d84a1718ee396f54f3a086ea0a66d8e552b2ab2017ef8b420e92edbc841c352d"}, - {file = "PyYAML-6.0.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9056c1ecd25795207ad294bcf39f2db3d845767be0ea6e6a34d856f006006083"}, - {file = "PyYAML-6.0.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:82d09873e40955485746739bcb8b4586983670466c23382c19cffecbf1fd8706"}, - {file = "PyYAML-6.0.2-cp38-cp38-win32.whl", hash = "sha256:43fa96a3ca0d6b1812e01ced1044a003533c47f6ee8aca31724f78e93ccc089a"}, - {file = "PyYAML-6.0.2-cp38-cp38-win_amd64.whl", hash = "sha256:01179a4a8559ab5de078078f37e5c1a30d76bb88519906844fd7bdea1b7729ff"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d"}, - {file = "PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12"}, - {file = "PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", 
hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e"}, - {file = "PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725"}, - {file = "PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631"}, - {file = "PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8"}, - {file = "pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e"}, -] - -[[package]] -name = "requests" -version = "2.32.3" -description = "Python HTTP for Humans." -optional = false -python-versions = ">=3.8" -files = [ - {file = "requests-2.32.3-py3-none-any.whl", hash = "sha256:70761cfe03c773ceb22aa2f671b4757976145175cdfca038c02654d061d6dcc6"}, - {file = "requests-2.32.3.tar.gz", hash = "sha256:55365417734eb18255590a9ff9eb97e9e1da868d4ccd6402399eaf68af20a760"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "ruff" -version = "0.8.2" -description = "An extremely fast Python linter and code formatter, written in Rust." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "ruff-0.8.2-py3-none-linux_armv6l.whl", hash = "sha256:c49ab4da37e7c457105aadfd2725e24305ff9bc908487a9bf8d548c6dad8bb3d"}, - {file = "ruff-0.8.2-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ec016beb69ac16be416c435828be702ee694c0d722505f9c1f35e1b9c0cc1bf5"}, - {file = "ruff-0.8.2-py3-none-macosx_11_0_arm64.whl", hash = "sha256:f05cdf8d050b30e2ba55c9b09330b51f9f97d36d4673213679b965d25a785f3c"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:60f578c11feb1d3d257b2fb043ddb47501ab4816e7e221fbb0077f0d5d4e7b6f"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:cbd5cf9b0ae8f30eebc7b360171bd50f59ab29d39f06a670b3e4501a36ba5897"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b402ddee3d777683de60ff76da801fa7e5e8a71038f57ee53e903afbcefdaa58"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:705832cd7d85605cb7858d8a13d75993c8f3ef1397b0831289109e953d833d29"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:32096b41aaf7a5cc095fa45b4167b890e4c8d3fd217603f3634c92a541de7248"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:e769083da9439508833cfc7c23e351e1809e67f47c50248250ce1ac52c21fb93"}, - {file = "ruff-0.8.2-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:5fe716592ae8a376c2673fdfc1f5c0c193a6d0411f90a496863c99cd9e2ae25d"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:81c148825277e737493242b44c5388a300584d73d5774defa9245aaef55448b0"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:d261d7850c8367704874847d95febc698a950bf061c9475d4a8b7689adc4f7fa"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_i686.whl", hash = 
"sha256:1ca4e3a87496dc07d2427b7dd7ffa88a1e597c28dad65ae6433ecb9f2e4f022f"}, - {file = "ruff-0.8.2-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:729850feed82ef2440aa27946ab39c18cb4a8889c1128a6d589ffa028ddcfc22"}, - {file = "ruff-0.8.2-py3-none-win32.whl", hash = "sha256:ac42caaa0411d6a7d9594363294416e0e48fc1279e1b0e948391695db2b3d5b1"}, - {file = "ruff-0.8.2-py3-none-win_amd64.whl", hash = "sha256:2aae99ec70abf43372612a838d97bfe77d45146254568d94926e8ed5bbb409ea"}, - {file = "ruff-0.8.2-py3-none-win_arm64.whl", hash = "sha256:fb88e2a506b70cfbc2de6fae6681c4f944f7dd5f2fe87233a7233d888bad73e8"}, - {file = "ruff-0.8.2.tar.gz", hash = "sha256:b84f4f414dda8ac7f75075c1fa0b905ac0ff25361f42e6d5da681a465e0f78e5"}, -] - -[[package]] -name = "schemainspect" -version = "3.1.1663587362" -description = "Schema inspection for PostgreSQL (and possibly others)" -optional = false -python-versions = ">=3.7,<4" -files = [ - {file = "schemainspect-3.1.1663587362-py3-none-any.whl", hash = "sha256:3071265712863c4d4e742940a4b44ac685135af3c93416872ec1bb6c822c4aca"}, - {file = "schemainspect-3.1.1663587362.tar.gz", hash = "sha256:a295ad56f7a19c09e5e1ef9f16dadbf6392e26196cb5f05b5afe613c99ce7468"}, -] - -[package.dependencies] -sqlalchemy = "*" - -[[package]] -name = "setuptools" -version = "75.6.0" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.9" -files = [ - {file = "setuptools-75.6.0-py3-none-any.whl", hash = "sha256:ce74b49e8f7110f9bf04883b730f4765b774ef3ef28f722cce7c273d253aaf7d"}, - {file = "setuptools-75.6.0.tar.gz", hash = "sha256:8199222558df7c86216af4f84c30e9b34a61d8ba19366cc914424cdbd28252f6"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)", "ruff (>=0.7.0)"] -core = ["importlib_metadata (>=6)", "jaraco.collections", "jaraco.functools (>=4)", "jaraco.text (>=3.7)", "more_itertools", "more_itertools (>=8.8)", "packaging", "packaging (>=24.2)", "platformdirs 
(>=4.2.2)", "tomli (>=2.0.1)", "wheel (>=0.43.0)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "pyproject-hooks (!=1.1)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", "sphinxcontrib-towncrier", "towncrier (<24.7)"] -enabler = ["pytest-enabler (>=2.2)"] -test = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "ini2toml[lite] (>=0.14)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "jaraco.test (>=5.5)", "packaging (>=24.2)", "pip (>=19.1)", "pyproject-hooks (!=1.1)", "pytest (>=6,!=8.1.*)", "pytest-home (>=0.5)", "pytest-perf", "pytest-subprocess", "pytest-timeout", "pytest-xdist (>=3)", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel (>=0.44.0)"] -type = ["importlib_metadata (>=7.0.2)", "jaraco.develop (>=7.21)", "mypy (>=1.12,<1.14)", "pytest-mypy"] - -[[package]] -name = "six" -version = "1.17.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,>=2.7" -files = [ - {file = "six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274"}, - {file = "six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81"}, -] - -[[package]] -name = "sniffio" -version = "1.3.1" -description = "Sniff out which async library your code is running under" -optional = false -python-versions = ">=3.7" -files = [ - {file = "sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2"}, - {file = "sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc"}, -] - -[[package]] -name = "snowballstemmer" -version = "2.2.0" -description = "This package provides 29 stemmers for 28 languages generated from 
Snowball algorithms." -optional = false -python-versions = "*" -files = [ - {file = "snowballstemmer-2.2.0-py2.py3-none-any.whl", hash = "sha256:c8e1716e83cc398ae16824e5572ae04e0d9fc2c6b985fb0f900f5f0c96ecba1a"}, - {file = "snowballstemmer-2.2.0.tar.gz", hash = "sha256:09b16deb8547d3412ad7b590689584cd0fe25ec8db3be37788be3810cbf19cb1"}, -] - -[[package]] -name = "soupsieve" -version = "2.6" -description = "A modern CSS selector implementation for Beautiful Soup." -optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.6-py3-none-any.whl", hash = "sha256:e72c4ff06e4fb6e4b5a9f0f55fe6e81514581fca1515028625d0f299c602ccc9"}, - {file = "soupsieve-2.6.tar.gz", hash = "sha256:e2e68417777af359ec65daac1057404a3c8a5455bb8abc36f1a9866ab1a51abb"}, -] - -[[package]] -name = "sphinx" -version = "7.4.7" -description = "Python documentation generator" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239"}, - {file = "sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe"}, -] - -[package.dependencies] -alabaster = ">=0.7.14,<0.8.0" -babel = ">=2.13" -colorama = {version = ">=0.4.6", markers = "sys_platform == \"win32\""} -docutils = ">=0.20,<0.22" -imagesize = ">=1.3" -importlib-metadata = {version = ">=6.0", markers = "python_version < \"3.10\""} -Jinja2 = ">=3.1" -packaging = ">=23.0" -Pygments = ">=2.17" -requests = ">=2.30.0" -snowballstemmer = ">=2.2" -sphinxcontrib-applehelp = "*" -sphinxcontrib-devhelp = "*" -sphinxcontrib-htmlhelp = ">=2.0.0" -sphinxcontrib-jsmath = "*" -sphinxcontrib-qthelp = "*" -sphinxcontrib-serializinghtml = ">=1.1.9" -tomli = {version = ">=2", markers = "python_version < \"3.11\""} - -[package.extras] -docs = ["sphinxcontrib-websupport"] -lint = ["flake8 (>=6.0)", "importlib-metadata (>=6.0)", "mypy (==1.10.1)", "pytest (>=6.0)", "ruff (==0.5.2)", 
"sphinx-lint (>=0.9)", "tomli (>=2)", "types-docutils (==0.21.0.20240711)", "types-requests (>=2.30.0)"] -test = ["cython (>=3.0)", "defusedxml (>=0.7.1)", "pytest (>=8.0)", "setuptools (>=70.0)", "typing_extensions (>=4.9)"] - -[[package]] -name = "sphinx-basic-ng" -version = "1.0.0b2" -description = "A modern skeleton for Sphinx themes." -optional = false -python-versions = ">=3.7" -files = [ - {file = "sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b"}, - {file = "sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9"}, -] - -[package.dependencies] -sphinx = ">=4.0" - -[package.extras] -docs = ["furo", "ipython", "myst-parser", "sphinx-copybutton", "sphinx-inline-tabs"] - -[[package]] -name = "sphinx-copybutton" -version = "0.5.2" -description = "Add a copy button to each of your code cells." -optional = false -python-versions = ">=3.7" -files = [ - {file = "sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd"}, - {file = "sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e"}, -] - -[package.dependencies] -sphinx = ">=1.8" - -[package.extras] -code-style = ["pre-commit (==2.12.1)"] -rtd = ["ipython", "myst-nb", "sphinx", "sphinx-book-theme", "sphinx-examples"] - -[[package]] -name = "sphinx-github-changelog" -version = "1.4.0" -description = "Build a sphinx changelog from GitHub Releases" -optional = false -python-versions = "<4.0,>=3.8" -files = [ - {file = "sphinx_github_changelog-1.4.0-py3-none-any.whl", hash = "sha256:cdf2099ea3e4587ae8637be7ba609738bfdeca4bd80c5df6fc45046735ae5c2f"}, - {file = "sphinx_github_changelog-1.4.0.tar.gz", hash = "sha256:204745e93a1f280e4664977b5fee526b0a011c92ca19c304bd01fd641ddb6393"}, -] - -[package.dependencies] -docutils = "*" -requests = "*" -Sphinx = "*" - 
-[[package]] -name = "sphinxcontrib-applehelp" -version = "2.0.0" -description = "sphinxcontrib-applehelp is a Sphinx extension which outputs Apple help books" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5"}, - {file = "sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-devhelp" -version = "2.0.0" -description = "sphinxcontrib-devhelp is a sphinx extension which outputs Devhelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2"}, - {file = "sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sphinxcontrib-htmlhelp" -version = "2.1.0" -description = "sphinxcontrib-htmlhelp is a sphinx extension which renders HTML help files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8"}, - {file = "sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["html5lib", "pytest"] - -[[package]] -name = "sphinxcontrib-jsmath" -version = "1.0.1" -description = "A sphinx extension which renders display math in HTML via 
JavaScript" -optional = false -python-versions = ">=3.5" -files = [ - {file = "sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8"}, - {file = "sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = "sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178"}, -] - -[package.extras] -test = ["flake8", "mypy", "pytest"] - -[[package]] -name = "sphinxcontrib-programoutput" -version = "0.18" -description = "Sphinx extension to include program output" -optional = false -python-versions = ">=3.8" -files = [ - {file = "sphinxcontrib_programoutput-0.18-py3-none-any.whl", hash = "sha256:8a651bc85de69a808a064ff0e48d06c12b9347da4fe5fdb1e94914b01e1b0c36"}, - {file = "sphinxcontrib_programoutput-0.18.tar.gz", hash = "sha256:09e68b6411d937a80b6085f4fdeaa42e0dc5555480385938465f410589d2eed8"}, -] - -[package.dependencies] -Sphinx = ">=5.0.0" - -[package.extras] -docs = ["furo"] - -[[package]] -name = "sphinxcontrib-qthelp" -version = "2.0.0" -description = "sphinxcontrib-qthelp is a sphinx extension which outputs QtHelp documents" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb"}, - {file = "sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["defusedxml (>=0.7.1)", "pytest"] - -[[package]] -name = "sphinxcontrib-serializinghtml" -version = "2.0.0" -description = "sphinxcontrib-serializinghtml is a sphinx extension which outputs \"serialized\" HTML files (json and pickle)" -optional = false -python-versions = ">=3.9" -files = [ - {file = "sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = "sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331"}, - 
{file = "sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d"}, -] - -[package.extras] -lint = ["mypy", "ruff (==0.5.5)", "types-docutils"] -standalone = ["Sphinx (>=5)"] -test = ["pytest"] - -[[package]] -name = "sqlalchemy" -version = "2.0.36" -description = "Database Abstraction Library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:59b8f3adb3971929a3e660337f5dacc5942c2cdb760afcabb2614ffbda9f9f72"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:37350015056a553e442ff672c2d20e6f4b6d0b2495691fa239d8aa18bb3bc908"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8318f4776c85abc3f40ab185e388bee7a6ea99e7fa3a30686580b209eaa35c08"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c245b1fbade9c35e5bd3b64270ab49ce990369018289ecfde3f9c318411aaa07"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:69f93723edbca7342624d09f6704e7126b152eaed3cdbb634cb657a54332a3c5"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f9511d8dd4a6e9271d07d150fb2f81874a3c8c95e11ff9af3a2dfc35fe42ee44"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win32.whl", hash = "sha256:c3f3631693003d8e585d4200730616b78fafd5a01ef8b698f6967da5c605b3fa"}, - {file = "SQLAlchemy-2.0.36-cp310-cp310-win_amd64.whl", hash = "sha256:a86bfab2ef46d63300c0f06936bd6e6c0105faa11d509083ba8f2f9d237fb5b5"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:fd3a55deef00f689ce931d4d1b23fa9f04c880a48ee97af488fd215cf24e2a6c"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4f5e9cd989b45b73bd359f693b935364f7e1f79486e29015813c338450aa5a71"}, - {file = 
"SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d0ddd9db6e59c44875211bc4c7953a9f6638b937b0a88ae6d09eb46cced54eff"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2519f3a5d0517fc159afab1015e54bb81b4406c278749779be57a569d8d1bb0d"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:59b1ee96617135f6e1d6f275bbe988f419c5178016f3d41d3c0abb0c819f75bb"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:39769a115f730d683b0eb7b694db9789267bcd027326cccc3125e862eb03bfd8"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win32.whl", hash = "sha256:66bffbad8d6271bb1cc2f9a4ea4f86f80fe5e2e3e501a5ae2a3dc6a76e604e6f"}, - {file = "SQLAlchemy-2.0.36-cp311-cp311-win_amd64.whl", hash = "sha256:23623166bfefe1487d81b698c423f8678e80df8b54614c2bf4b4cfcd7c711959"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:f7b64e6ec3f02c35647be6b4851008b26cff592a95ecb13b6788a54ef80bbdd4"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:46331b00096a6db1fdc052d55b101dbbfc99155a548e20a0e4a8e5e4d1362855"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdf3386a801ea5aba17c6410dd1dc8d39cf454ca2565541b5ac42a84e1e28f53"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ac9dfa18ff2a67b09b372d5db8743c27966abf0e5344c555d86cc7199f7ad83a"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:90812a8933df713fdf748b355527e3af257a11e415b613dd794512461eb8a686"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:1bc330d9d29c7f06f003ab10e1eaced295e87940405afe1b110f2eb93a233588"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win32.whl", hash = 
"sha256:79d2e78abc26d871875b419e1fd3c0bca31a1cb0043277d0d850014599626c2e"}, - {file = "SQLAlchemy-2.0.36-cp312-cp312-win_amd64.whl", hash = "sha256:b544ad1935a8541d177cb402948b94e871067656b3a0b9e91dbec136b06a2ff5"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b5cc79df7f4bc3d11e4b542596c03826063092611e481fcf1c9dfee3c94355ef"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:3c01117dd36800f2ecaa238c65365b7b16497adc1522bf84906e5710ee9ba0e8"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9bc633f4ee4b4c46e7adcb3a9b5ec083bf1d9a97c1d3854b92749d935de40b9b"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9e46ed38affdfc95d2c958de328d037d87801cfcbea6d421000859e9789e61c2"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:b2985c0b06e989c043f1dc09d4fe89e1616aadd35392aea2844f0458a989eacf"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a121d62ebe7d26fec9155f83f8be5189ef1405f5973ea4874a26fab9f1e262c"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win32.whl", hash = "sha256:0572f4bd6f94752167adfd7c1bed84f4b240ee6203a95e05d1e208d488d0d436"}, - {file = "SQLAlchemy-2.0.36-cp313-cp313-win_amd64.whl", hash = "sha256:8c78ac40bde930c60e0f78b3cd184c580f89456dd87fc08f9e3ee3ce8765ce88"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:be9812b766cad94a25bc63bec11f88c4ad3629a0cec1cd5d4ba48dc23860486b"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:50aae840ebbd6cdd41af1c14590e5741665e5272d2fee999306673a1bb1fdb4d"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4557e1f11c5f653ebfdd924f3f9d5ebfc718283b0b9beebaa5dd6b77ec290971"}, - {file = 
"SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_aarch64.whl", hash = "sha256:07b441f7d03b9a66299ce7ccf3ef2900abc81c0db434f42a5694a37bd73870f2"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-musllinux_1_2_x86_64.whl", hash = "sha256:28120ef39c92c2dd60f2721af9328479516844c6b550b077ca450c7d7dc68575"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win32.whl", hash = "sha256:b81ee3d84803fd42d0b154cb6892ae57ea6b7c55d8359a02379965706c7efe6c"}, - {file = "SQLAlchemy-2.0.36-cp37-cp37m-win_amd64.whl", hash = "sha256:f942a799516184c855e1a32fbc7b29d7e571b52612647866d4ec1c3242578fcb"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:3d6718667da04294d7df1670d70eeddd414f313738d20a6f1d1f379e3139a545"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:72c28b84b174ce8af8504ca28ae9347d317f9dba3999e5981a3cd441f3712e24"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b11d0cfdd2b095e7b0686cf5fabeb9c67fae5b06d265d8180715b8cfa86522e3"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e32092c47011d113dc01ab3e1d3ce9f006a47223b18422c5c0d150af13a00687"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:6a440293d802d3011028e14e4226da1434b373cbaf4a4bbb63f845761a708346"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:c54a1e53a0c308a8e8a7dffb59097bff7facda27c70c286f005327f21b2bd6b1"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win32.whl", hash = "sha256:1e0d612a17581b6616ff03c8e3d5eff7452f34655c901f75d62bd86449d9750e"}, - {file = "SQLAlchemy-2.0.36-cp38-cp38-win_amd64.whl", hash = "sha256:8958b10490125124463095bbdadda5aa22ec799f91958e410438ad6c97a7b793"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:dc022184d3e5cacc9579e41805a681187650e170eb2fd70e28b86192a479dcaa"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-macosx_11_0_arm64.whl", hash = 
"sha256:b817d41d692bf286abc181f8af476c4fbef3fd05e798777492618378448ee689"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a4e46a888b54be23d03a89be510f24a7652fe6ff660787b96cd0e57a4ebcb46d"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c4ae3005ed83f5967f961fd091f2f8c5329161f69ce8480aa8168b2d7fe37f06"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03e08af7a5f9386a43919eda9de33ffda16b44eb11f3b313e6822243770e9763"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:3dbb986bad3ed5ceaf090200eba750b5245150bd97d3e67343a3cfed06feecf7"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win32.whl", hash = "sha256:9fe53b404f24789b5ea9003fc25b9a3988feddebd7e7b369c8fac27ad6f52f28"}, - {file = "SQLAlchemy-2.0.36-cp39-cp39-win_amd64.whl", hash = "sha256:af148a33ff0349f53512a049c6406923e4e02bf2f26c5fb285f143faf4f0e46a"}, - {file = "SQLAlchemy-2.0.36-py3-none-any.whl", hash = "sha256:fddbe92b4760c6f5d48162aef14824add991aeda8ddadb3c31d56eb15ca69f8e"}, - {file = "sqlalchemy-2.0.36.tar.gz", hash = "sha256:7f2767680b6d2398aea7082e45a774b2b0767b5c8d8ffb9c8b683088ea9b29c5"}, -] - -[package.dependencies] -greenlet = {version = "!=0.4.17", markers = "python_version < \"3.13\" and (platform_machine == \"aarch64\" or platform_machine == \"ppc64le\" or platform_machine == \"x86_64\" or platform_machine == \"amd64\" or platform_machine == \"AMD64\" or platform_machine == \"win32\" or platform_machine == \"WIN32\")"} -mypy = {version = ">=0.910", optional = true, markers = "extra == \"mypy\""} -typing-extensions = ">=4.6.0" - -[package.extras] -aiomysql = ["aiomysql (>=0.2.0)", "greenlet (!=0.4.17)"] -aioodbc = ["aioodbc", "greenlet (!=0.4.17)"] -aiosqlite = ["aiosqlite", "greenlet (!=0.4.17)", "typing_extensions (!=3.10.0.1)"] -asyncio = ["greenlet (!=0.4.17)"] -asyncmy = ["asyncmy (>=0.2.3,!=0.2.4,!=0.2.6)", "greenlet 
(!=0.4.17)"] -mariadb-connector = ["mariadb (>=1.0.1,!=1.1.2,!=1.1.5,!=1.1.10)"] -mssql = ["pyodbc"] -mssql-pymssql = ["pymssql"] -mssql-pyodbc = ["pyodbc"] -mypy = ["mypy (>=0.910)"] -mysql = ["mysqlclient (>=1.4.0)"] -mysql-connector = ["mysql-connector-python"] -oracle = ["cx_oracle (>=8)"] -oracle-oracledb = ["oracledb (>=1.0.1)"] -postgresql = ["psycopg2 (>=2.7)"] -postgresql-asyncpg = ["asyncpg", "greenlet (!=0.4.17)"] -postgresql-pg8000 = ["pg8000 (>=1.29.1)"] -postgresql-psycopg = ["psycopg (>=3.0.7)"] -postgresql-psycopg2binary = ["psycopg2-binary"] -postgresql-psycopg2cffi = ["psycopg2cffi"] -postgresql-psycopgbinary = ["psycopg[binary] (>=3.0.7)"] -pymysql = ["pymysql"] -sqlcipher = ["sqlcipher3_binary"] - -[[package]] -name = "sqlbag" -version = "0.1.1617247075" -description = "various snippets of SQL-related boilerplate" -optional = false -python-versions = "*" -files = [ - {file = "sqlbag-0.1.1617247075-py2.py3-none-any.whl", hash = "sha256:ecdef26d661f8640711030ac6ee618deb92b91f9f0fc2efbf8a3b133af13092d"}, - {file = "sqlbag-0.1.1617247075.tar.gz", hash = "sha256:b9d7862c3b2030356d796ca872907962fd54704066978d7ae89383f5123366ed"}, -] - -[package.dependencies] -packaging = "*" -six = "*" -sqlalchemy = "*" - -[package.extras] -maria = ["pymysql"] -pendulum = ["pendulum", "relativedelta"] -pg = ["psycopg2"] - -[[package]] -name = "sqlparse" -version = "0.5.2" -description = "A non-validating SQL parser." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "sqlparse-0.5.2-py3-none-any.whl", hash = "sha256:e99bc85c78160918c3e1d9230834ab8d80fc06c59d03f8db2618f65f65dda55e"}, - {file = "sqlparse-0.5.2.tar.gz", hash = "sha256:9e37b35e16d1cc652a2545f0997c1deb23ea28fa1f3eefe609eee3063c3b105f"}, -] - -[package.extras] -dev = ["build", "hatch"] -doc = ["sphinx"] - -[[package]] -name = "tomli" -version = "2.2.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.8" -files = [ - {file = "tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249"}, - {file = "tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee"}, - {file = "tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106"}, - {file = "tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8"}, - {file = "tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff"}, - {file = "tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = 
"sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea"}, - {file = "tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222"}, - {file = "tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd"}, - {file = "tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e"}, - {file = "tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98"}, - {file = "tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7"}, - {file = "tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281"}, - {file = "tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2"}, - {file = "tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744"}, - {file = "tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec"}, - {file = "tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69"}, - {file = "tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc"}, - {file = "tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff"}, -] - -[[package]] -name = "types-pyyaml" -version = "6.0.12.20240917" -description = "Typing stubs for PyYAML" -optional = false -python-versions = ">=3.8" -files = [ - {file = "types-PyYAML-6.0.12.20240917.tar.gz", hash = "sha256:d1405a86f9576682234ef83bcb4e6fff7c9305c8b1fbad5e0bcd4f7dbdc9c587"}, - {file = "types_PyYAML-6.0.12.20240917-py3-none-any.whl", hash = "sha256:392b267f1c0fe6022952462bf5d6523f31e37f6cea49b14cee7ad634b6301570"}, -] - -[[package]] -name = "typing-extensions" -version = "4.12.2" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = 
">=3.8" -files = [ - {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, - {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, -] - -[[package]] -name = "tzdata" -version = "2024.2" -description = "Provider of IANA time zone data" -optional = false -python-versions = ">=2" -files = [ - {file = "tzdata-2024.2-py2.py3-none-any.whl", hash = "sha256:a48093786cdcde33cad18c2555e8532f34422074448fbc874186f0abd79565cd"}, - {file = "tzdata-2024.2.tar.gz", hash = "sha256:7d85cc416e9382e69095b7bdf4afd9e3880418a2413feec7069d533d6b4e31cc"}, -] - -[[package]] -name = "urllib3" -version = "2.2.3" -description = "HTTP library with thread-safe connection pooling, file post, and more." -optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.2.3-py3-none-any.whl", hash = "sha256:ca899ca043dcb1bafa3e262d73aa25c465bfb49e0bd9dd5d59f1d0acba2f8fac"}, - {file = "urllib3-2.2.3.tar.gz", hash = "sha256:e7d814a81dad81e6caf2ec9fdedb284ecc9c73076b62654547cc64ccdcae26e9"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -h2 = ["h2 (>=4,<5)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "zipp" -version = "3.21.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.9" -files = [ - {file = "zipp-3.21.0-py3-none-any.whl", hash = "sha256:ac1bbe05fd2991f160ebce24ffbac5f6d11d83dc90891255885223d42b3cd931"}, - {file = "zipp-3.21.0.tar.gz", hash = "sha256:2c9958f6430a2040341a52eb608ed6dd93ef4392e02ffe219417c1b28b5dd1f4"}, -] - -[package.extras] -check = ["pytest-checkdocs (>=2.4)", "pytest-ruff (>=0.2.1)"] -cover = ["pytest-cov"] -doc = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (>=3.5)", "sphinx-lint"] -enabler = ["pytest-enabler 
(>=2.2)"] -test = ["big-O", "importlib-resources", "jaraco.functools", "jaraco.itertools", "jaraco.test", "more-itertools", "pytest (>=6,!=8.1.*)", "pytest-ignore-flaky"] -type = ["pytest-mypy"] - -[extras] -aiopg = ["aiopg", "psycopg2-binary"] -django = ["django"] -psycopg2 = ["psycopg2-binary"] -sphinx = ["sphinx"] -sqlalchemy = ["sqlalchemy"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "cd1df6f53a7fc2ada2f69b6464c33eccd250dd9fe92d2e0015a654db08bf4340" diff --git a/procrastinate/app.py b/procrastinate/app.py index ebd1b0705..9398d68a8 100644 --- a/procrastinate/app.py +++ b/procrastinate/app.py @@ -5,7 +5,13 @@ import functools import logging from collections.abc import Iterable, Iterator -from typing import TYPE_CHECKING, Any +from typing import ( + TYPE_CHECKING, + Any, + TypedDict, +) + +from typing_extensions import NotRequired, Unpack from procrastinate import blueprints, exceptions, jobs, manager, schema, utils from procrastinate import connector as connector_module @@ -16,6 +22,22 @@ logger = logging.getLogger(__name__) +class WorkerOptions(TypedDict): + queues: NotRequired[Iterable[str]] + name: NotRequired[str] + concurrency: NotRequired[int] + wait: NotRequired[bool] + fetch_job_polling_interval: NotRequired[float] + abort_job_polling_interval: NotRequired[float] + shutdown_graceful_timeout: NotRequired[float] + listen_notify: NotRequired[bool] + delete_jobs: NotRequired[str | jobs.DeleteJobCondition] + additional_context: NotRequired[dict[str, Any]] + install_signal_handlers: NotRequired[bool] + update_heartbeat_interval: NotRequired[float] + stalled_worker_timeout: NotRequired[float] + + class App(blueprints.Blueprint): """ The App is the main entry point for procrastinate integration. 
@@ -45,7 +67,7 @@ def __init__( *, connector: connector_module.BaseConnector, import_paths: Iterable[str] | None = None, - worker_defaults: dict | None = None, + worker_defaults: WorkerOptions | None = None, periodic_defaults: dict | None = None, ): """ @@ -206,10 +228,10 @@ def configure_task( ) raise exceptions.TaskNotFound from exc - def _worker(self, **kwargs) -> worker.Worker: + def _worker(self, **kwargs: Unpack[WorkerOptions]) -> worker.Worker: from procrastinate import worker - final_kwargs = {**self.worker_defaults, **kwargs} + final_kwargs: WorkerOptions = {**self.worker_defaults, **kwargs} return worker.Worker(app=self, **final_kwargs) @@ -225,7 +247,7 @@ def perform_import_paths(self): extra={"action": "imported_tasks", "tasks": list(self.tasks)}, ) - async def run_worker_async(self, **kwargs) -> None: + async def run_worker_async(self, **kwargs: Unpack[WorkerOptions]) -> None: """ Run a worker. This worker will run in the foreground and execute the jobs in the provided queues. If wait is True, the function will not @@ -250,17 +272,40 @@ async def run_worker_async(self, **kwargs) -> None: Name of the worker. Will be passed in the `JobContext` and used in the logs (defaults to ``None`` which will result in the worker named ``worker``). - timeout: ``float`` - Indicates the maximum duration (in seconds) the worker waits between - each database job poll. Raising this parameter can lower the rate at which - the worker makes queries to the database for requesting jobs. + fetch_job_polling_interval : ``float`` + Maximum time (in seconds) between database job polls. + + Controls the frequency of database queries for new jobs to start. + + When `listen_notify` is True, the polling interval acts as a fallback + mechanism and can reasonably be set to a higher value. + (defaults to 5.0) - listen_notify: ``bool`` - If ``True``, the worker will dedicate a connection from the pool to - listening to database events, notifying of newly available jobs. 
- If ``False``, the worker will just poll the database periodically - (see ``timeout``). (defaults to ``True``) - delete_jobs: ``str`` + abort_job_polling_interval : ``float`` + Maximum time (in seconds) between database abort requet polls. + + Controls the frequency of database queries for abort requests + + When `listen_notify` is True, the polling interval acts as a fallback + mechanism and can reasonably be set to a higher value. + + (defaults to 5.0) + shutdown_graceful_timeout: ``float`` + Indicates the maximum duration (in seconds) the worker waits for jobs to + complete when requested to stop. Jobs that have not been completed by that time + are aborted. A value of None corresponds to no timeout. + + (defaults to None) + listen_notify : ``bool`` + If ``True``, allocates a connection from the pool to + listen for: + - new job availability + - job abort requests + + Provides lower latency for job updates compared to polling alone. + + Note: Worker polls the database regardless of this setting. (defaults to ``True``) + delete_jobs : ``str`` If ``always``, the worker will automatically delete all jobs on completion. If ``successful`` the worker will only delete successful jobs. If ``never``, the worker will keep the jobs in the database. @@ -273,16 +318,16 @@ async def run_worker_async(self, **kwargs) -> None: worker. Use ``False`` if you want to handle signals yourself (e.g. if you run the work as an async task in a bigger application) (defaults to ``True``) + update_heartbeat_interval: ``float`` + Time in seconds between heartbeat updates of the worker. (defaults to 10) + stalled_worker_timeout: ``float`` + Time in seconds after which a worker is considered stalled if no heartbeat has + been received. A worker prunes stalled workers from the database at startup. 
+ (defaults to 30) """ self.perform_import_paths() worker = self._worker(**kwargs) - task = asyncio.create_task(worker.run()) - try: - await asyncio.shield(task) - except asyncio.CancelledError: - worker.stop() - await task - raise + await worker.run() def run_worker(self, **kwargs) -> None: """ diff --git a/procrastinate/builtin_tasks.py b/procrastinate/builtin_tasks.py index 422cc37c4..358459af5 100644 --- a/procrastinate/builtin_tasks.py +++ b/procrastinate/builtin_tasks.py @@ -11,7 +11,7 @@ async def remove_old_jobs( *, max_hours: int, queue: str | None = None, - remove_error: bool | None = False, + remove_failed: bool | None = False, remove_cancelled: bool | None = False, remove_aborted: bool | None = False, ) -> None: @@ -26,7 +26,7 @@ async def remove_old_jobs( queue : The name of the queue in which jobs will be deleted. If not specified, the task will delete jobs from all queues. - remove_error: + remove_failed: By default only successful jobs will be removed. When this parameter is True failed jobs will also be deleted. remove_cancelled: @@ -36,11 +36,10 @@ async def remove_old_jobs( By default only successful jobs will be removed. When this parameter is True aborted jobs will also be deleted. 
""" - assert context.app await context.app.job_manager.delete_old_jobs( nb_hours=max_hours, queue=queue, - include_error=remove_error, + include_failed=remove_failed, include_cancelled=remove_cancelled, include_aborted=remove_aborted, ) diff --git a/procrastinate/cli.py b/procrastinate/cli.py index 33cc281ab..7fb43364f 100644 --- a/procrastinate/cli.py +++ b/procrastinate/cli.py @@ -12,7 +12,7 @@ from typing import Any, Callable, Literal, Union import procrastinate -from procrastinate import connector, exceptions, jobs, shell, types, utils, worker +from procrastinate import connector, exceptions, jobs, shell, types, utils logger = logging.getLogger(__name__) @@ -292,11 +292,26 @@ def configure_worker_parser(subparsers: argparse._SubParsersAction): ) add_argument( worker_parser, - "-t", - "--timeout", + "-p", + "--fetch-job-polling-interval", type=float, help="How long to wait for database event push before polling", - envvar="WORKER_TIMEOUT", + envvar="WORKER_FETCH_JOB_POLLING_INTERVAL", + ) + add_argument( + worker_parser, + "-a", + "--abort-job-polling-interval", + type=float, + help="How often to polling for abort requests", + envvar="WORKER_ABORT_JOB_POLLING_INTERVAL", + ) + add_argument( + worker_parser, + "--shutdown-graceful-timeout", + type=float, + help="How long to wait for jobs to complete when shutting down before aborting them", + envvar="WORKER_SHUTDOWN_GRACEFUL_TIMEOUT", ) add_argument( worker_parser, @@ -324,8 +339,8 @@ def configure_worker_parser(subparsers: argparse._SubParsersAction): add_argument( worker_parser, "--delete-jobs", - choices=worker.DeleteJobCondition, - type=worker.DeleteJobCondition, + choices=jobs.DeleteJobCondition, + type=jobs.DeleteJobCondition, help="If set, delete jobs on completion", envvar="WORKER_DELETE_JOBS", ) diff --git a/procrastinate/connector.py b/procrastinate/connector.py index 941b5b191..8ca8676f6 100644 --- a/procrastinate/connector.py +++ b/procrastinate/connector.py @@ -1,8 +1,7 @@ from __future__ import 
annotations -import asyncio -from collections.abc import Iterable -from typing import Any, Callable +from collections.abc import Awaitable, Iterable +from typing import Any, Callable, Protocol from typing_extensions import LiteralString @@ -14,6 +13,10 @@ LISTEN_TIMEOUT = 30.0 +class Notify(Protocol): + def __call__(self, *, channel: str, payload: str) -> Awaitable[None]: ... + + class BaseConnector: json_dumps: Callable | None = None json_loads: Callable | None = None @@ -60,7 +63,9 @@ async def execute_query_all_async( raise exceptions.SyncConnectorConfigurationError async def listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, + on_notification: Notify, + channels: Iterable[str], ) -> None: raise exceptions.SyncConnectorConfigurationError @@ -99,6 +104,6 @@ def execute_query_all( return utils.async_to_sync(self.execute_query_all_async, query, **arguments) async def listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, on_notification: Notify, channels: Iterable[str] ) -> None: raise NotImplementedError diff --git a/procrastinate/contrib/aiopg/aiopg_connector.py b/procrastinate/contrib/aiopg/aiopg_connector.py index 4a8a0c015..00e6d0ed7 100644 --- a/procrastinate/contrib/aiopg/aiopg_connector.py +++ b/procrastinate/contrib/aiopg/aiopg_connector.py @@ -3,8 +3,9 @@ import asyncio import functools import logging +import re from collections.abc import AsyncGenerator, Coroutine, Iterable -from typing import Any, Callable +from typing import Any, Callable, TypeVar, cast import aiopg import psycopg2 @@ -13,12 +14,13 @@ import psycopg2.sql from psycopg2.extras import Json, RealDictCursor -from procrastinate import connector, exceptions, sql, utils +from procrastinate import connector, exceptions, manager, sql, utils from procrastinate.contrib.psycopg2 import psycopg2_connector logger = logging.getLogger(__name__) CoroutineFunction = Callable[..., Coroutine] +T = TypeVar("T", bound=CoroutineFunction) 
@utils.async_context_decorator @@ -31,12 +33,23 @@ async def wrap_exceptions() -> AsyncGenerator[None, None]: try: yield except psycopg2.errors.UniqueViolation as exc: - raise exceptions.UniqueViolation(constraint_name=exc.diag.constraint_name) + constraint_name = exc.diag.constraint_name + queueing_lock = None + if constraint_name == manager.QUEUEING_LOCK_CONSTRAINT: + assert exc.diag.message_detail + match = re.search(r"Key \((.*?)\)=\((.*?)\)", exc.diag.message_detail) + assert match + column, queueing_lock = match.groups() + assert column == "queueing_lock" + + raise exceptions.UniqueViolation( + constraint_name=constraint_name, queueing_lock=queueing_lock + ) except psycopg2.Error as exc: raise exceptions.ConnectorException from exc -def wrap_query_exceptions(coro: CoroutineFunction) -> CoroutineFunction: +def wrap_query_exceptions(coro: T) -> T: """ Detect aiopg OperationalError's with a "server closed the connection unexpectedly" message and retry a number of times. @@ -70,7 +83,8 @@ async def wrapped(*args, **kwargs): f"Could not get a valid connection after {max_tries} tries" ) from final_exc - return wrapped + f = cast(T, wrapped) + return f class AiopgConnector(connector.BaseAsyncConnector): @@ -226,13 +240,18 @@ def __del__(self): while self._pool._free: self._pool._free.popleft().close() + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return Json(value, dumps=self.json_dumps) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]): - return { - key: Json(value, dumps=self.json_dumps) - if isinstance(value, dict) - else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} # Pools and single connections do not exactly share their cursor API: # - connection.cursor() is 
an async context manager (async with) @@ -284,7 +303,7 @@ def _make_dynamic_query(self, query: str, **identifiers: str) -> Any: @wrap_exceptions() async def listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, on_notification: connector.Notify, channels: Iterable[str] ) -> None: # We need to acquire a dedicated connection, and use the listen # query @@ -305,14 +324,14 @@ async def listen_notify( query=sql.queries["listen_queue"], channel_name=channel_name ), ) - # Initial set() lets caller know that we're ready to listen - event.set() - await self._loop_notify(event=event, connection=connection) + await self._loop_notify( + on_notification=on_notification, connection=connection + ) @wrap_exceptions() async def _loop_notify( self, - event: asyncio.Event, + on_notification: connector.Notify, connection: aiopg.Connection, timeout: float = connector.LISTEN_TIMEOUT, ) -> None: @@ -325,12 +344,15 @@ async def _loop_notify( if connection.closed: return try: - await asyncio.wait_for(connection.notifies.get(), timeout) + notification = await asyncio.wait_for( + connection.notifies.get(), timeout + ) + await on_notification( + channel=notification.channel, payload=notification.payload + ) except asyncio.TimeoutError: continue except psycopg2.Error: # aiopg>=1.3.1 will raise if the connection is closed while # we wait continue - - event.set() diff --git a/procrastinate/contrib/django/__init__.py b/procrastinate/contrib/django/__init__.py index 8eacf0c47..148cb7d1b 100644 --- a/procrastinate/contrib/django/__init__.py +++ b/procrastinate/contrib/django/__init__.py @@ -7,4 +7,3 @@ "app", "connector_params", ] -default_app_config = "procrastinate.contrib.django.apps.ProcrastinateConfig" diff --git a/procrastinate/contrib/django/admin.py b/procrastinate/contrib/django/admin.py index 80576953f..f1ae8b5b1 100644 --- a/procrastinate/contrib/django/admin.py +++ b/procrastinate/contrib/django/admin.py @@ -2,12 +2,19 @@ import json +from django.apps import apps 
from django.contrib import admin +from django.db.models import Prefetch, QuerySet +from django.http.request import HttpRequest from django.template.loader import render_to_string from django.utils import timezone from django.utils.html import format_html from django.utils.safestring import mark_safe +from procrastinate import App, utils +from procrastinate.contrib.django.apps import ProcrastinateConfig +from procrastinate.jobs import Status + from . import models JOB_STATUS_EMOJI_MAPPING = { @@ -16,7 +23,7 @@ "failed": "❌", "succeeded": "✅", "cancelled": "🤚", - "aborting": "🔌🕑️", + "aborting": "🔌🕑️", # legacy, not used anymore "aborted": "🔌", } @@ -77,6 +84,18 @@ def has_add_permission(self, request, obj=None): def has_delete_permission(self, request, obj=None): return False + def get_queryset(self, request): + return ( + super() + .get_queryset(request) + .prefetch_related( + Prefetch( + "procrastinateevent_set", + queryset=models.ProcrastinateEvent.objects.order_by("-at"), + ) + ) + ) + @admin.display(description="Status") def pretty_status(self, instance: models.ProcrastinateJob) -> str: emoji = JOB_STATUS_EMOJI_MAPPING.get(instance.status, "") @@ -103,7 +122,7 @@ def pretty_args(self, instance: models.ProcrastinateJob) -> str: @admin.display(description="Summary") def summary(self, instance: models.ProcrastinateJob) -> str: - if last_event := instance.procrastinateevent_set.latest(): # type: ignore[attr-defined] + if last_event := instance.procrastinateevent_set.first(): # type: ignore[attr-defined] return mark_safe( render_to_string( "procrastinate/admin/summary.html", @@ -115,3 +134,30 @@ def summary(self, instance: models.ProcrastinateJob) -> str: ).strip() ) return "" + + @admin.action(description="Retry Job") + def retry(self, request: HttpRequest, queryset: QuerySet[models.ProcrastinateJob]): + app_config: ProcrastinateConfig = apps.get_app_config("procrastinate") # pyright: ignore [reportAssignmentType] + p_app: App = app_config.app + for job in 
queryset.filter( + status__in=(Status.FAILED.value, Status.DOING.value) + ): + p_app.job_manager.retry_job_by_id( + job.id, utils.utcnow(), job.priority, job.queue_name, job.lock + ) + + @admin.action(description="Cancel Job (only 'todo' jobs)") + def cancel(self, request: HttpRequest, queryset: QuerySet[models.ProcrastinateJob]): + app_config: ProcrastinateConfig = apps.get_app_config("procrastinate") # pyright: ignore [reportAssignmentType] + p_app: App = app_config.app + for job in queryset.filter(status=Status.TODO.value): + p_app.job_manager.cancel_job_by_id(job.id, abort=False) + + @admin.action(description="Abort Job (includes 'todo' & 'doing' jobs)") + def abort(self, request: HttpRequest, queryset: QuerySet[models.ProcrastinateJob]): + app_config: ProcrastinateConfig = apps.get_app_config("procrastinate") # pyright: ignore [reportAssignmentType] + p_app: App = app_config.app + for job in queryset.filter(status__in=(Status.TODO.value, Status.DOING.value)): + p_app.job_manager.cancel_job_by_id(job.id, abort=True) + + actions = [retry, cancel, abort] diff --git a/procrastinate/contrib/django/django_connector.py b/procrastinate/contrib/django/django_connector.py index 6196e5401..0b3ece5b0 100644 --- a/procrastinate/contrib/django/django_connector.py +++ b/procrastinate/contrib/django/django_connector.py @@ -1,6 +1,5 @@ from __future__ import annotations -import asyncio import contextlib from collections.abc import Generator, Iterable from typing import ( @@ -112,11 +111,18 @@ def _dictfetch(self, cursor): columns = [col[0] for col in cursor.description] return (dict(zip(columns, row)) for row in cursor.fetchall()) + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return Jsonb(value) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]) -> 
dict[str, Any]: - return { - key: Jsonb(value) if isinstance(value, dict) else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} @wrap_exceptions() def execute_query(self, query: LiteralString, **arguments: Any) -> None: @@ -140,7 +146,7 @@ def execute_query_all( return list(self._dictfetch(cursor)) async def listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, on_notification: connector.Notify, channels: Iterable[str] ) -> None: raise NotImplementedError( "listen/notify is not supported with Django connector" diff --git a/procrastinate/contrib/django/migrations/0030_alter_procrastinateevent_options.py b/procrastinate/contrib/django/migrations/0030_alter_procrastinateevent_options.py index 758c5a43c..62434e079 100644 --- a/procrastinate/contrib/django/migrations/0030_alter_procrastinateevent_options.py +++ b/procrastinate/contrib/django/migrations/0030_alter_procrastinateevent_options.py @@ -5,13 +5,13 @@ class Migration(migrations.Migration): - dependencies = [ - ("procrastinate", "0029_add_additional_params_to_retry_job"), - ] - operations = [ migrations.AlterModelOptions( name="procrastinateevent", options={"get_latest_by": "at", "managed": False}, ), ] + name = "0030_alter_procrastinateevent_options" + dependencies = [ + ("procrastinate", "0029_add_additional_params_to_retry_job"), + ] diff --git a/procrastinate/contrib/django/migrations/0032_pre_cancel_notification.py b/procrastinate/contrib/django/migrations/0032_pre_cancel_notification.py new file mode 100644 index 000000000..fd3e864fa --- /dev/null +++ b/procrastinate/contrib/django/migrations/0032_pre_cancel_notification.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +from django.db import migrations, models + +from .. 
import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.00.00_01_pre_cancel_notification.sql" + ), + migrations.AddField( + "procrastinatejob", + "abort_requested", + models.BooleanField(), + ), + ] + name = "0032_pre_cancel_notification" + dependencies = [("procrastinate", "0031_add_indexes_for_fetch_job")] diff --git a/procrastinate/contrib/django/migrations/0033_post_cancel_notification.py b/procrastinate/contrib/django/migrations/0033_post_cancel_notification.py new file mode 100644 index 000000000..1a51a34af --- /dev/null +++ b/procrastinate/contrib/django/migrations/0033_post_cancel_notification.py @@ -0,0 +1,30 @@ +from __future__ import annotations + +from django.db import migrations, models + +from .. import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.00.00_50_post_cancel_notification.sql" + ), + migrations.AlterField( + "procrastinatejob", + "status", + models.CharField( + choices=[ + ("todo", "todo"), + ("doing", "doing"), + ("succeeded", "succeeded"), + ("failed", "failed"), + ("cancelled", "cancelled"), + ("aborted", "aborted"), + ], + max_length=32, + ), + ), + ] + name = "0033_post_cancel_notification" + dependencies = [("procrastinate", "0032_pre_cancel_notification")] diff --git a/procrastinate/contrib/django/migrations/0034_pre_add_heartbeat.py b/procrastinate/contrib/django/migrations/0034_pre_add_heartbeat.py new file mode 100644 index 000000000..e0f95b4a2 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0034_pre_add_heartbeat.py @@ -0,0 +1,18 @@ +from __future__ import annotations + +from django.db import migrations, models + +from .. 
import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL(name="03.01.00_01_pre_add_heartbeat.sql"), + migrations.AddField( + "procrastinatejob", + "heartbeat_updated_at", + models.DateTimeField(blank=True, null=True), + ), + ] + name = "0034_pre_add_heartbeat" + dependencies = [("procrastinate", "0033_post_cancel_notification")] diff --git a/procrastinate/contrib/django/migrations/0035_post_add_heartbeat.py b/procrastinate/contrib/django/migrations/0035_post_add_heartbeat.py new file mode 100644 index 000000000..53034ef17 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0035_post_add_heartbeat.py @@ -0,0 +1,13 @@ +from __future__ import annotations + +from django.db import migrations + +from .. import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL(name="03.01.00_50_post_add_heartbeat.sql"), + ] + name = "0035_post_add_heartbeat" + dependencies = [("procrastinate", "0034_pre_add_heartbeat")] diff --git a/procrastinate/contrib/django/migrations/0036_add_worker_model.py b/procrastinate/contrib/django/migrations/0036_add_worker_model.py new file mode 100644 index 000000000..93d9d7ff5 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0036_add_worker_model.py @@ -0,0 +1,37 @@ +from __future__ import annotations + +from django.db import migrations, models + +import procrastinate.contrib.django.models + + +class Migration(migrations.Migration): + operations = [ + migrations.CreateModel( + name="ProcrastinateWorker", + fields=[ + ("id", models.AutoField(primary_key=True, serialize=False)), + ("last_heartbeat", models.DateTimeField()), + ], + options={ + "db_table": "procrastinate_workers", + "managed": False, + }, + bases=( + procrastinate.contrib.django.models.ProcrastinateReadOnlyModelMixin, + models.Model, + ), + ), + migrations.AddField( + "procrastinatejob", + "worker", + models.ForeignKey( + blank=True, + 
null=True, + on_delete=models.SET_NULL, + to="procrastinate.procrastinateworker", + ), + ), + ] + name = "0036_add_worker_model" + dependencies = [("procrastinate", "0035_post_add_heartbeat")] diff --git a/procrastinate/contrib/django/migrations/0037_pre_batch_defer_jobs.py b/procrastinate/contrib/django/migrations/0037_pre_batch_defer_jobs.py new file mode 100644 index 000000000..f3ecb25f7 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0037_pre_batch_defer_jobs.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from django.db import migrations + +from .. import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.02.00_01_pre_batch_defer_jobs.sql" + ), + ] + name = "0037_pre_batch_defer_jobs" + dependencies = [("procrastinate", "0036_add_worker_model")] diff --git a/procrastinate/contrib/django/migrations/0038_post_batch_defer_jobs.py b/procrastinate/contrib/django/migrations/0038_post_batch_defer_jobs.py new file mode 100644 index 000000000..0547911f7 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0038_post_batch_defer_jobs.py @@ -0,0 +1,15 @@ +from __future__ import annotations + +from django.db import migrations + +from .. import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.02.00_50_post_batch_defer_jobs.sql" + ), + ] + name = "0038_post_batch_defer_jobs" + dependencies = [("procrastinate", "0037_pre_batch_defer_jobs")] diff --git a/procrastinate/contrib/django/migrations/0039_pre_priority_lock_fetch_job.py b/procrastinate/contrib/django/migrations/0039_pre_priority_lock_fetch_job.py new file mode 100644 index 000000000..4fb1bc14f --- /dev/null +++ b/procrastinate/contrib/django/migrations/0039_pre_priority_lock_fetch_job.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from django.db import migrations + +from .. 
import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.03.00_01_pre_priority_lock_fetch_job.sql" + ), + ] + name = "0039_pre_priority_lock_fetch_job" + dependencies = [ + ("procrastinate", "0038_post_batch_defer_jobs"), + ] diff --git a/procrastinate/contrib/django/migrations/0040_pre_retry_failed_job.py b/procrastinate/contrib/django/migrations/0040_pre_retry_failed_job.py new file mode 100644 index 000000000..b5eb08663 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0040_pre_retry_failed_job.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from django.db import migrations + +from .. import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.04.00_01_pre_add_retry_failed_job_procedure.sql" + ), + ] + name = "0040_retry_failed_job" + dependencies = [ + ("procrastinate", "0039_pre_priority_lock_fetch_job"), + ] diff --git a/procrastinate/contrib/django/migrations/0041_post_retry_failed_job.py b/procrastinate/contrib/django/migrations/0041_post_retry_failed_job.py new file mode 100644 index 000000000..37912df97 --- /dev/null +++ b/procrastinate/contrib/django/migrations/0041_post_retry_failed_job.py @@ -0,0 +1,17 @@ +from __future__ import annotations + +from django.db import migrations + +from .. 
import migrations_utils + + +class Migration(migrations.Migration): + operations = [ + migrations_utils.RunProcrastinateSQL( + name="03.04.00_50_post_add_retry_failed_job_procedure.sql" + ), + ] + name = "0041_post_retry_failed_job" + dependencies = [ + ("procrastinate", "0040_pre_retry_failed_job"), + ] diff --git a/procrastinate/contrib/django/migrations_utils.py b/procrastinate/contrib/django/migrations_utils.py index 513ecc2a0..6fb8a3d27 100644 --- a/procrastinate/contrib/django/migrations_utils.py +++ b/procrastinate/contrib/django/migrations_utils.py @@ -1,7 +1,7 @@ from __future__ import annotations import functools -import importlib.resources as importlib_resources +from importlib import resources from django.db import migrations @@ -13,7 +13,7 @@ def list_migration_files() -> dict[str, str]: """ return { p.name: p.read_text(encoding="utf-8") - for p in importlib_resources.files("procrastinate.sql.migrations").iterdir() + for p in resources.files("procrastinate.sql.migrations").iterdir() if p.name.endswith(".sql") } diff --git a/procrastinate/contrib/django/models.py b/procrastinate/contrib/django/models.py index 23206d710..41b78b232 100644 --- a/procrastinate/contrib/django/models.py +++ b/procrastinate/contrib/django/models.py @@ -60,6 +60,20 @@ def __getattribute__(self, name: str) -> Any: return super().__getattribute__(name) +class ProcrastinateWorker(ProcrastinateReadOnlyModelMixin, models.Model): + id = models.BigAutoField(primary_key=True) + last_heartbeat = models.DateTimeField() + + objects = ProcrastinateReadOnlyManager() + + class Meta: + managed = False + db_table = "procrastinate_workers" + + def __str__(self) -> str: + return f"Worker {self.id} - Last heartbeat at {self.last_heartbeat}" + + class ProcrastinateJob(ProcrastinateReadOnlyModelMixin, models.Model): STATUSES = ( "todo", @@ -67,7 +81,6 @@ class ProcrastinateJob(ProcrastinateReadOnlyModelMixin, models.Model): "succeeded", "failed", "cancelled", - "aborting", "aborted", ) id = 
models.BigAutoField(primary_key=True) @@ -80,6 +93,10 @@ class ProcrastinateJob(ProcrastinateReadOnlyModelMixin, models.Model): scheduled_at = models.DateTimeField(blank=True, null=True) attempts = models.IntegerField() queueing_lock = models.TextField(unique=True, blank=True, null=True) + abort_requested = models.BooleanField() + worker = models.ForeignKey( + ProcrastinateWorker, on_delete=models.SET_NULL, blank=True, null=True + ) objects = ProcrastinateReadOnlyManager() @@ -99,6 +116,7 @@ def procrastinate_job(self) -> jobs.Job: status=self.status, scheduled_at=self.scheduled_at, attempts=self.attempts, + abort_requested=self.abort_requested, queueing_lock=self.queueing_lock, ) diff --git a/procrastinate/contrib/django/settings.py b/procrastinate/contrib/django/settings.py index bd0a4e6ee..d773a9e5a 100644 --- a/procrastinate/contrib/django/settings.py +++ b/procrastinate/contrib/django/settings.py @@ -5,6 +5,8 @@ from django.conf import settings as django_settings from typing_extensions import dataclass_transform +from procrastinate.app import WorkerOptions + @dataclass_transform() class BaseSettings: @@ -20,7 +22,7 @@ class Settings(BaseSettings): AUTODISCOVER_MODULE_NAME: str = "tasks" IMPORT_PATHS: list[str] = [] DATABASE_ALIAS: str = "default" - WORKER_DEFAULTS: dict[str, str] | None = None + WORKER_DEFAULTS: WorkerOptions | None = None PERIODIC_DEFAULTS: dict[str, str] | None = None ON_APP_READY: str | None = None READONLY_MODELS: bool = True diff --git a/procrastinate/contrib/psycopg2/psycopg2_connector.py b/procrastinate/contrib/psycopg2/psycopg2_connector.py index fb38999ab..0bae25685 100644 --- a/procrastinate/contrib/psycopg2/psycopg2_connector.py +++ b/procrastinate/contrib/psycopg2/psycopg2_connector.py @@ -3,6 +3,7 @@ import contextlib import functools import logging +import re from collections.abc import Generator, Iterator from typing import Any, Callable @@ -11,7 +12,7 @@ import psycopg2.pool from psycopg2.extras import Json, RealDictCursor 
-from procrastinate import connector, exceptions +from procrastinate import connector, exceptions, manager logger = logging.getLogger(__name__) @@ -24,7 +25,18 @@ def wrap_exceptions() -> Generator[None, None, None]: try: yield except psycopg2.errors.UniqueViolation as exc: - raise exceptions.UniqueViolation(constraint_name=exc.diag.constraint_name) + constraint_name = exc.diag.constraint_name + queueing_lock = None + if constraint_name == manager.QUEUEING_LOCK_CONSTRAINT: + assert exc.diag.message_detail + match = re.search(r"Key \((.*?)\)=\((.*?)\)", exc.diag.message_detail) + assert match + column, queueing_lock = match.groups() + assert column == "queueing_lock" + + raise exceptions.UniqueViolation( + constraint_name=constraint_name, queueing_lock=queueing_lock + ) except psycopg2.Error as exc: raise exceptions.ConnectorException from exc @@ -162,13 +174,18 @@ def pool(self) -> psycopg2.pool.AbstractConnectionPool: raise exceptions.AppNotOpen return self._pool + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return Json(value, dumps=self.json_dumps) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]): - return { - key: Json(value, dumps=self.json_dumps) - if isinstance(value, dict) - else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} @contextlib.contextmanager def _connection(self) -> Iterator[psycopg2.extensions.connection]: diff --git a/procrastinate/contrib/sqlalchemy/psycopg2_connector.py b/procrastinate/contrib/sqlalchemy/psycopg2_connector.py index 1459c5caa..206426d1b 100644 --- a/procrastinate/contrib/sqlalchemy/psycopg2_connector.py +++ b/procrastinate/contrib/sqlalchemy/psycopg2_connector.py @@ -8,9 +8,10 @@ import psycopg2.errors import sqlalchemy 
+import sqlalchemy.exc from psycopg2.extras import Json -from procrastinate import connector, exceptions +from procrastinate import connector, exceptions, manager @contextlib.contextmanager @@ -20,12 +21,24 @@ def wrap_exceptions() -> Generator[None, None, None]: """ try: yield - except sqlalchemy.exc.SQLAlchemyError as exc: + except sqlalchemy.exc.StatementError as exc: if isinstance(exc.orig, psycopg2.errors.UniqueViolation): + exc = exc.orig + constraint_name = exc.diag.constraint_name + queueing_lock = None + if constraint_name == manager.QUEUEING_LOCK_CONSTRAINT: + assert exc.diag.message_detail + match = re.search(r"Key \((.*?)\)=\((.*?)\)", exc.diag.message_detail) + assert match + column, queueing_lock = match.groups() + assert column == "queueing_lock" + raise exceptions.UniqueViolation( - constraint_name=exc.orig.diag.constraint_name + constraint_name=constraint_name, queueing_lock=queueing_lock ) raise exceptions.ConnectorException from exc + except sqlalchemy.exc.SQLAlchemyError as exc: + raise exceptions.ConnectorException from exc def wrap_query_exceptions(func: Callable) -> Callable: @@ -133,13 +146,18 @@ def engine(self) -> sqlalchemy.engine.Engine: raise exceptions.AppNotOpen return self._engine + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return Json(value, dumps=self.json_dumps) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]): - return { - key: Json(value, dumps=self.json_dumps) - if isinstance(value, dict) - else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} @wrap_exceptions() @wrap_query_exceptions diff --git a/procrastinate_demos/README.md b/procrastinate/demos/README.md similarity index 69% rename from procrastinate_demos/README.md rename 
to procrastinate/demos/README.md index b7d7cb554..e0c5a6e2b 100644 --- a/procrastinate_demos/README.md +++ b/procrastinate/demos/README.md @@ -3,12 +3,12 @@ This modules contains 3 mini-applications that showcase using procrastinate in difference contexts: -- [demo_django]: a Django application, -- [demo_async]: an async application, it could be a - FastAPI application, but to make things simpler, it's just a plain - asyncio application. -- [demo_sync]: a synchronous application, similarily, it - could be representative of a Flask application. +- [demo_django]: a Django application, +- [demo_async]: an async application, it could be a + FastAPI application, but to make things simpler, it's just a plain + asyncio application. +- [demo_sync]: a synchronous application, similarily, it + could be representative of a Flask application. The demos are there both to showcase the code and as a way to easily recreate the issues that are reported in the issues. They are not @@ -17,8 +17,8 @@ up the Procrastinate development environment (see [contributing doc](contributing)) To run the demos, set PROCRASTINATE_APP to -`procrastinate_demos..app.app`, then run the -`procrastinate` CLI or `python -m procrastinate_demos.` +`procrastinate.demos..app.app`, then run the +`procrastinate` CLI or `python -m procrastinate.demos.` for the application main entrypoint. For all apps, you'll need to have a PostgreSQL database running, and set @@ -34,13 +34,13 @@ baclground processes). Launch the worker in the first terminal: ```console -$ PROCRASTINATE_APP=procrastinate_demos.demo_async.app.app procrastinate worker +$ PROCRASTINATE_APP=procrastinate.demos.demo_async.app.app procrastinate worker ``` In the second terminal, run the application: ```console -$ python -m procrastinate_demos.demo_async +$ python -m procrastinate.demos.demo_async ``` Defer a job by sending commands, as indicated by the application. @@ -50,11 +50,11 @@ Defer a job by sending commands, as indicated by the application. 
Same with `sync`: ```console -$ PROCRASTINATE_APP=procrastinate_demos.demo_sync.app.app procrastinate worker +$ PROCRASTINATE_APP=procrastinate.demos.demo_sync.app.app procrastinate worker ``` ```console -$ python -m procrastinate_demos.demo_sync +$ python -m procrastinate.demos.demo_sync ``` ## Django demo @@ -62,14 +62,14 @@ $ python -m procrastinate_demos.demo_sync In the first terminal, run the migrations, and then the Django server: ```console -$ procrastinate_demos/demo_django/manage.py migrate -$ procrastinate_demos/demo_django/manage.py runserver +$ procrastinate/demos/demo_django/manage.py migrate +$ procrastinate/demos/demo_django/manage.py runserver ``` In the second terminal, run the procrastinate worker: ```console -$ procrastinate_demos/demo_django/manage.py procrastinate worker +$ procrastinate/demos/demo_django/manage.py procrastinate worker ``` In your browser (`http://localhost:8000/`), you can now: - Create a @@ -86,7 +86,7 @@ deferring a job from another job.) You can visit the admin, too. You'll need to create a superuser first: ```console -$ procrastinate_demos/demo_django/manage.py createsuperuser +$ procrastinate/demos/demo_django/manage.py createsuperuser ``` Then lauch the server, head to `http://localhost:8000/admin/` and see the jobs, @@ -94,6 +94,6 @@ the events and the periodic defers. 
(…Yes I’m not a frontend dev :) ) -[demo_async]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate_demos/demo_async/ -[demo_django]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate_demos/demo_django/ -[demo_sync]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate_demos/demo_sync/ +[demo_async]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate/demos/demo_async/ +[demo_django]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate/demos/demo_django/ +[demo_sync]: https://github.com/procrastinate-org/procrastinate/tree/main/procrastinate/demos/demo_sync/ diff --git a/procrastinate_demos/__init__.py b/procrastinate/demos/__init__.py similarity index 100% rename from procrastinate_demos/__init__.py rename to procrastinate/demos/__init__.py diff --git a/procrastinate_demos/demo_async/__init__.py b/procrastinate/demos/demo_async/__init__.py similarity index 100% rename from procrastinate_demos/demo_async/__init__.py rename to procrastinate/demos/demo_async/__init__.py diff --git a/procrastinate_demos/demo_async/__main__.py b/procrastinate/demos/demo_async/__main__.py similarity index 100% rename from procrastinate_demos/demo_async/__main__.py rename to procrastinate/demos/demo_async/__main__.py diff --git a/procrastinate_demos/demo_async/app.py b/procrastinate/demos/demo_async/app.py similarity index 69% rename from procrastinate_demos/demo_async/app.py rename to procrastinate/demos/demo_async/app.py index 1c5cd26a1..1d834bbaf 100644 --- a/procrastinate_demos/demo_async/app.py +++ b/procrastinate/demos/demo_async/app.py @@ -4,5 +4,5 @@ app = procrastinate.App( connector=procrastinate.PsycopgConnector(), - import_paths=["procrastinate_demos.demo_async.tasks"], + import_paths=["procrastinate.demos.demo_async.tasks"], ) diff --git a/procrastinate_demos/demo_async/tasks.py b/procrastinate/demos/demo_async/tasks.py similarity index 100% rename from 
procrastinate_demos/demo_async/tasks.py rename to procrastinate/demos/demo_async/tasks.py diff --git a/procrastinate_demos/demo_django/__init__.py b/procrastinate/demos/demo_django/__init__.py similarity index 100% rename from procrastinate_demos/demo_django/__init__.py rename to procrastinate/demos/demo_django/__init__.py diff --git a/procrastinate_demos/demo_django/__main__.py b/procrastinate/demos/demo_django/__main__.py similarity index 100% rename from procrastinate_demos/demo_django/__main__.py rename to procrastinate/demos/demo_django/__main__.py diff --git a/procrastinate_demos/demo_django/demo/__init__.py b/procrastinate/demos/demo_django/demo/__init__.py similarity index 100% rename from procrastinate_demos/demo_django/demo/__init__.py rename to procrastinate/demos/demo_django/demo/__init__.py diff --git a/procrastinate_demos/demo_django/demo/admin.py b/procrastinate/demos/demo_django/demo/admin.py similarity index 100% rename from procrastinate_demos/demo_django/demo/admin.py rename to procrastinate/demos/demo_django/demo/admin.py diff --git a/procrastinate_demos/demo_django/demo/apps.py b/procrastinate/demos/demo_django/demo/apps.py similarity index 66% rename from procrastinate_demos/demo_django/demo/apps.py rename to procrastinate/demos/demo_django/demo/apps.py index 17e8b805f..a1d9fc474 100644 --- a/procrastinate_demos/demo_django/demo/apps.py +++ b/procrastinate/demos/demo_django/demo/apps.py @@ -4,4 +4,4 @@ class DemoConfig(AppConfig): - name = "procrastinate_demos.demo_django.demo" + name = "procrastinate.demos.demo_django.demo" diff --git a/procrastinate_demos/demo_django/demo/migrations/0001_initial.py b/procrastinate/demos/demo_django/demo/migrations/0001_initial.py similarity index 100% rename from procrastinate_demos/demo_django/demo/migrations/0001_initial.py rename to procrastinate/demos/demo_django/demo/migrations/0001_initial.py diff --git a/procrastinate_demos/demo_django/demo/migrations/__init__.py 
b/procrastinate/demos/demo_django/demo/migrations/__init__.py similarity index 100% rename from procrastinate_demos/demo_django/demo/migrations/__init__.py rename to procrastinate/demos/demo_django/demo/migrations/__init__.py diff --git a/procrastinate_demos/demo_django/demo/models.py b/procrastinate/demos/demo_django/demo/models.py similarity index 100% rename from procrastinate_demos/demo_django/demo/models.py rename to procrastinate/demos/demo_django/demo/models.py diff --git a/procrastinate_demos/demo_django/demo/tasks.py b/procrastinate/demos/demo_django/demo/tasks.py similarity index 100% rename from procrastinate_demos/demo_django/demo/tasks.py rename to procrastinate/demos/demo_django/demo/tasks.py diff --git a/procrastinate_demos/demo_django/demo/templates/demo/book_form.html b/procrastinate/demos/demo_django/demo/templates/demo/book_form.html similarity index 100% rename from procrastinate_demos/demo_django/demo/templates/demo/book_form.html rename to procrastinate/demos/demo_django/demo/templates/demo/book_form.html diff --git a/procrastinate_demos/demo_django/demo/templates/demo/book_list.html b/procrastinate/demos/demo_django/demo/templates/demo/book_list.html similarity index 100% rename from procrastinate_demos/demo_django/demo/templates/demo/book_list.html rename to procrastinate/demos/demo_django/demo/templates/demo/book_list.html diff --git a/procrastinate_demos/demo_django/demo/views.py b/procrastinate/demos/demo_django/demo/views.py similarity index 100% rename from procrastinate_demos/demo_django/demo/views.py rename to procrastinate/demos/demo_django/demo/views.py diff --git a/procrastinate_demos/demo_django/manage.py b/procrastinate/demos/demo_django/manage.py similarity index 91% rename from procrastinate_demos/demo_django/manage.py rename to procrastinate/demos/demo_django/manage.py index 4b4b417f6..793f2c7ed 100755 --- a/procrastinate_demos/demo_django/manage.py +++ b/procrastinate/demos/demo_django/manage.py @@ -10,7 +10,7 @@ def main(): 
"""Run administrative tasks.""" os.environ.setdefault( - "DJANGO_SETTINGS_MODULE", "procrastinate_demos.demo_django.project.settings" + "DJANGO_SETTINGS_MODULE", "procrastinate.demos.demo_django.project.settings" ) try: from django.core.management import execute_from_command_line diff --git a/procrastinate_demos/demo_django/project/__init__.py b/procrastinate/demos/demo_django/project/__init__.py similarity index 100% rename from procrastinate_demos/demo_django/project/__init__.py rename to procrastinate/demos/demo_django/project/__init__.py diff --git a/procrastinate_demos/demo_django/project/asgi.py b/procrastinate/demos/demo_django/project/asgi.py similarity index 86% rename from procrastinate_demos/demo_django/project/asgi.py rename to procrastinate/demos/demo_django/project/asgi.py index 2eba4f33b..aec69591f 100644 --- a/procrastinate_demos/demo_django/project/asgi.py +++ b/procrastinate/demos/demo_django/project/asgi.py @@ -14,7 +14,7 @@ from django.core.asgi import get_asgi_application os.environ.setdefault( - "DJANGO_SETTINGS_MODULE", "procrastinate_demos.demo_django.project.settings" + "DJANGO_SETTINGS_MODULE", "procrastinate.demos.demo_django.project.settings" ) application = get_asgi_application() diff --git a/procrastinate_demos/demo_django/project/settings.py b/procrastinate/demos/demo_django/project/settings.py similarity index 94% rename from procrastinate_demos/demo_django/project/settings.py rename to procrastinate/demos/demo_django/project/settings.py index 6c04cb8e8..0483bbbac 100644 --- a/procrastinate_demos/demo_django/project/settings.py +++ b/procrastinate/demos/demo_django/project/settings.py @@ -40,7 +40,7 @@ "django.contrib.sessions", "django.contrib.messages", "django.contrib.staticfiles", - "procrastinate_demos.demo_django.demo", + "procrastinate.demos.demo_django.demo", "procrastinate.contrib.django", ] @@ -54,7 +54,7 @@ "django.middleware.clickjacking.XFrameOptionsMiddleware", ] -ROOT_URLCONF = 
"procrastinate_demos.demo_django.project.urls" +ROOT_URLCONF = "procrastinate.demos.demo_django.project.urls" TEMPLATES = [ { @@ -72,7 +72,7 @@ }, ] -WSGI_APPLICATION = "procrastinate_demos.demo_django.project.wsgi.application" +WSGI_APPLICATION = "procrastinate.demos.demo_django.project.wsgi.application" # Database @@ -165,7 +165,7 @@ def filter(self, record: logging.LogRecord): }, "filters": { "procrastinate": { - "()": "procrastinate_demos.demo_django.project.settings.ProcrastinateFilter", + "()": "procrastinate.demos.demo_django.project.settings.ProcrastinateFilter", "name": "procrastinate", }, }, @@ -178,4 +178,4 @@ def filter(self, record: logging.LogRecord): }, } -PROCRASTINATE_ON_APP_READY = "procrastinate_demos.demo_django.demo.tasks.on_app_ready" +PROCRASTINATE_ON_APP_READY = "procrastinate.demos.demo_django.demo.tasks.on_app_ready" diff --git a/procrastinate_demos/demo_django/project/urls.py b/procrastinate/demos/demo_django/project/urls.py similarity index 94% rename from procrastinate_demos/demo_django/project/urls.py rename to procrastinate/demos/demo_django/project/urls.py index 6307b94d0..9c4d8d941 100644 --- a/procrastinate_demos/demo_django/project/urls.py +++ b/procrastinate/demos/demo_django/project/urls.py @@ -21,7 +21,7 @@ from django.contrib.staticfiles import views from django.urls import path, re_path -from procrastinate_demos.demo_django.demo.views import CreateBookView, ListBooksView +from procrastinate.demos.demo_django.demo.views import CreateBookView, ListBooksView urlpatterns = [ path("admin/", admin.site.urls), diff --git a/procrastinate_demos/demo_django/project/wsgi.py b/procrastinate/demos/demo_django/project/wsgi.py similarity index 86% rename from procrastinate_demos/demo_django/project/wsgi.py rename to procrastinate/demos/demo_django/project/wsgi.py index 5aefbb683..2c352cdc8 100644 --- a/procrastinate_demos/demo_django/project/wsgi.py +++ b/procrastinate/demos/demo_django/project/wsgi.py @@ -14,7 +14,7 @@ from 
django.core.wsgi import get_wsgi_application os.environ.setdefault( - "DJANGO_SETTINGS_MODULE", "procrastinate_demos.demo_django.project.settings" + "DJANGO_SETTINGS_MODULE", "procrastinate.demos.demo_django.project.settings" ) application = get_wsgi_application() diff --git a/procrastinate_demos/demo_sync/__init__.py b/procrastinate/demos/demo_sync/__init__.py similarity index 100% rename from procrastinate_demos/demo_sync/__init__.py rename to procrastinate/demos/demo_sync/__init__.py diff --git a/procrastinate_demos/demo_sync/__main__.py b/procrastinate/demos/demo_sync/__main__.py similarity index 100% rename from procrastinate_demos/demo_sync/__main__.py rename to procrastinate/demos/demo_sync/__main__.py diff --git a/procrastinate_demos/demo_sync/app.py b/procrastinate/demos/demo_sync/app.py similarity index 69% rename from procrastinate_demos/demo_sync/app.py rename to procrastinate/demos/demo_sync/app.py index 23aec368a..6d78a7128 100644 --- a/procrastinate_demos/demo_sync/app.py +++ b/procrastinate/demos/demo_sync/app.py @@ -4,5 +4,5 @@ app = procrastinate.App( connector=procrastinate.PsycopgConnector(), - import_paths=["procrastinate_demos.demo_sync.tasks"], + import_paths=["procrastinate.demos.demo_sync.tasks"], ) diff --git a/procrastinate_demos/demo_sync/tasks.py b/procrastinate/demos/demo_sync/tasks.py similarity index 100% rename from procrastinate_demos/demo_sync/tasks.py rename to procrastinate/demos/demo_sync/tasks.py diff --git a/procrastinate/exceptions.py b/procrastinate/exceptions.py index d1ec5d0bb..1d5298458 100644 --- a/procrastinate/exceptions.py +++ b/procrastinate/exceptions.py @@ -50,19 +50,6 @@ def __init__(self, retry_decision: RetryDecision): super().__init__() -class JobError(ProcrastinateException): - """ - Job ended with an exception. 
- """ - - def __init__( - self, *args, retry_exception: JobRetry | None = None, critical: bool = False - ): - super().__init__(*args) - self.retry_exception = retry_exception - self.critical = critical - - class JobAborted(ProcrastinateException): """ Job was aborted. @@ -100,9 +87,10 @@ class UniqueViolation(ConnectorException): ``exception.constraint_name``. """ - def __init__(self, *args, constraint_name: str | None): + def __init__(self, *args, constraint_name: str | None, queueing_lock: str | None): super().__init__(*args) self.constraint_name = constraint_name + self.queueing_lock = queueing_lock class NoResult(ConnectorException): diff --git a/procrastinate/job_context.py b/procrastinate/job_context.py index 77052ce63..dd37202da 100644 --- a/procrastinate/job_context.py +++ b/procrastinate/job_context.py @@ -2,112 +2,90 @@ import time from collections.abc import Iterable -from typing import Any +from enum import Enum +from typing import Any, Callable import attr from procrastinate import app as app_module -from procrastinate import jobs, tasks, types +from procrastinate import jobs, tasks, utils @attr.dataclass(kw_only=True) class JobResult: - start_timestamp: float | None = None + start_timestamp: float end_timestamp: float | None = None result: Any = None def duration(self, current_timestamp: float) -> float | None: - if self.start_timestamp is None: - return None return (self.end_timestamp or current_timestamp) - self.start_timestamp def as_dict(self): result = {} - if self.start_timestamp: - result.update( - { - "start_timestamp": self.start_timestamp, - "duration": self.duration(current_timestamp=time.time()), - } - ) + result.update( + { + "start_timestamp": self.start_timestamp, + "duration": self.duration(current_timestamp=time.time()), + } + ) + if self.end_timestamp: result.update({"end_timestamp": self.end_timestamp, "result": self.result}) return result +class AbortReason(Enum): + """ + An enumeration of reasons a job is being aborted + """ + + 
USER_REQUEST = "user_request" #: The user requested to abort the job + SHUTDOWN = ( + "shutdown" #: The job is being aborted as part of shutting down the worker + ) + + @attr.dataclass(frozen=True, kw_only=True) class JobContext: """ Execution context of a running job. - In theory, all attributes are optional. In practice, in a task, they will - always be set to their proper value. """ #: Procrastinate `App` running this job - app: app_module.App | None = None + app: app_module.App #: Name of the worker (may be useful for logging) worker_name: str | None = None #: Queues listened by this worker worker_queues: Iterable[str] | None = None - #: In case there are multiple async sub-workers, this is the id of the sub-worker. - worker_id: int | None = None #: Corresponding :py:class:`~jobs.Job` - job: jobs.Job | None = None - #: Corresponding :py:class:`~tasks.Task` - task: tasks.Task[Any, Any, Any] | None = None - job_result: JobResult = attr.ib(factory=JobResult) + job: jobs.Job + #: Time the job started to be processed + start_timestamp: float + additional_context: dict = attr.ib(factory=dict) - def log_extra(self, action: str, **kwargs: Any) -> types.JSONDict: - extra: types.JSONDict = { - "action": action, - "worker": { - "name": self.worker_name, - "id": self.worker_id, - "queues": self.worker_queues, - }, - } - if self.job: - extra["job"] = self.job.log_context() + #: Callable returning the reason the job should be aborted (or None if it + #: should not be aborted) + abort_reason: Callable[[], AbortReason | None] - return {**extra, **self.job_result.as_dict(), **kwargs} + def should_abort(self) -> bool: + """ + Returns True if the job should be aborted: in that case, the job should + stop processing as soon as possible and raise raise + :py:class:`~exceptions.JobAborted` + """ + return bool(self.abort_reason()) def evolve(self, **update: Any) -> JobContext: return attr.evolve(self, **update) @property def queues_display(self) -> str: - if self.worker_queues: - 
return f"queues {', '.join(self.worker_queues)}" - else: - return "all queues" - - def job_description(self, current_timestamp: float) -> str: - message = f"worker {self.worker_id}: " - if self.job: - message += self.job.call_string - duration = self.job_result.duration(current_timestamp) - if duration is not None: - message += f" (started {duration:.3f} s ago)" - else: - message += "no current job" - - return message + return utils.queues_display(self.worker_queues) - def should_abort(self) -> bool: - assert self.app - assert self.job - assert self.job.id - - job_id = self.job.id - status = self.app.job_manager.get_job_status(job_id) - return status == jobs.Status.ABORTING - - async def should_abort_async(self) -> bool: - assert self.app - assert self.job - assert self.job.id - - job_id = self.job.id - status = await self.app.job_manager.get_job_status_async(job_id) - return status == jobs.Status.ABORTING + @property + def task(self) -> tasks.Task: + """ + The :py:class:`~tasks.Task` associated to the job + """ + return self.app.tasks[self.job.task_name] diff --git a/procrastinate/jobs.py b/procrastinate/jobs.py index 6a2560c01..e393d03c5 100644 --- a/procrastinate/jobs.py +++ b/procrastinate/jobs.py @@ -4,9 +4,10 @@ import functools import logging from enum import Enum -from typing import TYPE_CHECKING, Any +from typing import TYPE_CHECKING, Any, TypedDict, Union import attr +from typing_extensions import Literal from procrastinate import types @@ -22,6 +23,19 @@ cached_property = getattr(functools, "cached_property", property) +class JobInserted(TypedDict): + type: Literal["job_inserted"] + job_id: int + + +class AbortJobRequested(TypedDict): + type: Literal["abort_job_requested"] + job_id: int + + +Notification = Union[JobInserted, AbortJobRequested] + + def check_aware( instance: Job, attribute: attr.Attribute, value: datetime.datetime ) -> None: @@ -39,10 +53,20 @@ class Status(Enum): SUCCEEDED = "succeeded" #: The job ended successfully FAILED = "failed" #: 
The job ended with an error CANCELLED = "cancelled" #: The job was cancelled - ABORTING = "aborting" #: The job is requested to be aborted + ABORTING = "aborting" #: legacy, not used anymore ABORTED = "aborted" #: The job was aborted +class DeleteJobCondition(Enum): + """ + An enumeration with all the possible conditions to delete a job + """ + + NEVER = "never" #: Keep jobs in database after completion + SUCCESSFUL = "successful" #: Delete only successful jobs + ALWAYS = "always" #: Always delete jobs at completion + + @attr.dataclass(frozen=True, kw_only=True) class Job: """ @@ -72,6 +96,10 @@ class Job: ) #: Number of times the job has been tried. attempts: int = 0 + #: True if the job is requested to abort + abort_requested: bool = False + #: ID of the worker that is processing the job + worker_id: int | None = None @classmethod def from_row(cls, row: dict[str, Any]) -> Job: @@ -86,6 +114,8 @@ def from_row(cls, row: dict[str, Any]) -> Job: scheduled_at=row["scheduled_at"], queue=row["queue_name"], attempts=row["attempts"], + abort_requested=row.get("abort_requested", False), + worker_id=row.get("worker_id"), ) def asdict(self) -> dict[str, Any]: @@ -128,35 +158,76 @@ def make_new_job(self, **task_kwargs: types.JSONValue) -> Job: return self.job.evolve(task_kwargs=final_kwargs) - def _log_before_defer_job(self, job: Job) -> None: + def _log_before_defer_jobs(self, jobs: list[Job]) -> None: + job_count = len(jobs) logger.debug( - f"About to defer job {job.call_string}", - extra={"action": "about_to_defer_job", "job": job.log_context()}, + f"About to defer {job_count} {'job' if job_count == 1 else 'jobs'}", + extra={ + "action": "about_to_defer_jobs", + "jobs": [job.log_context() for job in jobs], + }, ) - def _log_after_defer_job(self, job: Job) -> None: + def _log_after_defer_jobs(self, jobs: list[Job]) -> None: + job_count = len(jobs) logger.info( - f"Deferred job {job.call_string}", - extra={"action": "job_defer", "job": job.log_context()}, + f"Deferred 
{job_count} {'job' if job_count == 1 else 'jobs'}", + extra={ + "action": "jobs_deferred", + "jobs": [job.log_context() for job in jobs], + }, ) async def defer_async(self, **task_kwargs: types.JSONValue) -> int: """ - See `Task.defer` for details. + See `Task.defer_async` for details. """ # Make sure this code stays synchronized with .defer() job = self.make_new_job(**task_kwargs) - self._log_before_defer_job(job=job) + self._log_before_defer_jobs(jobs=[job]) job = await self.job_manager.defer_job_async(job=job) - self._log_after_defer_job(job=job) + self._log_after_defer_jobs(jobs=[job]) assert job.id # for mypy return job.id + async def batch_defer_async(self, *task_kwargs: types.JSONDict) -> list[int]: + """ + See `Task.batch_defer_async` for details. + """ + jobs = [self.make_new_job(**kwargs) for kwargs in task_kwargs] + self._log_before_defer_jobs(jobs=jobs) + jobs = await self.job_manager.batch_defer_jobs_async(jobs=jobs) + self._log_after_defer_jobs(jobs=jobs) + + job_ids: list[int] = [] + for job in jobs: + assert job.id # for mypy + job_ids.append(job.id) + return job_ids + def defer(self, **task_kwargs: types.JSONValue) -> int: + """ + See `Task.defer` for details. + """ # Make sure this code stays synchronized with .defer_async() job = self.make_new_job(**task_kwargs) - self._log_before_defer_job(job=job) + self._log_before_defer_jobs(jobs=[job]) job = self.job_manager.defer_job(job=job) - self._log_after_defer_job(job=job) + self._log_after_defer_jobs(jobs=[job]) assert job.id # for mypy return job.id + + def batch_defer(self, *task_kwargs: types.JSONDict) -> list[int]: + """ + See `Task.batch_defer` for details. 
+ """ + jobs = [self.make_new_job(**kwargs) for kwargs in task_kwargs] + self._log_before_defer_jobs(jobs=jobs) + jobs = self.job_manager.batch_defer_jobs(jobs=jobs) + self._log_after_defer_jobs(jobs=jobs) + + job_ids: list[int] = [] + for job in jobs: + assert job.id + job_ids.append(job.id) + return job_ids diff --git a/procrastinate/manager.py b/procrastinate/manager.py index 100c48218..ac11752be 100644 --- a/procrastinate/manager.py +++ b/procrastinate/manager.py @@ -1,30 +1,42 @@ from __future__ import annotations -import asyncio import datetime +import json import logging -from collections.abc import Iterable -from typing import Any, NoReturn +import warnings +from collections.abc import Awaitable, Iterable +from typing import Any, NoReturn, Protocol -from procrastinate import connector, exceptions, jobs, sql, utils +from procrastinate import connector, exceptions, sql, types, utils +from procrastinate import jobs as jobs_module logger = logging.getLogger(__name__) -QUEUEING_LOCK_CONSTRAINT = "procrastinate_jobs_queueing_lock_idx" +QUEUEING_LOCK_CONSTRAINT = "procrastinate_jobs_queueing_lock_idx_v1" + +# TODO: Only necessary to make it work with the pre-migration of v3.0.0. +# We can remove this in the next minor version. +QUEUEING_LOCK_CONSTRAINT_LEGACY = "procrastinate_jobs_queueing_lock_idx" + + +class NotificationCallback(Protocol): + def __call__( + self, *, channel: str, notification: jobs_module.Notification + ) -> Awaitable[None]: ... 
def get_channel_for_queues(queues: Iterable[str] | None = None) -> Iterable[str]: if queues is None: - return ["procrastinate_any_queue"] + return ["procrastinate_any_queue_v1"] else: - return ["procrastinate_queue#" + queue for queue in queues] + return ["procrastinate_queue_v1#" + queue for queue in queues] class JobManager: def __init__(self, connector: connector.BaseConnector): self.connector = connector - async def defer_job_async(self, job: jobs.Job) -> jobs.Job: + async def defer_job_async(self, job: jobs_module.Job) -> jobs_module.Job: """ Add a job in its queue for later processing by a worker. @@ -38,45 +50,83 @@ async def defer_job_async(self, job: jobs.Job) -> jobs.Job: : A copy of the job instance with the id set. """ - # Make sure this code stays synchronized with .defer_job() + return (await self.batch_defer_jobs_async(jobs=[job]))[0] + + async def batch_defer_jobs_async( + self, jobs: list[jobs_module.Job] + ) -> list[jobs_module.Job]: + """ + Add multiple jobs in their queue for later processing by a worker. + + Parameters + ---------- + jobs: + The jobs to defer + + Returns + ------- + : + A list of jobs with their id set. + """ + # Make sure this code stays synchronized with .batch_defer_jobs() try: - result = await self.connector.execute_query_one_async( - **self._defer_job_query_kwargs(job=job) + results = await self.connector.execute_query_all_async( + **self._defer_jobs_query_kwargs(jobs=jobs) ) except exceptions.UniqueViolation as exc: - self._raise_already_enqueued(exc=exc, queueing_lock=job.queueing_lock) + self._raise_already_enqueued(exc=exc, queueing_lock=exc.queueing_lock) - return job.evolve(id=result["id"], status=jobs.Status.TODO.value) + return [ + job.evolve(id=results[index]["id"], status=jobs_module.Status.TODO.value) + for index, job in enumerate(jobs) + ] - def defer_job(self, job: jobs.Job) -> jobs.Job: + def defer_job(self, job: jobs_module.Job) -> jobs_module.Job: """ Sync version of `defer_job_async`. 
""" + return self.batch_defer_jobs(jobs=[job])[0] + + def batch_defer_jobs(self, jobs: list[jobs_module.Job]) -> list[jobs_module.Job]: + """ + Sync version of `batch_defer_jobs_async`. + """ try: - result = self.connector.get_sync_connector().execute_query_one( - **self._defer_job_query_kwargs(job=job) + results = self.connector.get_sync_connector().execute_query_all( + **self._defer_jobs_query_kwargs(jobs=jobs) ) except exceptions.UniqueViolation as exc: - self._raise_already_enqueued(exc=exc, queueing_lock=job.queueing_lock) + self._raise_already_enqueued(exc=exc, queueing_lock=exc.queueing_lock) - return job.evolve(id=result["id"], status=jobs.Status.TODO.value) + return [ + job.evolve(id=results[index]["id"], status=jobs_module.Status.TODO.value) + for index, job in enumerate(jobs) + ] - def _defer_job_query_kwargs(self, job: jobs.Job) -> dict[str, Any]: + def _defer_jobs_query_kwargs(self, jobs: list[jobs_module.Job]) -> dict[str, Any]: return { - "query": sql.queries["defer_job"], - "task_name": job.task_name, - "queue": job.queue, - "priority": job.priority, - "lock": job.lock, - "queueing_lock": job.queueing_lock, - "args": job.task_kwargs, - "scheduled_at": job.scheduled_at, + "query": sql.queries["defer_jobs"], + "jobs": [ + types.JobToDefer( + queue_name=job.queue, + task_name=job.task_name, + priority=job.priority, + lock=job.lock, + queueing_lock=job.queueing_lock, + args=job.task_kwargs, + scheduled_at=job.scheduled_at, + ) + for job in jobs + ], } def _raise_already_enqueued( self, exc: exceptions.UniqueViolation, queueing_lock: str | None ) -> NoReturn: - if exc.constraint_name == QUEUEING_LOCK_CONSTRAINT: + if exc.constraint_name in [ + QUEUEING_LOCK_CONSTRAINT, + QUEUEING_LOCK_CONSTRAINT_LEGACY, + ]: raise exceptions.AlreadyEnqueued( "Job cannot be enqueued: there is already a job in the queue " f"with the queueing lock {queueing_lock}" @@ -85,7 +135,7 @@ def _raise_already_enqueued( async def defer_periodic_job( self, - job: jobs.Job, + job: 
jobs_module.Job, periodic_id: str, defer_timestamp: int, ) -> int | None: @@ -119,7 +169,9 @@ async def defer_periodic_job( return result["id"] - async def fetch_job(self, queues: Iterable[str] | None) -> jobs.Job | None: + async def fetch_job( + self, queues: Iterable[str] | None, worker_id: int + ) -> jobs_module.Job | None: """ Select a job in the queue, and mark it as doing. The worker selecting a job is then responsible for running it, and then @@ -137,7 +189,7 @@ async def fetch_job(self, queues: Iterable[str] | None) -> jobs.Job | None: """ row = await self.connector.execute_query_one_async( - query=sql.queries["fetch_job"], queues=queues + query=sql.queries["fetch_job"], queues=queues, worker_id=worker_id ) # fetch_tasks will always return a row, but is there's no relevant @@ -145,40 +197,65 @@ async def fetch_job(self, queues: Iterable[str] | None) -> jobs.Job | None: if row["id"] is None: return None - return jobs.Job.from_row(row) + return jobs_module.Job.from_row(row) async def get_stalled_jobs( self, - nb_seconds: int, + nb_seconds: int | None = None, queue: str | None = None, task_name: str | None = None, - ) -> Iterable[jobs.Job]: + seconds_since_heartbeat: float = 30, + ) -> Iterable[jobs_module.Job]: """ Return all jobs that have been in ``doing`` state for more than a given time. Parameters ---------- nb_seconds: - Only jobs that have been in ``doing`` state for longer than this will be - returned + If set then jobs that have been in ``doing`` state for longer than that time + in seconds will be returned without considering stalled workers and heartbeats. + This parameter is DEPRECATED and will be removed in a next major version. + Use this method without this parameter instead to get stalled jobs based on + stalled workers and heartbeats. 
queue: Filter by job queue name task_name: Filter by job task name - """ - rows = await self.connector.execute_query_all_async( - query=sql.queries["select_stalled_jobs"], - nb_seconds=nb_seconds, - queue=queue, - task_name=task_name, - ) - return [jobs.Job.from_row(row) for row in rows] + seconds_since_heartbeat: + Get stalled jobs based on workers that have not sent a heartbeat for longer + than this time in seconds. Only used if ``nb_seconds`` is not set. Defaults + to 30 seconds. When changing it then check also the ``update_heartbeat_interval`` + and ``stalled_worker_timeout`` parameters of the worker. + """ + if nb_seconds is not None: + warnings.warn( + "The `nb_seconds` parameter is deprecated and will be removed in a next " + "major version. Use the method without this parameter instead to get " + "stalled jobs based on stalled workers and heartbeats.", + DeprecationWarning, + stacklevel=2, + ) + rows = await self.connector.execute_query_all_async( + query=sql.queries["select_stalled_jobs_by_started"], + nb_seconds=nb_seconds, + queue=queue, + task_name=task_name, + ) + else: + rows = await self.connector.execute_query_all_async( + query=sql.queries["select_stalled_jobs_by_heartbeat"], + queue=queue, + task_name=task_name, + seconds_since_heartbeat=seconds_since_heartbeat, + ) + + return [jobs_module.Job.from_row(row) for row in rows] async def delete_old_jobs( self, nb_hours: int, queue: str | None = None, - include_error: bool | None = False, + include_failed: bool | None = False, include_cancelled: bool | None = False, include_aborted: bool | None = False, ) -> None: @@ -193,7 +270,7 @@ async def delete_old_jobs( Consider jobs that been in a final state for more than ``nb_hours`` queue: Filter by job queue name - include_error: + include_failed: If ``True``, also consider errored jobs. ``False`` by default include_cancelled: If ``True``, also consider cancelled jobs. ``False`` by default. 
@@ -201,13 +278,13 @@ async def delete_old_jobs( If ``True``, also consider aborted jobs. ``False`` by default. """ # We only consider finished jobs by default - statuses = [jobs.Status.SUCCEEDED.value] - if include_error: - statuses.append(jobs.Status.FAILED.value) + statuses = [jobs_module.Status.SUCCEEDED.value] + if include_failed: + statuses.append(jobs_module.Status.FAILED.value) if include_cancelled: - statuses.append(jobs.Status.CANCELLED.value) + statuses.append(jobs_module.Status.CANCELLED.value) if include_aborted: - statuses.append(jobs.Status.ABORTED.value) + statuses.append(jobs_module.Status.ABORTED.value) await self.connector.execute_query_async( query=sql.queries["delete_old_jobs"], @@ -218,8 +295,8 @@ async def delete_old_jobs( async def finish_job( self, - job: jobs.Job, - status: jobs.Status, + job: jobs_module.Job, + status: jobs_module.Status, delete_job: bool, ) -> None: """ @@ -239,7 +316,7 @@ async def finish_job( async def finish_job_by_id_async( self, job_id: int, - status: jobs.Status, + status: jobs_module.Status, delete_job: bool, ) -> None: await self.connector.execute_query_async( @@ -260,9 +337,9 @@ def cancel_job_by_id( job_id: The id of the job to cancel abort: - If True, a job in ``doing`` state will be marked as ``aborting``, but the task - itself has to respect the abortion request. If False, only jobs in ``todo`` - state will be set to ``cancelled`` and won't be processed by a worker anymore. + If True, a job will be marked for abortion, but the task itself has to + respect the abortion request. If False, only jobs in ``todo`` state will + be set to ``cancelled`` and won't be processed by a worker anymore. delete_job: If True, the job will be deleted from the database after being cancelled. Does not affect the jobs that should be aborted. 
@@ -298,9 +375,9 @@ async def cancel_job_by_id_async( job_id: The id of the job to cancel abort: - If True, a job in ``doing`` state will be marked as ``aborting``, but the task - itself has to respect the abortion request. If False, only jobs in ``todo`` - state will be set to ``cancelled`` and won't be processed by a worker anymore. + If True, a job will be marked for abortion, but the task itself has to + respect the abortion request. If False, only jobs in ``todo`` state will + be set to ``cancelled`` and won't be processed by a worker anymore. delete_job: If True, the job will be deleted from the database after being cancelled. Does not affect the jobs that should be aborted. @@ -325,7 +402,7 @@ async def cancel_job_by_id_async( assert result["id"] == job_id return True - def get_job_status(self, job_id: int) -> jobs.Status: + def get_job_status(self, job_id: int) -> jobs_module.Status: """ Get the status of a job by id. @@ -341,9 +418,9 @@ def get_job_status(self, job_id: int) -> jobs.Status: result = self.connector.get_sync_connector().execute_query_one( query=sql.queries["get_job_status"], job_id=job_id ) - return jobs.Status(result["status"]) + return jobs_module.Status(result["status"]) - async def get_job_status_async(self, job_id: int) -> jobs.Status: + async def get_job_status_async(self, job_id: int) -> jobs_module.Status: """ Get the status of a job by id. 
@@ -359,11 +436,11 @@ async def get_job_status_async(self, job_id: int) -> jobs.Status: result = await self.connector.execute_query_one_async( query=sql.queries["get_job_status"], job_id=job_id ) - return jobs.Status(result["status"]) + return jobs_module.Status(result["status"]) async def retry_job( self, - job: jobs.Job, + job: jobs_module.Job, retry_at: datetime.datetime | None = None, priority: int | None = None, queue: str | None = None, @@ -456,26 +533,39 @@ def retry_job_by_id( ) async def listen_for_jobs( - self, *, event: asyncio.Event, queues: Iterable[str] | None = None + self, + *, + on_notification: NotificationCallback, + queues: Iterable[str] | None = None, ) -> None: """ - Listens to defer operation in the database, and raises the event each time an - defer operation is seen. + Listens to job notifications from the database, and invokes the callback each time a + notification is received. This coroutine either returns ``None`` upon calling if it cannot start listening or does not return and needs to be cancelled to end. Parameters ---------- - event: - This event will be set each time a defer operation occurs - queues: - If ``None``, all defer operations will be considered. If an iterable of + on_notification : ``connector.Notify`` + A coroutine that will be called and awaited every time a notification is received + queues : ``Optional[Iterable[str]]`` + If ``None``, all notifications will be considered. If an iterable of queue names is passed, only defer operations on those queues will be considered. 
Defaults to ``None`` """ + + async def handle_notification(channel: str, payload: str): + notification: jobs_module.Notification = json.loads(payload) + logger.debug( + f"Received {notification['type']} notification from channel", + extra={"channel": channel, "payload": payload}, + ) + await on_notification(channel=channel, notification=notification) + await self.connector.listen_notify( - event=event, channels=get_channel_for_queues(queues=queues) + on_notification=handle_notification, + channels=get_channel_for_queues(queues=queues), ) async def check_connection_async(self) -> bool: @@ -510,7 +600,8 @@ async def list_jobs_async( status: str | None = None, lock: str | None = None, queueing_lock: str | None = None, - ) -> Iterable[jobs.Job]: + worker_id: int | None = None, + ) -> Iterable[jobs_module.Job]: """ List all procrastinate jobs given query filters. @@ -528,6 +619,8 @@ async def list_jobs_async( Filter by job lock queueing_lock: Filter by job queueing_lock + worker_id: + Filter by worker ID Returns ------- @@ -541,8 +634,9 @@ async def list_jobs_async( status=status, lock=lock, queueing_lock=queueing_lock, + worker_id=worker_id, ) - return [jobs.Job.from_row(row) for row in rows] + return [jobs_module.Job.from_row(row) for row in rows] def list_jobs( self, @@ -552,7 +646,8 @@ def list_jobs( status: str | None = None, lock: str | None = None, queueing_lock: str | None = None, - ) -> Iterable[jobs.Job]: + worker_id: int | None = None, + ) -> list[jobs_module.Job]: """ Sync version of `list_jobs_async` """ @@ -564,8 +659,9 @@ def list_jobs( status=status, lock=lock, queueing_lock=queueing_lock, + worker_id=worker_id, ) - return [jobs.Job.from_row(row) for row in rows] + return [jobs_module.Job.from_row(row) for row in rows] async def list_queues_async( self, @@ -592,8 +688,7 @@ async def list_queues_async( ------- : A list of dictionaries representing queues stats (``name``, ``jobs_count``, - ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, 
``aborting``, - ``aborted``). + ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, ``aborted``). """ return [ { @@ -604,7 +699,6 @@ async def list_queues_async( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } for row in await self.connector.execute_query_all_async( @@ -635,7 +729,6 @@ def list_queues( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } for row in self.connector.get_sync_connector().execute_query_all( @@ -672,8 +765,7 @@ async def list_tasks_async( ------- : A list of dictionaries representing tasks stats (``name``, ``jobs_count``, - ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, ``aborting``, - ``aborted``). + ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, ``aborted``). """ return [ { @@ -684,7 +776,6 @@ async def list_tasks_async( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } for row in await self.connector.execute_query_all_async( @@ -715,7 +806,6 @@ def list_tasks( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } for row in self.connector.get_sync_connector().execute_query_all( @@ -752,8 +842,7 @@ async def list_locks_async( ------- : A list of dictionaries representing locks stats (``name``, ``jobs_count``, - ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, ``aborting``, - ``aborted``). 
+ ``todo``, ``doing``, ``succeeded``, ``failed``, ``cancelled``, ``aborted``). """ result = [] for row in await self.connector.execute_query_all_async( @@ -772,7 +861,6 @@ async def list_locks_async( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } ) @@ -805,8 +893,79 @@ def list_locks( "succeeded": row["stats"].get("succeeded", 0), "failed": row["stats"].get("failed", 0), "cancelled": row["stats"].get("cancelled", 0), - "aborting": row["stats"].get("aborting", 0), "aborted": row["stats"].get("aborted", 0), } ) return result + + async def list_jobs_to_abort_async(self, queue: str | None = None) -> Iterable[int]: + """ + List ids of running jobs to abort + """ + rows = await self.connector.execute_query_all_async( + query=sql.queries["list_jobs_to_abort"], queue_name=queue + ) + return [row["id"] for row in rows] + + async def register_worker(self) -> int: + """ + Register a newly started worker (with an initial heartbeat) in the database. + + Returns + ------- + : + The ID of the registered worker + """ + result = await self.connector.execute_query_one_async( + query=sql.queries["register_worker"], + ) + return result["worker_id"] + + async def unregister_worker(self, worker_id: int) -> None: + """ + Unregister a shut down worker and also delete its heartbeat from the database. + + Parameters + ---------- + worker_id: + The ID of the worker to delete + """ + await self.connector.execute_query_async( + query=sql.queries["unregister_worker"], + worker_id=worker_id, + ) + + async def update_heartbeat(self, worker_id: int) -> None: + """ + Update the heartbeat of a worker. 
+ + Parameters + ---------- + worker_id: + The ID of the worker to update the heartbeat + """ + await self.connector.execute_query_async( + query=sql.queries["update_heartbeat"], + worker_id=worker_id, + ) + + async def prune_stalled_workers(self, seconds_since_heartbeat: float) -> list[int]: + """ + Delete the workers that have not sent a heartbeat for more than a given time. + + Parameters + ---------- + seconds_since_heartbeat: + Only workers that have not sent a heartbeat for longer than this will be + deleted + + Returns + ------- + : + A list of worker IDs that have been deleted + """ + rows = await self.connector.execute_query_all_async( + query=sql.queries["prune_stalled_workers"], + seconds_since_heartbeat=seconds_since_heartbeat, + ) + return [row["worker_id"] for row in rows] diff --git a/procrastinate/psycopg_connector.py b/procrastinate/psycopg_connector.py index b990849c1..9f4dbdff1 100644 --- a/procrastinate/psycopg_connector.py +++ b/procrastinate/psycopg_connector.py @@ -1,6 +1,5 @@ from __future__ import annotations -import asyncio import contextlib import logging from collections.abc import AsyncGenerator, AsyncIterator, Iterable @@ -172,11 +171,18 @@ async def close_async(self) -> None: await self._async_pool.close() self._async_pool = None + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return psycopg.types.json.Jsonb(value) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]): - return { - key: psycopg.types.json.Jsonb(value) if isinstance(value, dict) else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} @contextlib.asynccontextmanager async def _get_cursor( @@ -247,7 +253,7 @@ async def _get_standalone_connection( @wrap_exceptions() async def 
listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, on_notification: connector.Notify, channels: Iterable[str] ) -> None: while True: async with self._get_standalone_connection() as connection: @@ -258,14 +264,14 @@ async def listen_notify( channel_name=channel_name, ), ) - # Initial set() lets caller know that we're ready to listen - event.set() - await self._loop_notify(event=event, connection=connection) + await self._loop_notify( + on_notification=on_notification, connection=connection + ) @wrap_exceptions() async def _loop_notify( self, - event: asyncio.Event, + on_notification: connector.Notify, connection: psycopg.AsyncConnection, timeout: float = connector.LISTEN_TIMEOUT, ) -> None: @@ -273,12 +279,14 @@ async def _loop_notify( while True: try: - async for _ in utils.gen_with_timeout( + async for notification in utils.gen_with_timeout( aiterable=connection.notifies(), timeout=timeout, raise_timeout=False, ): - event.set() + await on_notification( + channel=notification.channel, payload=notification.payload + ) await connection.execute("SELECT 1") except psycopg.OperationalError: diff --git a/procrastinate/schema.py b/procrastinate/schema.py index 17d075079..f393fae63 100644 --- a/procrastinate/schema.py +++ b/procrastinate/schema.py @@ -1,7 +1,7 @@ from __future__ import annotations -import importlib.resources as importlib_resources import pathlib +from importlib import resources from typing import cast from typing_extensions import LiteralString @@ -20,9 +20,9 @@ def get_schema() -> LiteralString: # procrastinate takes full responsibility for the queries, we # can safely vouch for them being as safe as if they were # defined in the code itself. 
- schema_sql = ( - importlib_resources.files("procrastinate.sql") / "schema.sql" - ).read_text(encoding="utf-8") + schema_sql = (resources.files("procrastinate.sql") / "schema.sql").read_text( + encoding="utf-8" + ) return cast(LiteralString, schema_sql) @staticmethod diff --git a/procrastinate/shell.py b/procrastinate/shell.py index c5940b7ed..206c78026 100644 --- a/procrastinate/shell.py +++ b/procrastinate/shell.py @@ -89,7 +89,6 @@ def do_list_queues(self, arg: str) -> None: f"succeeded: {queue['succeeded']}, " f"failed: {queue['failed']}, " f"cancelled: {queue['cancelled']}, " - f"aborting: {queue['aborting']}, " f"aborted: {queue['aborted']})" ) @@ -112,7 +111,6 @@ def do_list_tasks(self, arg: str) -> None: f"succeeded: {task['succeeded']}, " f"failed: {task['failed']}, " f"cancelled: {task['cancelled']}, " - f"aborting: {task['aborting']}, " f"aborted: {task['aborted']})" ) @@ -135,7 +133,6 @@ def do_list_locks(self, arg: str) -> None: f"succeeded: {lock['succeeded']}, " f"failed: {lock['failed']}, " f"cancelled: {lock['cancelled']}, " - f"aborting: {lock['aborting']}, " f"aborted: {lock['aborted']})" ) diff --git a/procrastinate/sql/__init__.py b/procrastinate/sql/__init__.py index 385fa011f..e777650f0 100644 --- a/procrastinate/sql/__init__.py +++ b/procrastinate/sql/__init__.py @@ -1,7 +1,7 @@ from __future__ import annotations -import importlib.resources as importlib_resources import re +from importlib import resources from typing import cast from typing_extensions import LiteralString @@ -28,7 +28,7 @@ def parse_query_file(query_file: str) -> dict[str, LiteralString]: def get_queries() -> dict[str, LiteralString]: return parse_query_file( - (importlib_resources.files("procrastinate.sql") / "queries.sql").read_text( + (resources.files("procrastinate.sql") / "queries.sql").read_text( encoding="utf-8" ) ) diff --git a/procrastinate/sql/migrations/03.00.00_01_pre_cancel_notification.sql b/procrastinate/sql/migrations/03.00.00_01_pre_cancel_notification.sql 
new file mode 100644 index 000000000..a930d5f09 --- /dev/null +++ b/procrastinate/sql/migrations/03.00.00_01_pre_cancel_notification.sql @@ -0,0 +1,295 @@ +-- Note: starting with v3, there are 2 changes in the migration system: +-- - We now have pre- and post-migration scripts. pre-migrations are safe to +-- apply before upgrading the code. post-migrations are safe to apply after +-- upgrading the code. +-- This is a pre-migration script. +-- - Whenever we recreate an immutable object (function, trigger, indexes), we +-- will suffix its name with a version number. + +-- Add an 'abort_requested' column to the procrastinate_jobs table +ALTER TABLE procrastinate_jobs ADD COLUMN abort_requested boolean DEFAULT false NOT NULL; + +-- Set abort requested flag on all jobs with 'aborting' status +UPDATE procrastinate_jobs SET abort_requested = true WHERE status = 'aborting'; + +-- Add temporary triggers to sync the abort_requested flag with the status +-- so that blue-green deployments can work +CREATE OR REPLACE FUNCTION procrastinate_sync_abort_requested_with_status_temp() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + IF NEW.status = 'aborting' THEN + NEW.abort_requested = true; + END IF; + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_trigger_sync_abort_requested_with_status_temp + BEFORE UPDATE OF status ON procrastinate_jobs + FOR EACH ROW + EXECUTE FUNCTION procrastinate_sync_abort_requested_with_status_temp(); + +-- Create the new versioned functions + +CREATE FUNCTION procrastinate_defer_job_v1( + queue_name character varying, + task_name character varying, + priority integer, + lock text, + queueing_lock text, + args jsonb, + scheduled_at timestamp with time zone +) + RETURNS bigint + LANGUAGE plpgsql +AS $$ +DECLARE + job_id bigint; +BEGIN + INSERT INTO procrastinate_jobs (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) + VALUES (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) + RETURNING id INTO 
job_id; + + RETURN job_id; +END; +$$; + +CREATE FUNCTION procrastinate_defer_periodic_job_v1( + _queue_name character varying, + _lock character varying, + _queueing_lock character varying, + _task_name character varying, + _priority integer, + _periodic_id character varying, + _defer_timestamp bigint, + _args jsonb +) + RETURNS bigint + LANGUAGE plpgsql +AS $$ +DECLARE + _job_id bigint; + _defer_id bigint; +BEGIN + + INSERT + INTO procrastinate_periodic_defers (task_name, periodic_id, defer_timestamp) + VALUES (_task_name, _periodic_id, _defer_timestamp) + ON CONFLICT DO NOTHING + RETURNING id into _defer_id; + + IF _defer_id IS NULL THEN + RETURN NULL; + END IF; + + UPDATE procrastinate_periodic_defers + SET job_id = procrastinate_defer_job_v1( + _queue_name, + _task_name, + _priority, + _lock, + _queueing_lock, + _args, + NULL + ) + WHERE id = _defer_id + RETURNING job_id INTO _job_id; + + DELETE + FROM procrastinate_periodic_defers + USING ( + SELECT id + FROM procrastinate_periodic_defers + WHERE procrastinate_periodic_defers.task_name = _task_name + AND procrastinate_periodic_defers.periodic_id = _periodic_id + AND procrastinate_periodic_defers.defer_timestamp < _defer_timestamp + ORDER BY id + FOR UPDATE + ) to_delete + WHERE procrastinate_periodic_defers.id = to_delete.id; + + RETURN _job_id; +END; +$$; + +CREATE FUNCTION procrastinate_fetch_job_v1( + target_queue_names character varying[] +) + RETURNS procrastinate_jobs + LANGUAGE plpgsql +AS $$ +DECLARE + found_jobs procrastinate_jobs; +BEGIN + WITH candidate AS ( + SELECT jobs.* + FROM procrastinate_jobs AS jobs + WHERE + -- reject the job if its lock has earlier jobs + NOT EXISTS ( + SELECT 1 + FROM procrastinate_jobs AS earlier_jobs + WHERE + jobs.lock IS NOT NULL + AND earlier_jobs.lock = jobs.lock + AND earlier_jobs.status IN ('todo', 'doing') + AND earlier_jobs.id < jobs.id) + AND jobs.status = 'todo' + AND (target_queue_names IS NULL OR jobs.queue_name = ANY( target_queue_names )) + AND 
(jobs.scheduled_at IS NULL OR jobs.scheduled_at <= now()) + ORDER BY jobs.priority DESC, jobs.id ASC LIMIT 1 + FOR UPDATE OF jobs SKIP LOCKED + ) + UPDATE procrastinate_jobs + SET status = 'doing' + FROM candidate + WHERE procrastinate_jobs.id = candidate.id + RETURNING procrastinate_jobs.* INTO found_jobs; + + RETURN found_jobs; +END; +$$; + +CREATE FUNCTION procrastinate_finish_job_v1(job_id bigint, end_status procrastinate_job_status, delete_job boolean) + RETURNS void + LANGUAGE plpgsql +AS $$ +DECLARE + _job_id bigint; +BEGIN + IF end_status NOT IN ('succeeded', 'failed', 'aborted') THEN + RAISE 'End status should be either "succeeded", "failed" or "aborted" (job id: %)', job_id; + END IF; + IF delete_job THEN + DELETE FROM procrastinate_jobs + WHERE id = job_id AND status IN ('todo', 'doing') + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = end_status, + abort_requested = false, + attempts = CASE status + WHEN 'doing' THEN attempts + 1 ELSE attempts + END + WHERE id = job_id AND status IN ('todo', 'doing') + RETURNING id INTO _job_id; + END IF; + IF _job_id IS NULL THEN + RAISE 'Job was not found or not in "doing" or "todo" status (job id: %)', job_id; + END IF; +END; +$$; + +CREATE FUNCTION procrastinate_cancel_job_v1(job_id bigint, abort boolean, delete_job boolean) + RETURNS bigint + LANGUAGE plpgsql +AS $$ +DECLARE + _job_id bigint; +BEGIN + IF delete_job THEN + DELETE FROM procrastinate_jobs + WHERE id = job_id AND status = 'todo' + RETURNING id INTO _job_id; + END IF; + IF _job_id IS NULL THEN + IF abort THEN + UPDATE procrastinate_jobs + SET abort_requested = true, + status = CASE status + WHEN 'todo' THEN 'cancelled'::procrastinate_job_status ELSE status + END + WHERE id = job_id AND status IN ('todo', 'doing') + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = 'cancelled'::procrastinate_job_status + WHERE id = job_id AND status = 'todo' + RETURNING id INTO _job_id; + END IF; + END IF; + RETURN 
_job_id; +END; +$$; + +-- The retry_job function now has specific behaviour when a job is set to be +-- retried while it's aborting: in that case it's marked as failed. +CREATE FUNCTION procrastinate_retry_job_v1( + job_id bigint, + retry_at timestamp with time zone, + new_priority integer, + new_queue_name character varying, + new_lock character varying +) RETURNS void LANGUAGE plpgsql AS $$ +DECLARE + _job_id bigint; + _abort_requested boolean; +BEGIN + SELECT abort_requested FROM procrastinate_jobs + WHERE id = job_id AND status = 'doing' + FOR UPDATE + INTO _abort_requested; + IF _abort_requested THEN + UPDATE procrastinate_jobs + SET status = 'failed'::procrastinate_job_status + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = 'todo'::procrastinate_job_status, + attempts = attempts + 1, + scheduled_at = retry_at, + priority = COALESCE(new_priority, priority), + queue_name = COALESCE(new_queue_name, queue_name), + lock = COALESCE(new_lock, lock) + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + END IF; + + IF _job_id IS NULL THEN + RAISE 'Job was not found or not in "doing" status (job id: %)', job_id; + END IF; +END; +$$; + +-- Create new versioned trigger functions and triggers + +CREATE FUNCTION procrastinate_notify_queue_job_inserted_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +DECLARE + payload TEXT; +BEGIN + SELECT json_build_object('type', 'job_inserted', 'job_id', NEW.id)::text INTO payload; + PERFORM pg_notify('procrastinate_queue_v1#' || NEW.queue_name, payload); + PERFORM pg_notify('procrastinate_any_queue_v1', payload); + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_jobs_notify_queue_job_inserted_temp + AFTER INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_job_inserted_v1(); + +CREATE FUNCTION procrastinate_notify_queue_abort_job_v1() +RETURNS 
trigger + LANGUAGE plpgsql +AS $$ +DECLARE + payload TEXT; +BEGIN + SELECT json_build_object('type', 'abort_job_requested', 'job_id', NEW.id)::text INTO payload; + PERFORM pg_notify('procrastinate_queue_v1#' || NEW.queue_name, payload); + PERFORM pg_notify('procrastinate_any_queue_v1', payload); + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_jobs_notify_queue_job_aborted_temp + AFTER UPDATE OF abort_requested ON procrastinate_jobs + FOR EACH ROW WHEN ((old.abort_requested = false AND new.abort_requested = true AND new.status = 'doing'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_abort_job_v1(); diff --git a/procrastinate/sql/migrations/03.00.00_50_post_cancel_notification.sql b/procrastinate/sql/migrations/03.00.00_50_post_cancel_notification.sql new file mode 100644 index 000000000..5ff3a8c8b --- /dev/null +++ b/procrastinate/sql/migrations/03.00.00_50_post_cancel_notification.sql @@ -0,0 +1,206 @@ +-- These are old versions of functions, that we needed to keep around for +-- backwards compatibility. We can now safely drop them. 
+DROP FUNCTION IF EXISTS procrastinate_finish_job( + integer, + procrastinate_job_status, + timestamp with time zone, + boolean +); +DROP FUNCTION IF EXISTS procrastinate_defer_job( + character varying, + character varying, + text, + text, + jsonb, + timestamp with time zone +); +DROP FUNCTION IF EXISTS procrastinate_defer_periodic_job( + character varying, + character varying, + character varying, + character varying, + character varying, + bigint, + jsonb +); +DROP FUNCTION IF EXISTS procrastinate_retry_job( + bigint, + timestamp with time zone +); +DROP FUNCTION IF EXISTS procrastinate_retry_job( + bigint, + timestamp with time zone, + integer, + character varying, + character varying +); + +-- Remove all traces of the "aborting" status +-- Last sanity update in case the trigger didn't work 100% of the time +UPDATE procrastinate_jobs SET abort_requested = true WHERE status = 'aborting'; + +-- Delete the indexes that depend on the old status and enum type +DROP INDEX IF EXISTS procrastinate_jobs_queueing_lock_idx; +DROP INDEX IF EXISTS procrastinate_jobs_lock_idx; +DROP INDEX IF EXISTS procrastinate_jobs_id_lock_idx; + +-- Delete the unversioned triggers +DROP TRIGGER IF EXISTS procrastinate_trigger_status_events_update ON procrastinate_jobs; +DROP TRIGGER IF EXISTS procrastinate_trigger_status_events_insert ON procrastinate_jobs; +DROP TRIGGER IF EXISTS procrastinate_trigger_scheduled_events ON procrastinate_jobs; +DROP TRIGGER IF EXISTS procrastinate_jobs_notify_queue ON procrastinate_jobs; + +-- Delete the unversioned functions +DROP FUNCTION IF EXISTS procrastinate_defer_job; +DROP FUNCTION IF EXISTS procrastinate_defer_periodic_job; +DROP FUNCTION IF EXISTS procrastinate_fetch_job; +DROP FUNCTION IF EXISTS procrastinate_finish_job(bigint, procrastinate_job_status, boolean); +DROP FUNCTION IF EXISTS procrastinate_cancel_job; +DROP FUNCTION IF EXISTS 
procrastinate_trigger_status_events_procedure_update; +DROP FUNCTION IF EXISTS procrastinate_finish_job(integer, procrastinate_job_status, timestamp with time zone, boolean); +DROP FUNCTION IF EXISTS procrastinate_notify_queue; + +-- Delete the functions that depend on the old event type +DROP FUNCTION IF EXISTS procrastinate_trigger_status_events_procedure_insert; +DROP FUNCTION IF EXISTS procrastinate_trigger_scheduled_events_procedure; + +-- Delete temporary triggers and functions +DROP TRIGGER IF EXISTS procrastinate_jobs_notify_queue_job_inserted_temp ON procrastinate_jobs; +DROP TRIGGER IF EXISTS procrastinate_jobs_notify_queue_job_aborted_temp ON procrastinate_jobs; +DROP TRIGGER IF EXISTS procrastinate_trigger_sync_abort_requested_with_status_temp ON procrastinate_jobs; +DROP FUNCTION IF EXISTS procrastinate_sync_abort_requested_with_status_temp; + +-- Alter the table to not use the 'aborting' status anymore +ALTER TABLE procrastinate_jobs +ALTER COLUMN status TYPE procrastinate_job_status +USING ( + CASE status::text + WHEN 'aborting' THEN 'doing'::procrastinate_job_status + ELSE status::procrastinate_job_status + END +); + +-- Recreate the dropped temporary triggers +CREATE TRIGGER procrastinate_jobs_notify_queue_job_inserted_v1 + AFTER INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_job_inserted_v1(); +CREATE TRIGGER procrastinate_jobs_notify_queue_job_aborted_v1 + AFTER UPDATE OF abort_requested ON procrastinate_jobs + FOR EACH ROW WHEN ((old.abort_requested = false AND new.abort_requested = true AND new.status = 'doing'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_abort_job_v1(); + +-- Recreate the dropped indexes (with version suffix) +CREATE UNIQUE INDEX procrastinate_jobs_queueing_lock_idx_v1 ON procrastinate_jobs (queueing_lock) WHERE status = 'todo'; +CREATE UNIQUE INDEX procrastinate_jobs_lock_idx_v1 ON 
procrastinate_jobs (lock) WHERE status = 'doing'; +CREATE INDEX procrastinate_jobs_id_lock_idx_v1 ON procrastinate_jobs (id, lock) WHERE status = ANY (ARRAY['todo'::procrastinate_job_status, 'doing'::procrastinate_job_status]); + +-- Rename existing indexes +ALTER INDEX procrastinate_jobs_queue_name_idx RENAME TO procrastinate_jobs_queue_name_idx_v1; +ALTER INDEX procrastinate_events_job_id_fkey RENAME TO procrastinate_events_job_id_fkey_v1; +ALTER INDEX procrastinate_periodic_defers_job_id_fkey RENAME TO procrastinate_periodic_defers_job_id_fkey_v1; +ALTER INDEX procrastinate_jobs_priority_idx RENAME TO procrastinate_jobs_priority_idx_v1; + +-- Recreate or rename the other triggers & their associated functions + +CREATE FUNCTION procrastinate_trigger_function_status_events_insert_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + INSERT INTO procrastinate_events(job_id, type) + VALUES (NEW.id, 'deferred'::procrastinate_job_event_type); + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_trigger_status_events_insert_v1 + AFTER INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_trigger_function_status_events_insert_v1(); + +CREATE FUNCTION procrastinate_trigger_function_status_events_update_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + WITH t AS ( + SELECT CASE + WHEN OLD.status = 'todo'::procrastinate_job_status + AND NEW.status = 'doing'::procrastinate_job_status + THEN 'started'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'todo'::procrastinate_job_status + THEN 'deferred_for_retry'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'failed'::procrastinate_job_status + THEN 'failed'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'succeeded'::procrastinate_job_status + THEN 
'succeeded'::procrastinate_job_event_type + WHEN OLD.status = 'todo'::procrastinate_job_status + AND ( + NEW.status = 'cancelled'::procrastinate_job_status + OR NEW.status = 'failed'::procrastinate_job_status + OR NEW.status = 'succeeded'::procrastinate_job_status + ) + THEN 'cancelled'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'aborted'::procrastinate_job_status + THEN 'aborted'::procrastinate_job_event_type + ELSE NULL + END as event_type + ) + INSERT INTO procrastinate_events(job_id, type) + SELECT NEW.id, t.event_type + FROM t + WHERE t.event_type IS NOT NULL; + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_trigger_status_events_update_v1 + AFTER UPDATE OF status ON procrastinate_jobs + FOR EACH ROW + EXECUTE PROCEDURE procrastinate_trigger_function_status_events_update_v1(); + +CREATE FUNCTION procrastinate_trigger_function_scheduled_events_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + INSERT INTO procrastinate_events(job_id, type, at) + VALUES (NEW.id, 'scheduled'::procrastinate_job_event_type, NEW.scheduled_at); + + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_trigger_scheduled_events_v1 + AFTER UPDATE OR INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.scheduled_at IS NOT NULL AND new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_trigger_function_scheduled_events_v1(); + +CREATE FUNCTION procrastinate_trigger_abort_requested_events_procedure_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + INSERT INTO procrastinate_events(job_id, type) + VALUES (NEW.id, 'abort_requested'::procrastinate_job_event_type); + RETURN NEW; +END; +$$; + +CREATE TRIGGER procrastinate_trigger_abort_requested_events_v1 + AFTER UPDATE OF abort_requested ON procrastinate_jobs + FOR EACH ROW WHEN ((new.abort_requested = true)) + EXECUTE PROCEDURE procrastinate_trigger_abort_requested_events_procedure_v1(); + +-- Rename remaining functions to use 
version suffix +ALTER FUNCTION procrastinate_unlink_periodic_defers RENAME TO procrastinate_unlink_periodic_defers_v1; +ALTER TRIGGER procrastinate_trigger_delete_jobs ON procrastinate_jobs RENAME TO procrastinate_trigger_delete_jobs_v1; + +-- New constraints +ALTER TABLE procrastinate_jobs ADD CONSTRAINT check_not_todo_abort_requested CHECK (NOT (status = 'todo' AND abort_requested = true)); diff --git a/procrastinate/sql/migrations/03.01.00_01_pre_add_heartbeat.sql b/procrastinate/sql/migrations/03.01.00_01_pre_add_heartbeat.sql new file mode 100644 index 000000000..f786f3e0e --- /dev/null +++ b/procrastinate/sql/migrations/03.01.00_01_pre_add_heartbeat.sql @@ -0,0 +1,93 @@ +CREATE TABLE procrastinate_workers( + id bigint PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + last_heartbeat timestamp with time zone NOT NULL DEFAULT NOW() +); + +ALTER TABLE procrastinate_jobs ADD COLUMN worker_id bigint REFERENCES procrastinate_workers(id) ON DELETE SET NULL; + +CREATE INDEX idx_procrastinate_jobs_worker_not_null ON procrastinate_jobs(worker_id) WHERE worker_id IS NOT NULL AND status = 'doing'::procrastinate_job_status; + +CREATE INDEX idx_procrastinate_workers_last_heartbeat ON procrastinate_workers(last_heartbeat); + +CREATE FUNCTION procrastinate_fetch_job_v2( + target_queue_names character varying[], + p_worker_id bigint +) + RETURNS procrastinate_jobs + LANGUAGE plpgsql +AS $$ +DECLARE + found_jobs procrastinate_jobs; +BEGIN + WITH candidate AS ( + SELECT jobs.* + FROM procrastinate_jobs AS jobs + WHERE + -- reject the job if its lock has earlier jobs + NOT EXISTS ( + SELECT 1 + FROM procrastinate_jobs AS earlier_jobs + WHERE + jobs.lock IS NOT NULL + AND earlier_jobs.lock = jobs.lock + AND earlier_jobs.status IN ('todo', 'doing') + AND earlier_jobs.id < jobs.id) + AND jobs.status = 'todo' + AND (target_queue_names IS NULL OR jobs.queue_name = ANY( target_queue_names )) + AND (jobs.scheduled_at IS NULL OR jobs.scheduled_at <= now()) + ORDER BY jobs.priority DESC, 
jobs.id ASC LIMIT 1 + FOR UPDATE OF jobs SKIP LOCKED + ) + UPDATE procrastinate_jobs + SET status = 'doing', worker_id = p_worker_id + FROM candidate + WHERE procrastinate_jobs.id = candidate.id + RETURNING procrastinate_jobs.* INTO found_jobs; + + RETURN found_jobs; +END; +$$; + +CREATE FUNCTION procrastinate_register_worker_v1() + RETURNS TABLE(worker_id bigint) + LANGUAGE plpgsql +AS $$ +BEGIN + RETURN QUERY + INSERT INTO procrastinate_workers DEFAULT VALUES + RETURNING procrastinate_workers.id; +END; +$$; + +CREATE FUNCTION procrastinate_unregister_worker_v1(worker_id bigint) + RETURNS void + LANGUAGE plpgsql +AS $$ +BEGIN + DELETE FROM procrastinate_workers + WHERE id = worker_id; +END; +$$; + +CREATE FUNCTION procrastinate_update_heartbeat_v1(worker_id bigint) + RETURNS void + LANGUAGE plpgsql +AS $$ +BEGIN + UPDATE procrastinate_workers + SET last_heartbeat = NOW() + WHERE id = worker_id; +END; +$$; + +CREATE FUNCTION procrastinate_prune_stalled_workers_v1(seconds_since_heartbeat float) + RETURNS TABLE(worker_id bigint) + LANGUAGE plpgsql +AS $$ +BEGIN + RETURN QUERY + DELETE FROM procrastinate_workers + WHERE last_heartbeat < NOW() - (seconds_since_heartbeat || 'SECOND')::INTERVAL + RETURNING procrastinate_workers.id; +END; +$$; diff --git a/procrastinate/sql/migrations/03.01.00_50_post_add_heartbeat.sql b/procrastinate/sql/migrations/03.01.00_50_post_add_heartbeat.sql new file mode 100644 index 000000000..6d0f70311 --- /dev/null +++ b/procrastinate/sql/migrations/03.01.00_50_post_add_heartbeat.sql @@ -0,0 +1 @@ +DROP FUNCTION IF EXISTS procrastinate_fetch_job_v1(character varying[]); diff --git a/procrastinate/sql/migrations/03.02.00_01_pre_batch_defer_jobs.sql b/procrastinate/sql/migrations/03.02.00_01_pre_batch_defer_jobs.sql new file mode 100644 index 000000000..84daf05ef --- /dev/null +++ b/procrastinate/sql/migrations/03.02.00_01_pre_batch_defer_jobs.sql @@ -0,0 +1,101 @@ +CREATE TYPE procrastinate_job_to_defer_v1 AS ( + queue_name character varying, 
+ task_name character varying, + priority integer, + lock text, + queueing_lock text, + args jsonb, + scheduled_at timestamp with time zone +); + +CREATE FUNCTION procrastinate_defer_jobs_v1( + jobs procrastinate_job_to_defer_v1[] +) + RETURNS bigint[] + LANGUAGE plpgsql +AS $$ +DECLARE + job_ids bigint[]; +BEGIN + WITH inserted_jobs AS ( + INSERT INTO procrastinate_jobs (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) + SELECT (job).queue_name, + (job).task_name, + (job).priority, + (job).lock, + (job).queueing_lock, + (job).args, + (job).scheduled_at + FROM unnest(jobs) AS job + RETURNING id + ) + SELECT array_agg(id) FROM inserted_jobs INTO job_ids; + + RETURN job_ids; +END; +$$; + +CREATE FUNCTION procrastinate_defer_periodic_job_v2( + _queue_name character varying, + _lock character varying, + _queueing_lock character varying, + _task_name character varying, + _priority integer, + _periodic_id character varying, + _defer_timestamp bigint, + _args jsonb +) + RETURNS bigint + LANGUAGE plpgsql +AS $$ +DECLARE + _job_id bigint; + _defer_id bigint; +BEGIN + INSERT + INTO procrastinate_periodic_defers (task_name, periodic_id, defer_timestamp) + VALUES (_task_name, _periodic_id, _defer_timestamp) + ON CONFLICT DO NOTHING + RETURNING id into _defer_id; + + IF _defer_id IS NULL THEN + RETURN NULL; + END IF; + + UPDATE procrastinate_periodic_defers + SET job_id = ( + SELECT COALESCE(( + SELECT unnest(procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + _queue_name, + _task_name, + _priority, + _lock, + _queueing_lock, + _args, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + )) + ), NULL) + ) + WHERE id = _defer_id + RETURNING job_id INTO _job_id; + + DELETE + FROM procrastinate_periodic_defers + USING ( + SELECT id + FROM procrastinate_periodic_defers + WHERE procrastinate_periodic_defers.task_name = _task_name + AND procrastinate_periodic_defers.periodic_id = _periodic_id + AND procrastinate_periodic_defers.defer_timestamp < 
_defer_timestamp + ORDER BY id + FOR UPDATE + ) to_delete + WHERE procrastinate_periodic_defers.id = to_delete.id; + + RETURN _job_id; +END; +$$; diff --git a/procrastinate/sql/migrations/03.02.00_50_post_batch_defer_jobs.sql b/procrastinate/sql/migrations/03.02.00_50_post_batch_defer_jobs.sql new file mode 100644 index 000000000..0d6f4adb7 --- /dev/null +++ b/procrastinate/sql/migrations/03.02.00_50_post_batch_defer_jobs.sql @@ -0,0 +1,2 @@ +DROP FUNCTION IF EXISTS procrastinate_defer_job_v1; +DROP FUNCTION IF EXISTS procrastinate_defer_periodic_job_v1; diff --git a/procrastinate/sql/migrations/03.03.00_01_pre_priority_lock_fetch_job.sql b/procrastinate/sql/migrations/03.03.00_01_pre_priority_lock_fetch_job.sql new file mode 100644 index 000000000..8e3bbe7d9 --- /dev/null +++ b/procrastinate/sql/migrations/03.03.00_01_pre_priority_lock_fetch_job.sql @@ -0,0 +1,56 @@ +-- Migration: Update procrastinate_fetch_job_v2 to consider priority before job id for locked jobs +DROP FUNCTION IF EXISTS procrastinate_fetch_job_v2(target_queue_names character varying[], p_worker_id bigint); + +CREATE FUNCTION procrastinate_fetch_job_v2( + target_queue_names character varying[], + p_worker_id bigint +) + RETURNS procrastinate_jobs + LANGUAGE plpgsql +AS $$ +DECLARE + found_jobs procrastinate_jobs; +BEGIN + WITH candidate AS ( + SELECT jobs.* + FROM procrastinate_jobs AS jobs + WHERE + -- reject the job if its lock has earlier or higher priority jobs + NOT EXISTS ( + SELECT 1 + FROM procrastinate_jobs AS other_jobs + WHERE + jobs.lock IS NOT NULL + AND other_jobs.lock = jobs.lock + AND ( + -- job with same lock is already running + other_jobs.status = 'doing' + OR + -- job with same lock is waiting and has higher priority (or same priority but was queued first) + ( + other_jobs.status = 'todo' + AND ( + other_jobs.priority > jobs.priority + OR ( + other_jobs.priority = jobs.priority + AND other_jobs.id < jobs.id + ) + ) + ) + ) + ) + AND jobs.status = 'todo' + AND 
(target_queue_names IS NULL OR jobs.queue_name = ANY( target_queue_names )) + AND (jobs.scheduled_at IS NULL OR jobs.scheduled_at <= now()) + ORDER BY jobs.priority DESC, jobs.id ASC LIMIT 1 + FOR UPDATE OF jobs SKIP LOCKED + ) + UPDATE procrastinate_jobs + SET status = 'doing', worker_id = p_worker_id + FROM candidate + WHERE procrastinate_jobs.id = candidate.id + RETURNING procrastinate_jobs.* INTO found_jobs; + + RETURN found_jobs; +END; +$$; diff --git a/procrastinate/sql/migrations/03.04.00_01_pre_add_retry_failed_job_procedure.sql b/procrastinate/sql/migrations/03.04.00_01_pre_add_retry_failed_job_procedure.sql new file mode 100644 index 000000000..cab8b8e1b --- /dev/null +++ b/procrastinate/sql/migrations/03.04.00_01_pre_add_retry_failed_job_procedure.sql @@ -0,0 +1,43 @@ +-- Append new event type to reflect transition from failed -> todo +ALTER TYPE procrastinate_job_event_type ADD VALUE 'retried' AFTER 'scheduled'; + +-- Procedure to retry failed jobs +CREATE FUNCTION procrastinate_retry_job_v2( + job_id bigint, + retry_at timestamp with time zone, + new_priority integer, + new_queue_name character varying, + new_lock character varying +) RETURNS void LANGUAGE plpgsql AS $$ +DECLARE + _job_id bigint; + _abort_requested boolean; + _current_status procrastinate_job_status; +BEGIN + SELECT status, abort_requested FROM procrastinate_jobs + WHERE id = job_id AND status IN ('doing', 'failed') + FOR UPDATE + INTO _current_status, _abort_requested; + IF _current_status = 'doing' AND _abort_requested THEN + UPDATE procrastinate_jobs + SET status = 'failed'::procrastinate_job_status + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = 'todo'::procrastinate_job_status, + attempts = attempts + 1, + scheduled_at = retry_at, + priority = COALESCE(new_priority, priority), + queue_name = COALESCE(new_queue_name, queue_name), + lock = COALESCE(new_lock, lock) + WHERE id = job_id AND status IN ('doing', 
'failed') + RETURNING id INTO _job_id; + END IF; + + IF _job_id IS NULL THEN + RAISE 'Job was not found or has an invalid status to retry (job id: %)', job_id; + END IF; + +END; +$$; diff --git a/procrastinate/sql/migrations/03.04.00_50_post_add_retry_failed_job_procedure.sql b/procrastinate/sql/migrations/03.04.00_50_post_add_retry_failed_job_procedure.sql new file mode 100644 index 000000000..b93bb9950 --- /dev/null +++ b/procrastinate/sql/migrations/03.04.00_50_post_add_retry_failed_job_procedure.sql @@ -0,0 +1,50 @@ +DROP TRIGGER IF EXISTS procrastinate_trigger_status_events_update_v1 ON procrastinate_jobs; +DROP FUNCTION IF EXISTS procrastinate_trigger_function_status_events_update_v1; + +CREATE FUNCTION procrastinate_trigger_function_status_events_update_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $function$ +BEGIN + WITH t AS ( + SELECT CASE + WHEN OLD.status = 'todo'::procrastinate_job_status + AND NEW.status = 'doing'::procrastinate_job_status + THEN 'started'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'todo'::procrastinate_job_status + THEN 'deferred_for_retry'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'failed'::procrastinate_job_status + THEN 'failed'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'succeeded'::procrastinate_job_status + THEN 'succeeded'::procrastinate_job_event_type + WHEN OLD.status = 'todo'::procrastinate_job_status + AND ( + NEW.status = 'cancelled'::procrastinate_job_status + OR NEW.status = 'failed'::procrastinate_job_status + OR NEW.status = 'succeeded'::procrastinate_job_status + ) + THEN 'cancelled'::procrastinate_job_event_type + WHEN OLD.status = 'doing'::procrastinate_job_status + AND NEW.status = 'aborted'::procrastinate_job_status + THEN 'aborted'::procrastinate_job_event_type + WHEN OLD.status = 'failed'::procrastinate_job_status + AND NEW.status = 
'todo'::procrastinate_job_status + THEN 'retried'::procrastinate_job_event_type + ELSE NULL + END as event_type + ) + INSERT INTO procrastinate_events(job_id, type) + SELECT NEW.id, t.event_type + FROM t + WHERE t.event_type IS NOT NULL; + RETURN NEW; +END; +$function$; + +CREATE TRIGGER procrastinate_trigger_status_events_update_v1 + AFTER UPDATE OF status ON procrastinate_jobs + FOR EACH ROW + EXECUTE PROCEDURE procrastinate_trigger_function_status_events_update_v1(); diff --git a/procrastinate/sql/queries.sql b/procrastinate/sql/queries.sql index ad91db9d9..f54a315fe 100644 --- a/procrastinate/sql/queries.sql +++ b/procrastinate/sql/queries.sql @@ -3,32 +3,54 @@ -- description -- %s-templated QUERY --- defer_job -- --- Create and enqueue a job -SELECT procrastinate_defer_job(%(queue)s, %(task_name)s, %(priority)s, %(lock)s, %(queueing_lock)s, %(args)s, %(scheduled_at)s) AS id; +-- defer_jobs -- +-- Create and enqueue one or more jobs +SELECT unnest( + procrastinate_defer_jobs_v1( + %(jobs)s::procrastinate_job_to_defer_v1[] + ) +) AS id; -- defer_periodic_job -- -- Create a periodic job if it doesn't already exist, and delete periodic metadata -- for previous jobs in the same task. 
-SELECT procrastinate_defer_periodic_job(%(queue)s, %(lock)s, %(queueing_lock)s, %(task_name)s, %(priority)s, %(periodic_id)s, %(defer_timestamp)s, %(args)s) AS id; +SELECT procrastinate_defer_periodic_job_v2(%(queue)s, %(lock)s, %(queueing_lock)s, %(task_name)s, %(priority)s, %(periodic_id)s, %(defer_timestamp)s, %(args)s) AS id; -- fetch_job -- -- Get the first awaiting job -SELECT id, status, task_name, priority, lock, queueing_lock, args, scheduled_at, queue_name, attempts - FROM procrastinate_fetch_job(%(queues)s); +SELECT id, status, task_name, priority, lock, queueing_lock, args, scheduled_at, queue_name, attempts, worker_id + FROM procrastinate_fetch_job_v2(%(queues)s::varchar[], %(worker_id)s); --- select_stalled_jobs -- +-- select_stalled_jobs_by_started -- -- Get running jobs that started more than a given time ago -SELECT job.id, status, task_name, priority, lock, queueing_lock, args, scheduled_at, queue_name, attempts, max(event.at) started_at +SELECT job.id, status, task_name, priority, lock, queueing_lock, + args, scheduled_at, queue_name, attempts, worker_id, + MAX(event.at) AS started_at FROM procrastinate_jobs job JOIN procrastinate_events event ON event.job_id = job.id WHERE event.type = 'started' AND job.status = 'doing' - AND event.at < NOW() - (%(nb_seconds)s || 'SECOND')::INTERVAL AND (%(queue)s::varchar IS NULL OR job.queue_name = %(queue)s) AND (%(task_name)s::varchar IS NULL OR job.task_name = %(task_name)s) GROUP BY job.id + HAVING MAX(event.at) < NOW() - (%(nb_seconds)s || 'SECOND')::INTERVAL + +-- select_stalled_jobs_by_heartbeat -- +-- Get running jobs of stalled workers (with absent or outdated heartbeat) +WITH stalled_workers AS ( + SELECT id + FROM procrastinate_workers + WHERE last_heartbeat < NOW() - (%(seconds_since_heartbeat)s || ' SECOND')::INTERVAL +) +SELECT job.id, status, task_name, priority, lock, queueing_lock, + args, scheduled_at, queue_name, attempts, job.worker_id + FROM procrastinate_jobs job + LEFT JOIN 
stalled_workers sw ON sw.id = job.worker_id + WHERE job.status = 'doing' + AND (%(queue)s::varchar IS NULL OR job.queue_name = %(queue)s) + AND (%(task_name)s::varchar IS NULL OR job.task_name = %(task_name)s) + AND (job.worker_id IS NULL OR sw.id IS NOT NULL) -- delete_old_jobs -- -- Delete jobs that have been in a final state for longer than nb_hours @@ -48,19 +70,19 @@ WHERE id IN ( -- finish_job -- -- Finish a job, changing it from "doing" to "succeeded" or "failed" -SELECT procrastinate_finish_job(%(job_id)s, %(status)s, %(delete_job)s); +SELECT procrastinate_finish_job_v1(%(job_id)s, %(status)s, %(delete_job)s); -- cancel_job -- --- Cancel a job, changing it from "todo" to "cancelled" or from "doing" to "aborting" -SELECT procrastinate_cancel_job(%(job_id)s, %(abort)s, %(delete_job)s) AS id; +-- Cancel a job, changing it from "todo" to "cancelled" or mark for abortion +SELECT procrastinate_cancel_job_v1(%(job_id)s, %(abort)s, %(delete_job)s) AS id; -- get_job_status -- -- Get the status of a job SELECT status FROM procrastinate_jobs WHERE id = %(job_id)s; -- retry_job -- --- Retry a job, changing it from "doing" to "todo" -SELECT procrastinate_retry_job(%(job_id)s, %(retry_at)s, %(new_priority)s, %(new_queue_name)s, %(new_lock)s); +-- Retry a job, changing it from "doing" to "todo" or from "failed" to "todo" +SELECT procrastinate_retry_job_v2(%(job_id)s, %(retry_at)s, %(new_priority)s, %(new_queue_name)s, %(new_lock)s); -- listen_queue -- -- In this one, the argument is an identifier, should not be escaped the same way @@ -85,7 +107,9 @@ SELECT id, args, status, scheduled_at, - attempts + attempts, + abort_requested, + worker_id FROM procrastinate_jobs WHERE (%(id)s::bigint IS NULL OR id = %(id)s) AND (%(queue_name)s::varchar IS NULL OR queue_name = %(queue_name)s) @@ -93,6 +117,7 @@ SELECT id, AND (%(status)s::procrastinate_job_status IS NULL OR status = %(status)s) AND (%(lock)s::varchar IS NULL OR lock = %(lock)s) AND (%(queueing_lock)s::varchar IS NULL OR 
queueing_lock = %(queueing_lock)s) + AND (%(worker_id)s::bigint IS NULL OR worker_id = %(worker_id)s) ORDER BY id ASC; -- list_queues -- @@ -187,3 +212,26 @@ SELECT FROM locks GROUP BY name ORDER BY name; + +-- list_jobs_to_abort -- +-- Get list of running jobs that are requested to be aborted +SELECT id from procrastinate_jobs +WHERE status = 'doing' +AND abort_requested = true +AND (%(queue_name)s::varchar IS NULL OR queue_name = %(queue_name)s) + +-- register_worker -- +-- Register a newly started worker +SELECT * FROM procrastinate_register_worker_v1() + +-- unregister_worker -- +-- Unregister a finished worker +SELECT procrastinate_unregister_worker_v1(%(worker_id)s) + +-- update_heartbeat -- +-- Update the heartbeat of a worker +SELECT procrastinate_update_heartbeat_v1(%(worker_id)s) + +-- prune_stalled_workers -- +-- Delete stalled workers that haven't sent a heartbeat in a while +SELECT * FROM procrastinate_prune_stalled_workers_v1(%(seconds_since_heartbeat)s) diff --git a/procrastinate/sql/schema.sql b/procrastinate/sql/schema.sql index 4dd7bd59a..00633d79c 100644 --- a/procrastinate/sql/schema.sql +++ b/procrastinate/sql/schema.sql @@ -10,7 +10,7 @@ CREATE TYPE procrastinate_job_status AS ENUM ( 'succeeded', -- The job ended successfully 'failed', -- The job ended with an error 'cancelled', -- The job was cancelled - 'aborting', -- The job was requested to abort + 'aborting', -- legacy, not used anymore since v3.0.0 'aborted' -- The job was aborted ); @@ -18,16 +18,34 @@ CREATE TYPE procrastinate_job_event_type AS ENUM ( 'deferred', -- Job created, in todo 'started', -- todo -> doing 'deferred_for_retry', -- doing -> todo - 'failed', -- doing or aborting -> failed - 'succeeded', -- doing or aborting -> succeeded + 'failed', -- doing -> failed + 'succeeded', -- doing -> succeeded 'cancelled', -- todo -> cancelled - 'abort_requested', -- doing -> aborting - 'aborted', -- doing or aborting -> aborted - 'scheduled' -- not an event transition, but recording 
when a task is scheduled for + 'abort_requested', -- not a state transition, but set in a separate field + 'aborted', -- doing -> aborted (only allowed when abort_requested field is set) + 'scheduled', -- not a state transition, but recording when a task is scheduled for + 'retried' -- Manually retried failed job +); + +-- Composite Types + +CREATE TYPE procrastinate_job_to_defer_v1 AS ( + queue_name character varying, + task_name character varying, + priority integer, + lock text, + queueing_lock text, + args jsonb, + scheduled_at timestamp with time zone ); -- Tables +CREATE TABLE procrastinate_workers( + id bigint PRIMARY KEY GENERATED ALWAYS AS IDENTITY, + last_heartbeat timestamp with time zone NOT NULL DEFAULT NOW() +); + CREATE TABLE procrastinate_jobs ( id bigserial PRIMARY KEY, queue_name character varying(128) NOT NULL, @@ -38,7 +56,10 @@ CREATE TABLE procrastinate_jobs ( args jsonb DEFAULT '{}' NOT NULL, status procrastinate_job_status DEFAULT 'todo'::procrastinate_job_status NOT NULL, scheduled_at timestamp with time zone NULL, - attempts integer DEFAULT 0 NOT NULL + attempts integer DEFAULT 0 NOT NULL, + abort_requested boolean DEFAULT false NOT NULL, + worker_id bigint REFERENCES procrastinate_workers(id) ON DELETE SET NULL, + CONSTRAINT check_not_todo_abort_requested CHECK (NOT (status = 'todo' AND abort_requested = true)) ); CREATE TABLE procrastinate_periodic_defers ( @@ -60,46 +81,52 @@ CREATE TABLE procrastinate_events ( -- Constraints & Indices -- this prevents from having several jobs with the same queueing lock in the "todo" state -CREATE UNIQUE INDEX procrastinate_jobs_queueing_lock_idx ON procrastinate_jobs (queueing_lock) WHERE status = 'todo'; +CREATE UNIQUE INDEX procrastinate_jobs_queueing_lock_idx_v1 ON procrastinate_jobs (queueing_lock) WHERE status = 'todo'; -- this prevents from having several jobs with the same lock in the "doing" state -CREATE UNIQUE INDEX procrastinate_jobs_lock_idx ON procrastinate_jobs (lock) WHERE status = 
'doing'; +CREATE UNIQUE INDEX procrastinate_jobs_lock_idx_v1 ON procrastinate_jobs (lock) WHERE status = 'doing'; -CREATE INDEX procrastinate_jobs_queue_name_idx ON procrastinate_jobs(queue_name); -CREATE INDEX procrastinate_jobs_id_lock_idx ON procrastinate_jobs (id, lock) WHERE status = ANY (ARRAY['todo'::procrastinate_job_status, 'doing'::procrastinate_job_status, 'aborting'::procrastinate_job_status]); -CREATE INDEX procrastinate_jobs_priority_idx ON procrastinate_jobs(priority desc, id asc) WHERE (status = 'todo'::procrastinate_job_status); +-- Index for select_stalled_jobs_by_heartbeat query +CREATE INDEX idx_procrastinate_jobs_worker_not_null ON procrastinate_jobs(worker_id) WHERE worker_id IS NOT NULL AND status = 'doing'::procrastinate_job_status; +CREATE INDEX procrastinate_jobs_queue_name_idx_v1 ON procrastinate_jobs(queue_name); +CREATE INDEX procrastinate_jobs_id_lock_idx_v1 ON procrastinate_jobs (id, lock) WHERE status = ANY (ARRAY['todo'::procrastinate_job_status, 'doing'::procrastinate_job_status]); +CREATE INDEX procrastinate_jobs_priority_idx_v1 ON procrastinate_jobs(priority desc, id asc) WHERE (status = 'todo'::procrastinate_job_status); -CREATE INDEX procrastinate_events_job_id_fkey ON procrastinate_events(job_id); +CREATE INDEX procrastinate_events_job_id_fkey_v1 ON procrastinate_events(job_id); -CREATE INDEX procrastinate_periodic_defers_job_id_fkey ON procrastinate_periodic_defers(job_id); +CREATE INDEX procrastinate_periodic_defers_job_id_fkey_v1 ON procrastinate_periodic_defers(job_id); +CREATE INDEX idx_procrastinate_workers_last_heartbeat ON procrastinate_workers(last_heartbeat); -- Functions - -CREATE FUNCTION procrastinate_defer_job( - queue_name character varying, - task_name character varying, - priority integer, - lock text, - queueing_lock text, - args jsonb, - scheduled_at timestamp with time zone +CREATE FUNCTION procrastinate_defer_jobs_v1( + jobs procrastinate_job_to_defer_v1[] ) - RETURNS bigint + RETURNS bigint[] LANGUAGE 
plpgsql AS $$ DECLARE - job_id bigint; + job_ids bigint[]; BEGIN - INSERT INTO procrastinate_jobs (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) - VALUES (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) - RETURNING id INTO job_id; + WITH inserted_jobs AS ( + INSERT INTO procrastinate_jobs (queue_name, task_name, priority, lock, queueing_lock, args, scheduled_at) + SELECT (job).queue_name, + (job).task_name, + (job).priority, + (job).lock, + (job).queueing_lock, + (job).args, + (job).scheduled_at + FROM unnest(jobs) AS job + RETURNING id + ) + SELECT array_agg(id) FROM inserted_jobs INTO job_ids; - RETURN job_id; + RETURN job_ids; END; $$; -CREATE FUNCTION procrastinate_defer_periodic_job( +CREATE FUNCTION procrastinate_defer_periodic_job_v2( _queue_name character varying, _lock character varying, _queueing_lock character varying, @@ -116,7 +143,6 @@ DECLARE _job_id bigint; _defer_id bigint; BEGIN - INSERT INTO procrastinate_periodic_defers (task_name, periodic_id, defer_timestamp) VALUES (_task_name, _periodic_id, _defer_timestamp) @@ -128,15 +154,23 @@ BEGIN END IF; UPDATE procrastinate_periodic_defers - SET job_id = procrastinate_defer_job( - _queue_name, - _task_name, - _priority, - _lock, - _queueing_lock, - _args, - NULL - ) + SET job_id = ( + SELECT COALESCE(( + SELECT unnest(procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + _queue_name, + _task_name, + _priority, + _lock, + _queueing_lock, + _args, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + )) + ), NULL) + ) WHERE id = _defer_id RETURNING job_id INTO _job_id; @@ -157,8 +191,9 @@ BEGIN END; $$; -CREATE FUNCTION procrastinate_fetch_job( - target_queue_names character varying[] +CREATE FUNCTION procrastinate_fetch_job_v2( + target_queue_names character varying[], + p_worker_id bigint ) RETURNS procrastinate_jobs LANGUAGE plpgsql @@ -170,15 +205,30 @@ BEGIN SELECT jobs.* FROM procrastinate_jobs AS jobs WHERE - -- reject the job if its lock 
has earlier jobs + -- reject the job if its lock has earlier or higher priority jobs NOT EXISTS ( SELECT 1 - FROM procrastinate_jobs AS earlier_jobs + FROM procrastinate_jobs AS other_jobs WHERE jobs.lock IS NOT NULL - AND earlier_jobs.lock = jobs.lock - AND earlier_jobs.status IN ('todo', 'doing', 'aborting') - AND earlier_jobs.id < jobs.id) + AND other_jobs.lock = jobs.lock + AND ( + -- job with same lock is already running + other_jobs.status = 'doing' + OR + -- job with same lock is waiting and has higher priority (or same priority but was queued first) + ( + other_jobs.status = 'todo' + AND ( + other_jobs.priority > jobs.priority + OR ( + other_jobs.priority = jobs.priority + AND other_jobs.id < jobs.id + ) + ) + ) + ) + ) AND jobs.status = 'todo' AND (target_queue_names IS NULL OR jobs.queue_name = ANY( target_queue_names )) AND (jobs.scheduled_at IS NULL OR jobs.scheduled_at <= now()) @@ -186,19 +236,16 @@ BEGIN FOR UPDATE OF jobs SKIP LOCKED ) UPDATE procrastinate_jobs - SET status = 'doing' + SET status = 'doing', worker_id = p_worker_id FROM candidate WHERE procrastinate_jobs.id = candidate.id RETURNING procrastinate_jobs.* INTO found_jobs; - RETURN found_jobs; + RETURN found_jobs; END; $$; --- procrastinate_finish_job --- the next_scheduled_at argument is kept for compatibility reasons, it is to be --- removed after 1.0.0 is released -CREATE FUNCTION procrastinate_finish_job(job_id bigint, end_status procrastinate_job_status, delete_job boolean) +CREATE FUNCTION procrastinate_finish_job_v1(job_id bigint, end_status procrastinate_job_status, delete_job boolean) RETURNS void LANGUAGE plpgsql AS $$ @@ -210,26 +257,25 @@ BEGIN END IF; IF delete_job THEN DELETE FROM procrastinate_jobs - WHERE id = job_id AND status IN ('todo', 'doing', 'aborting') + WHERE id = job_id AND status IN ('todo', 'doing') RETURNING id INTO _job_id; ELSE UPDATE procrastinate_jobs SET status = end_status, - attempts = - CASE - WHEN status = 'doing' THEN attempts + 1 - ELSE attempts - 
END - WHERE id = job_id AND status IN ('todo', 'doing', 'aborting') + abort_requested = false, + attempts = CASE status + WHEN 'doing' THEN attempts + 1 ELSE attempts + END + WHERE id = job_id AND status IN ('todo', 'doing') RETURNING id INTO _job_id; END IF; IF _job_id IS NULL THEN - RAISE 'Job was not found or not in "doing", "todo" or "aborting" status (job id: %)', job_id; + RAISE 'Job was not found or not in "doing" or "todo" status (job id: %)', job_id; END IF; END; $$; -CREATE FUNCTION procrastinate_cancel_job(job_id bigint, abort boolean, delete_job boolean) +CREATE FUNCTION procrastinate_cancel_job_v1(job_id bigint, abort boolean, delete_job boolean) RETURNS bigint LANGUAGE plpgsql AS $$ @@ -244,10 +290,10 @@ BEGIN IF _job_id IS NULL THEN IF abort THEN UPDATE procrastinate_jobs - SET status = CASE status - WHEN 'todo' THEN 'cancelled'::procrastinate_job_status - WHEN 'doing' THEN 'aborting'::procrastinate_job_status - END + SET abort_requested = true, + status = CASE status + WHEN 'todo' THEN 'cancelled'::procrastinate_job_status ELSE status + END WHERE id = job_id AND status IN ('todo', 'doing') RETURNING id INTO _job_id; ELSE @@ -261,46 +307,113 @@ BEGIN END; $$; -CREATE FUNCTION procrastinate_retry_job( +CREATE FUNCTION procrastinate_retry_job_v1( job_id bigint, retry_at timestamp with time zone, new_priority integer, new_queue_name character varying, new_lock character varying -) - RETURNS void - LANGUAGE plpgsql -AS $$ +) RETURNS void LANGUAGE plpgsql AS $$ DECLARE _job_id bigint; + _abort_requested boolean; BEGIN - UPDATE procrastinate_jobs - SET status = 'todo', - attempts = attempts + 1, - scheduled_at = retry_at, - priority = COALESCE(new_priority, priority), - queue_name = COALESCE(new_queue_name, queue_name), - lock = COALESCE(new_lock, lock) + SELECT abort_requested FROM procrastinate_jobs WHERE id = job_id AND status = 'doing' - RETURNING id INTO _job_id; + FOR UPDATE + INTO _abort_requested; + IF _abort_requested THEN + UPDATE 
procrastinate_jobs + SET status = 'failed'::procrastinate_job_status + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = 'todo'::procrastinate_job_status, + attempts = attempts + 1, + scheduled_at = retry_at, + priority = COALESCE(new_priority, priority), + queue_name = COALESCE(new_queue_name, queue_name), + lock = COALESCE(new_lock, lock) + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + END IF; + IF _job_id IS NULL THEN RAISE 'Job was not found or not in "doing" status (job id: %)', job_id; END IF; END; $$; -CREATE FUNCTION procrastinate_notify_queue() +CREATE FUNCTION procrastinate_retry_job_v2( + job_id bigint, + retry_at timestamp with time zone, + new_priority integer, + new_queue_name character varying, + new_lock character varying +) RETURNS void LANGUAGE plpgsql AS $$ +DECLARE + _job_id bigint; + _abort_requested boolean; + _current_status procrastinate_job_status; +BEGIN + SELECT status, abort_requested FROM procrastinate_jobs + WHERE id = job_id AND status IN ('doing', 'failed') + FOR UPDATE + INTO _current_status, _abort_requested; + IF _current_status = 'doing' AND _abort_requested THEN + UPDATE procrastinate_jobs + SET status = 'failed'::procrastinate_job_status + WHERE id = job_id AND status = 'doing' + RETURNING id INTO _job_id; + ELSE + UPDATE procrastinate_jobs + SET status = 'todo'::procrastinate_job_status, + attempts = attempts + 1, + scheduled_at = retry_at, + priority = COALESCE(new_priority, priority), + queue_name = COALESCE(new_queue_name, queue_name), + lock = COALESCE(new_lock, lock) + WHERE id = job_id AND status IN ('doing', 'failed') + RETURNING id INTO _job_id; + END IF; + + IF _job_id IS NULL THEN + RAISE 'Job was not found or has an invalid status to retry (job id: %)', job_id; + END IF; + +END; +$$; + +CREATE FUNCTION procrastinate_notify_queue_job_inserted_v1() RETURNS trigger LANGUAGE plpgsql AS $$ +DECLARE + payload TEXT; +BEGIN + SELECT 
json_build_object('type', 'job_inserted', 'job_id', NEW.id)::text INTO payload; + PERFORM pg_notify('procrastinate_queue_v1#' || NEW.queue_name, payload); + PERFORM pg_notify('procrastinate_any_queue_v1', payload); + RETURN NEW; +END; +$$; + +CREATE FUNCTION procrastinate_notify_queue_abort_job_v1() +RETURNS trigger + LANGUAGE plpgsql +AS $$ +DECLARE + payload TEXT; BEGIN - PERFORM pg_notify('procrastinate_queue#' || NEW.queue_name, NEW.task_name); - PERFORM pg_notify('procrastinate_any_queue', NEW.task_name); + SELECT json_build_object('type', 'abort_job_requested', 'job_id', NEW.id)::text INTO payload; + PERFORM pg_notify('procrastinate_queue_v1#' || NEW.queue_name, payload); + PERFORM pg_notify('procrastinate_any_queue_v1', payload); RETURN NEW; END; $$; -CREATE FUNCTION procrastinate_trigger_status_events_procedure_insert() +CREATE FUNCTION procrastinate_trigger_function_status_events_insert_v1() RETURNS trigger LANGUAGE plpgsql AS $$ @@ -311,7 +424,7 @@ BEGIN END; $$; -CREATE FUNCTION procrastinate_trigger_status_events_procedure_update() +CREATE FUNCTION procrastinate_trigger_function_status_events_update_v1() RETURNS trigger LANGUAGE plpgsql AS $$ @@ -338,14 +451,11 @@ BEGIN ) THEN 'cancelled'::procrastinate_job_event_type WHEN OLD.status = 'doing'::procrastinate_job_status - AND NEW.status = 'aborting'::procrastinate_job_status - THEN 'abort_requested'::procrastinate_job_event_type - WHEN ( - OLD.status = 'doing'::procrastinate_job_status - OR OLD.status = 'aborting'::procrastinate_job_status - ) AND NEW.status = 'aborted'::procrastinate_job_status THEN 'aborted'::procrastinate_job_event_type + WHEN OLD.status = 'failed'::procrastinate_job_status + AND NEW.status = 'todo'::procrastinate_job_status + THEN 'retried'::procrastinate_job_event_type ELSE NULL END as event_type ) @@ -357,7 +467,7 @@ BEGIN END; $$; -CREATE FUNCTION procrastinate_trigger_scheduled_events_procedure() +CREATE FUNCTION procrastinate_trigger_function_scheduled_events_v1() RETURNS 
trigger LANGUAGE plpgsql AS $$ @@ -369,7 +479,18 @@ BEGIN END; $$; -CREATE FUNCTION procrastinate_unlink_periodic_defers() +CREATE FUNCTION procrastinate_trigger_abort_requested_events_procedure_v1() + RETURNS trigger + LANGUAGE plpgsql +AS $$ +BEGIN + INSERT INTO procrastinate_events(job_id, type) + VALUES (NEW.id, 'abort_requested'::procrastinate_job_event_type); + RETURN NEW; +END; +$$; + +CREATE FUNCTION procrastinate_unlink_periodic_defers_v1() RETURNS trigger LANGUAGE plpgsql AS $$ @@ -381,168 +502,82 @@ BEGIN END; $$; --- Triggers - -CREATE TRIGGER procrastinate_jobs_notify_queue - AFTER INSERT ON procrastinate_jobs - FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) - EXECUTE PROCEDURE procrastinate_notify_queue(); - -CREATE TRIGGER procrastinate_trigger_status_events_update - AFTER UPDATE OF status ON procrastinate_jobs - FOR EACH ROW - EXECUTE PROCEDURE procrastinate_trigger_status_events_procedure_update(); - -CREATE TRIGGER procrastinate_trigger_status_events_insert - AFTER INSERT ON procrastinate_jobs - FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) - EXECUTE PROCEDURE procrastinate_trigger_status_events_procedure_insert(); - -CREATE TRIGGER procrastinate_trigger_scheduled_events - AFTER UPDATE OR INSERT ON procrastinate_jobs - FOR EACH ROW WHEN ((new.scheduled_at IS NOT NULL AND new.status = 'todo'::procrastinate_job_status)) - EXECUTE PROCEDURE procrastinate_trigger_scheduled_events_procedure(); - -CREATE TRIGGER procrastinate_trigger_delete_jobs - BEFORE DELETE ON procrastinate_jobs - FOR EACH ROW EXECUTE PROCEDURE procrastinate_unlink_periodic_defers(); - - --- Old versions of functions, for backwards compatibility (to be removed --- after 2.0.0) - --- procrastinate_defer_job --- the function without the priority argument is kept for compatibility reasons -CREATE FUNCTION procrastinate_defer_job( - queue_name character varying, - task_name character varying, - lock text, - queueing_lock text, - args jsonb, - 
scheduled_at timestamp with time zone -) - RETURNS bigint +CREATE FUNCTION procrastinate_register_worker_v1() + RETURNS TABLE(worker_id bigint) LANGUAGE plpgsql AS $$ -DECLARE - job_id bigint; BEGIN - INSERT INTO procrastinate_jobs (queue_name, task_name, lock, queueing_lock, args, scheduled_at) - VALUES (queue_name, task_name, lock, queueing_lock, args, scheduled_at) - RETURNING id INTO job_id; - - RETURN job_id; + RETURN QUERY + INSERT INTO procrastinate_workers DEFAULT VALUES + RETURNING procrastinate_workers.id; END; $$; --- procrastinate_finish_job -CREATE OR REPLACE FUNCTION procrastinate_finish_job(job_id integer, end_status procrastinate_job_status, next_scheduled_at timestamp with time zone, delete_job boolean) +CREATE FUNCTION procrastinate_unregister_worker_v1(worker_id bigint) RETURNS void LANGUAGE plpgsql AS $$ -DECLARE - _job_id bigint; BEGIN - IF end_status NOT IN ('succeeded', 'failed') THEN - RAISE 'End status should be either "succeeded" or "failed" (job id: %)', job_id; - END IF; - IF delete_job THEN - DELETE FROM procrastinate_jobs - WHERE id = job_id AND status IN ('todo', 'doing') - RETURNING id INTO _job_id; - ELSE - UPDATE procrastinate_jobs - SET status = end_status, - attempts = - CASE - WHEN status = 'doing' THEN attempts + 1 - ELSE attempts - END - WHERE id = job_id AND status IN ('todo', 'doing') - RETURNING id INTO _job_id; - END IF; - IF _job_id IS NULL THEN - RAISE 'Job was not found or not in "doing" or "todo" status (job id: %)', job_id; - END IF; + DELETE FROM procrastinate_workers + WHERE id = worker_id; END; $$; --- procrastinate_defer_periodic_job --- the function without the priority argument is kept for compatibility reasons -CREATE FUNCTION procrastinate_defer_periodic_job( - _queue_name character varying, - _lock character varying, - _queueing_lock character varying, - _task_name character varying, - _periodic_id character varying, - _defer_timestamp bigint, - _args jsonb -) - RETURNS bigint +CREATE FUNCTION 
procrastinate_update_heartbeat_v1(worker_id bigint) + RETURNS void LANGUAGE plpgsql AS $$ -DECLARE - _job_id bigint; - _defer_id bigint; BEGIN - - INSERT - INTO procrastinate_periodic_defers (task_name, periodic_id, defer_timestamp) - VALUES (_task_name, _periodic_id, _defer_timestamp) - ON CONFLICT DO NOTHING - RETURNING id into _defer_id; - - IF _defer_id IS NULL THEN - RETURN NULL; - END IF; - - UPDATE procrastinate_periodic_defers - SET job_id = procrastinate_defer_job( - _queue_name, - _task_name, - 0, - _lock, - _queueing_lock, - _args, - NULL - ) - WHERE id = _defer_id - RETURNING job_id INTO _job_id; - - DELETE - FROM procrastinate_periodic_defers - USING ( - SELECT id - FROM procrastinate_periodic_defers - WHERE procrastinate_periodic_defers.task_name = _task_name - AND procrastinate_periodic_defers.periodic_id = _periodic_id - AND procrastinate_periodic_defers.defer_timestamp < _defer_timestamp - ORDER BY id - FOR UPDATE - ) to_delete - WHERE procrastinate_periodic_defers.id = to_delete.id; - - RETURN _job_id; + UPDATE procrastinate_workers + SET last_heartbeat = NOW() + WHERE id = worker_id; END; $$; --- procrastinate_retry_job --- the function without the new_* arguments is kept for compatibility reasons -CREATE FUNCTION procrastinate_retry_job(job_id bigint, retry_at timestamp with time zone) - RETURNS void +CREATE FUNCTION procrastinate_prune_stalled_workers_v1(seconds_since_heartbeat float) + RETURNS TABLE(worker_id bigint) LANGUAGE plpgsql AS $$ -DECLARE - _job_id bigint; BEGIN - UPDATE procrastinate_jobs - SET status = 'todo', - attempts = attempts + 1, - scheduled_at = retry_at - WHERE id = job_id AND status = 'doing' - RETURNING id INTO _job_id; - IF _job_id IS NULL THEN - RAISE 'Job was not found or not in "doing" status (job id: %)', job_id; - END IF; + RETURN QUERY + DELETE FROM procrastinate_workers + WHERE last_heartbeat < NOW() - (seconds_since_heartbeat || 'SECOND')::INTERVAL + RETURNING procrastinate_workers.id; END; $$; + +-- Triggers + 
+CREATE TRIGGER procrastinate_jobs_notify_queue_job_inserted_v1 + AFTER INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_job_inserted_v1(); + +CREATE TRIGGER procrastinate_jobs_notify_queue_job_aborted_v1 + AFTER UPDATE OF abort_requested ON procrastinate_jobs + FOR EACH ROW WHEN ((old.abort_requested = false AND new.abort_requested = true AND new.status = 'doing'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_notify_queue_abort_job_v1(); + +CREATE TRIGGER procrastinate_trigger_status_events_update_v1 + AFTER UPDATE OF status ON procrastinate_jobs + FOR EACH ROW + EXECUTE PROCEDURE procrastinate_trigger_function_status_events_update_v1(); + +CREATE TRIGGER procrastinate_trigger_status_events_insert_v1 + AFTER INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_trigger_function_status_events_insert_v1(); + +CREATE TRIGGER procrastinate_trigger_scheduled_events_v1 + AFTER UPDATE OR INSERT ON procrastinate_jobs + FOR EACH ROW WHEN ((new.scheduled_at IS NOT NULL AND new.status = 'todo'::procrastinate_job_status)) + EXECUTE PROCEDURE procrastinate_trigger_function_scheduled_events_v1(); + +CREATE TRIGGER procrastinate_trigger_abort_requested_events_v1 + AFTER UPDATE OF abort_requested ON procrastinate_jobs + FOR EACH ROW WHEN ((new.abort_requested = true)) + EXECUTE PROCEDURE procrastinate_trigger_abort_requested_events_procedure_v1(); + +CREATE TRIGGER procrastinate_trigger_delete_jobs_v1 + BEFORE DELETE ON procrastinate_jobs + FOR EACH ROW EXECUTE PROCEDURE procrastinate_unlink_periodic_defers_v1(); diff --git a/procrastinate/sync_psycopg_connector.py b/procrastinate/sync_psycopg_connector.py index e8905647b..08a0994de 100644 --- a/procrastinate/sync_psycopg_connector.py +++ b/procrastinate/sync_psycopg_connector.py @@ -2,17 +2,17 @@ import contextlib import logging +import re 
from collections.abc import Generator, Iterator from typing import Any, Callable import psycopg import psycopg.rows -import psycopg.sql import psycopg.types.json import psycopg_pool from typing_extensions import LiteralString -from procrastinate import connector, exceptions +from procrastinate import connector, exceptions, manager logger = logging.getLogger(__name__) @@ -28,7 +28,18 @@ def wrap_exceptions() -> Generator[None, None, None]: try: yield except psycopg.errors.UniqueViolation as exc: - raise exceptions.UniqueViolation(constraint_name=exc.diag.constraint_name) + constraint_name = exc.diag.constraint_name + queueing_lock = None + if constraint_name == manager.QUEUEING_LOCK_CONSTRAINT: + assert exc.diag.message_detail + match = re.search(r"Key \((.*?)\)=\((.*?)\)", exc.diag.message_detail) + assert match + column, queueing_lock = match.groups() + assert column == "queueing_lock" + + raise exceptions.UniqueViolation( + constraint_name=constraint_name, queueing_lock=queueing_lock + ) except psycopg.Error as exc: raise exceptions.ConnectorException from exc @@ -130,11 +141,18 @@ def close(self) -> None: self._pool.close() self._pool = None + def _wrap_value(self, value: Any) -> Any: + if isinstance(value, dict): + return psycopg.types.json.Jsonb(value) + elif isinstance(value, list): + return [self._wrap_value(item) for item in value] + elif isinstance(value, tuple): + return tuple([self._wrap_value(item) for item in value]) + else: + return value + def _wrap_json(self, arguments: dict[str, Any]): - return { - key: psycopg.types.json.Jsonb(value) if isinstance(value, dict) else value - for key, value in arguments.items() - } + return {key: self._wrap_value(value) for key, value in arguments.items()} @contextlib.contextmanager def _get_cursor(self) -> Iterator[psycopg.Cursor[psycopg.rows.DictRow]]: diff --git a/procrastinate/tasks.py b/procrastinate/tasks.py index 876da1c69..d0381a743 100644 --- a/procrastinate/tasks.py +++ b/procrastinate/tasks.py @@ -139,6 
+139,15 @@ async def defer_async(self, *_: Args.args, **task_kwargs: Args.kwargs) -> int: """ return await self.configure().defer_async(**task_kwargs) + async def batch_defer_async(self, *task_kwargs: types.JSONDict) -> list[int]: + """ + Create multiple jobs from this task and the given arguments. This is more + efficient than deferring them one by one. + The jobs will be created with default parameters, if you want to better + specify when and how to launch this job, see `Task.configure`. + """ + return await self.configure().batch_defer_async(*task_kwargs) + def defer(self, *_: Args.args, **task_kwargs: Args.kwargs) -> int: """ Create a job from this task and the given arguments. @@ -147,6 +156,15 @@ def defer(self, *_: Args.args, **task_kwargs: Args.kwargs) -> int: """ return self.configure().defer(**task_kwargs) + def batch_defer(self, *task_kwargs: types.JSONDict) -> list[int]: + """ + Create multiple jobs from this task and the given arguments. This is more + efficient than deferring them one by one. + The jobs will be created with default parameters, if you want to better + specify when and how to launch this job, see `Task.configure`. 
+ """ + return self.configure().batch_defer(*task_kwargs) + def configure(self, **options: Unpack[ConfigureTaskOptions]) -> jobs.JobDeferrer: """ Configure the job with all the specific settings, defining how the job diff --git a/procrastinate/testing.py b/procrastinate/testing.py index 30e898031..a21ede1ee 100644 --- a/procrastinate/testing.py +++ b/procrastinate/testing.py @@ -2,12 +2,24 @@ import asyncio import datetime +import json +import threading +import typing from collections import Counter from collections.abc import Iterable from itertools import count from typing import Any -from procrastinate import connector, exceptions, schema, sql, types, utils +from procrastinate import ( + connector, + exceptions, + jobs, + manager, + schema, + sql, + types, + utils, +) JobRow = dict[str, Any] EventRow = dict[str, Any] @@ -36,9 +48,10 @@ def reset(self) -> None: """ self.jobs: dict[int, JobRow] = {} self.events: dict[int, list[EventRow]] = {} + self.workers: dict[int, datetime.datetime] = {} self.job_counter = count(1) self.queries: list[tuple[str, dict[str, Any]]] = [] - self.notify_event: asyncio.Event | None = None + self.on_notification: connector.Notify | None = None self.notify_channels: list[str] = [] self.periodic_defers: dict[tuple[str, str], int] = {} self.table_exists = True @@ -47,7 +60,7 @@ def reset(self) -> None: def get_sync_connector(self) -> connector.BaseConnector: return self - def generic_execute(self, query, suffix, **arguments) -> Any: + async def generic_execute(self, query, suffix, **arguments) -> Any: """ Calling a query will call the _ method on this class. 
Suffix is "run" if no result is expected, @@ -55,43 +68,40 @@ def generic_execute(self, query, suffix, **arguments) -> Any: """ query_name = self.reverse_queries[query] self.queries.append((query_name, arguments)) - return getattr(self, f"{query_name}_{suffix}")(**arguments) + return await getattr(self, f"{query_name}_{suffix}")(**arguments) def make_dynamic_query(self, query, **identifiers: str) -> str: return query.format(**identifiers) - def execute_query(self, query: str, **arguments: Any) -> None: - self.generic_execute(query, "run", **arguments) - - def execute_query_one(self, query: str, **arguments: Any) -> dict[str, Any]: - return self.generic_execute(query, "one", **arguments) - - def execute_query_all(self, query: str, **arguments: Any) -> list[dict[str, Any]]: - return self.generic_execute(query, "all", **arguments) - async def execute_query_async(self, query: str, **arguments: Any) -> None: - self.generic_execute(query, "run", **arguments) + await self.generic_execute(query, "run", **arguments) async def execute_query_one_async( self, query: str, **arguments: Any ) -> dict[str, Any]: - return self.generic_execute(query, "one", **arguments) + return await self.generic_execute(query, "one", **arguments) async def execute_query_all_async( self, query: str, **arguments: Any ) -> list[dict[str, Any]]: - return self.generic_execute(query, "all", **arguments) + return await self.generic_execute(query, "all", **arguments) async def listen_notify( - self, event: asyncio.Event, channels: Iterable[str] + self, on_notification: connector.Notify, channels: Iterable[str] ) -> None: - self.notify_event = event + self.on_notification = on_notification self.notify_channels = list(channels) def open(self, pool: connector.Pool | None = None) -> None: self.states.append("open") async def open_async(self, pool: connector.Pool | None = None) -> None: + """ + Save the current event loop and its thread id so that later notifications + can be scheduled on this loop. 
+ """ + self._loop = asyncio.get_running_loop() + self._loop_thread_id = threading.get_ident() self.states.append("open_async") def close(self) -> None: @@ -102,52 +112,72 @@ async def close_async(self) -> None: # End of BaseConnector methods - def defer_job_one( - self, - task_name: str, - priority: int, - lock: str | None, - queueing_lock: str | None, - args: types.JSONDict, - scheduled_at: datetime.datetime | None, - queue: str, - ) -> JobRow: - if queueing_lock is not None and any( - job["queueing_lock"] == queueing_lock and job["status"] == "todo" - for job in self.jobs.values() - ): - from . import manager + async def defer_jobs_all(self, jobs: list[types.JobToDefer]) -> list[JobRow]: + # We check the queueing locks upfront so that no job is inserted into + # the queue if the constraint is violated (simulating a database + # rollback). + new_queueing_locks = [ + job.queueing_lock for job in jobs if job.queueing_lock is not None + ] + + counts = Counter(new_queueing_locks) + duplicate = next( + (lock for lock in new_queueing_locks if counts[lock] > 1), None + ) + + if duplicate is None: + current_queueing_locks = { + job["queueing_lock"] + for job in self.jobs.values() + if job["status"] == "todo" + } - {None} + + duplicate = next( + (lock for lock in new_queueing_locks if lock in current_queueing_locks), + None, + ) + if duplicate is not None: raise exceptions.UniqueViolation( - constraint_name=manager.QUEUEING_LOCK_CONSTRAINT + constraint_name=manager.QUEUEING_LOCK_CONSTRAINT, + queueing_lock=duplicate, ) - id = next(self.job_counter) - - self.jobs[id] = job_row = { - "id": id, - "queue_name": queue, - "task_name": task_name, - "priority": priority, - "lock": lock, - "queueing_lock": queueing_lock, - "args": args, - "status": "todo", - "scheduled_at": scheduled_at, - "attempts": 0, - } - self.events[id] = [] - if scheduled_at: - self.events[id].append({"type": "scheduled", "at": scheduled_at}) - self.events[id].append({"type": "deferred", "at": 
utils.utcnow()}) - if self.notify_event: - if "procrastinate_any_queue" in self.notify_channels or ( - f"procrastinate_queue#{queue}" in self.notify_channels - ): - self.notify_event.set() - return job_row + job_rows = [] + for job in jobs: + id = next(self.job_counter) + + self.jobs[id] = job_row = { + "id": id, + "queue_name": job.queue_name, + "task_name": job.task_name, + "priority": job.priority, + "lock": job.lock, + "queueing_lock": job.queueing_lock, + "args": job.args, + "status": "todo", + "scheduled_at": job.scheduled_at, + "attempts": 0, + "abort_requested": False, + "worker_id": None, + } + self.events[id] = [] + if job.scheduled_at: + self.events[id].append({"type": "scheduled", "at": job.scheduled_at}) + self.events[id].append({"type": "deferred", "at": utils.utcnow()}) + + await self._notify( + job.queue_name, + { + "type": "job_inserted", + "job_id": id, + }, + ) + job_rows.append(job_row) + + return job_rows - def defer_periodic_job_one( + async def defer_periodic_job_one( self, queue: str, task_name: str, @@ -157,21 +187,26 @@ def defer_periodic_job_one( lock: str | None, queueing_lock: str | None, periodic_id: str, - ): + ) -> JobRow: # If the periodic task has already been deferred for this timestamp if self.periodic_defers.get((task_name, periodic_id)) == defer_timestamp: return {"id": None} self.periodic_defers[(task_name, periodic_id)] = defer_timestamp - return self.defer_job_one( - task_name=task_name, - queue=queue, - priority=priority, - lock=lock, - queueing_lock=queueing_lock, - args=args, - scheduled_at=None, + job_rows = await self.defer_jobs_all( + [ + types.JobToDefer( + queue_name=queue, + task_name=task_name, + priority=priority, + lock=lock, + queueing_lock=queueing_lock, + args=args, + scheduled_at=None, + ) + ] ) + return job_rows[0] @property def current_locks(self) -> Iterable[str]: @@ -187,8 +222,45 @@ def finished_jobs(self) -> list[JobRow]: if job["status"] in {"failed", "succeeded"} ] - def fetch_job_one(self, queues: 
Iterable[str] | None) -> dict: - # Creating a copy of the iterable so that we can modify it while we iterate + async def _notify(self, queue_name: str, notification: jobs.Notification) -> None: + """ + Instead of directly awaiting on_notification, we check the current thread. + If we are not on the same thread as the one where the loop was saved, + we schedule the notification on the correct loop. + """ + if not self.on_notification: + return + + destination_channels = { + "procrastinate_any_queue_v1", + f"procrastinate_queue_v1#{queue_name}", + } + for channel in set(self.notify_channels).intersection(destination_channels): + coro = self.on_notification( + channel=channel, payload=json.dumps(notification) + ) + if threading.get_ident() == self._loop_thread_id: + # Already on the right thread: just await. + await coro + else: + # Not on the correct thread: schedule the coroutine on the saved loop. + + # run_coroutine_threadsafe needs a coroutine, but coro may be any + # awaitable (coroutines are awaitable but the opposite is not true). + # Consequently, we ensure that coro is a coroutine. + if not isinstance(coro, typing.Coroutine): + original_coro = coro + + async def _coro() -> None: + return await original_coro + + coro = _coro() + future = asyncio.run_coroutine_threadsafe(coro, self._loop) + # Wrap the concurrent.futures.Future so we can await it. 
+ await asyncio.wrap_future(future) + + async def fetch_job_one(self, queues: Iterable[str] | None, worker_id: int) -> dict: + assert worker_id in self.workers, f"Worker {worker_id} not found" filtered_jobs = [ job @@ -208,10 +280,11 @@ def fetch_job_one(self, queues: Iterable[str] | None) -> dict: job = filtered_jobs[0] job["status"] = "doing" + job["worker_id"] = worker_id self.events[job["id"]].append({"type": "started", "at": utils.utcnow()}) return job - def finish_job_run(self, job_id: int, status: str, delete_job: bool) -> None: + async def finish_job_run(self, job_id: int, status: str, delete_job: bool) -> None: if delete_job: self.jobs.pop(job_id) return @@ -219,9 +292,10 @@ def finish_job_run(self, job_id: int, status: str, delete_job: bool) -> None: job_row = self.jobs[job_id] job_row["status"] = status job_row["attempts"] += 1 + job_row["abort_requested"] = False self.events[job_id].append({"type": status, "at": utils.utcnow()}) - def cancel_job_one(self, job_id: int, abort: bool, delete_job: bool) -> dict: + async def cancel_job_one(self, job_id: int, abort: bool, delete_job: bool) -> dict: job_row = self.jobs[job_id] if job_row["status"] == "todo": @@ -232,16 +306,24 @@ def cancel_job_one(self, job_id: int, abort: bool, delete_job: bool) -> dict: job_row["status"] = "cancelled" return {"id": job_id} - if abort and job_row["status"] == "doing": - job_row["status"] = "aborting" + if abort: + job_row["abort_requested"] = True + await self._notify( + job_row["queue_name"], + { + "type": "abort_job_requested", + "job_id": job_id, + }, + ) + return {"id": job_id} return {"id": None} - def get_job_status_one(self, job_id: int) -> dict: + async def get_job_status_one(self, job_id: int) -> dict: return {"status": self.jobs[job_id]["status"]} - def retry_job_run( + async def retry_job_run( self, job_id: int, retry_at: datetime.datetime, @@ -262,7 +344,7 @@ def retry_job_run( self.events[job_id].append({"type": "scheduled", "at": retry_at}) 
self.events[job_id].append({"type": "deferred_for_retry", "at": utils.utcnow()}) - def select_stalled_jobs_all(self, nb_seconds, queue, task_name): + async def select_stalled_jobs_by_started_all(self, nb_seconds, queue, task_name): return ( job for job in self.jobs.values() @@ -273,7 +355,25 @@ def select_stalled_jobs_all(self, nb_seconds, queue, task_name): and task_name in (job["task_name"], None) ) - def delete_old_jobs_run(self, nb_hours, queue, statuses): + async def select_stalled_jobs_by_heartbeat_all( + self, queue, task_name, seconds_since_heartbeat + ): + return ( + job + for job in self.jobs.values() + if job["status"] == "doing" + and queue in (job["queue_name"], None) + and task_name in (job["task_name"], None) + and ( + self.workers.get( + job["worker_id"], + datetime.datetime.min.replace(tzinfo=datetime.timezone.utc), + ) + < utils.utcnow() - datetime.timedelta(seconds=seconds_since_heartbeat) + ) + ) + + async def delete_old_jobs_run(self, nb_hours, queue, statuses): for id, job in list(self.jobs.items()): if ( job["status"] in statuses @@ -285,47 +385,93 @@ def delete_old_jobs_run(self, nb_hours, queue, statuses): ): self.jobs.pop(id) - def listen_for_jobs_run(self) -> None: + async def listen_for_jobs_run(self) -> None: pass - def apply_schema_run(self) -> None: + async def apply_schema_run(self) -> None: pass - def list_jobs_all(self, **kwargs): + async def list_jobs_all(self, **kwargs): + jobs: list[JobRow] = [] for job in self.jobs.values(): if all( expected is None or str(job[key]) == str(expected) for key, expected in kwargs.items() ): - yield job + jobs.append(job) + return iter(jobs) - def list_queues_all(self, **kwargs): - jobs = list(self.list_jobs_all(**kwargs)) + async def list_queues_all(self, **kwargs): + result: list[dict] = [] + jobs = list(await self.list_jobs_all(**kwargs)) queues = sorted({job["queue_name"] for job in jobs}) for queue in queues: queue_jobs = [job for job in jobs if job["queue_name"] == queue] stats = 
Counter(job["status"] for job in queue_jobs) - yield {"name": queue, "jobs_count": len(queue_jobs), "stats": stats} + result.append( + {"name": queue, "jobs_count": len(queue_jobs), "stats": stats} + ) + return iter(result) - def list_tasks_all(self, **kwargs): - jobs = list(self.list_jobs_all(**kwargs)) + async def list_tasks_all(self, **kwargs): + result: list[dict] = [] + jobs = list(await self.list_jobs_all(**kwargs)) tasks = sorted({job["task_name"] for job in jobs}) for task in tasks: task_jobs = [job for job in jobs if job["task_name"] == task] stats = Counter(job["status"] for job in task_jobs) - yield {"name": task, "jobs_count": len(task_jobs), "stats": stats} + result.append({"name": task, "jobs_count": len(task_jobs), "stats": stats}) + return result - def list_locks_all(self, **kwargs): - jobs = list(self.list_jobs_all(**kwargs)) + async def list_locks_all(self, **kwargs): + result: list[dict] = [] + jobs = list(await self.list_jobs_all(**kwargs)) locks = sorted({job["lock"] for job in jobs}) for lock in locks: lock_jobs = [job for job in jobs if job["lock"] == lock] stats = Counter(job["status"] for job in lock_jobs) - yield {"name": lock, "jobs_count": len(lock_jobs), "stats": stats} + result.append({"name": lock, "jobs_count": len(lock_jobs), "stats": stats}) + return result + + async def list_jobs_to_abort_all(self, queue_name: str | None): + return list( + await self.list_jobs_all( + status="doing", abort_requested=True, queue_name=queue_name + ) + ) - def set_job_status_run(self, id, status): + async def set_job_status_run(self, id, status): id = int(id) self.jobs[id]["status"] = status - def check_connection_one(self): + async def check_connection_one(self): return {"check": self.table_exists or None} + + async def register_worker_one(self): + worker_id = max(self.workers, default=0) + 1 + self.workers[worker_id] = utils.utcnow() + return {"worker_id": worker_id} + + async def unregister_worker_run(self, worker_id): + self.workers.pop(worker_id) 
+ for job in self.jobs.values(): + if job["worker_id"] == worker_id: + job["worker_id"] = None + + async def update_heartbeat_run(self, worker_id): + self.workers[worker_id] = utils.utcnow() + + async def prune_stalled_workers_all(self, seconds_since_heartbeat): + pruned_workers = [] + for worker_id, heartbeat in list(self.workers.items()): + if heartbeat < utils.utcnow() - datetime.timedelta( + seconds=seconds_since_heartbeat + ): + self.workers.pop(worker_id) + pruned_workers.append({"worker_id": worker_id}) + + for job in self.jobs.values(): + if job["worker_id"] not in self.workers: + job["worker_id"] = None + + return pruned_workers diff --git a/procrastinate/types.py b/procrastinate/types.py index 9c373aff9..15e229951 100644 --- a/procrastinate/types.py +++ b/procrastinate/types.py @@ -1,14 +1,17 @@ from __future__ import annotations -import typing as t +import datetime +from typing import NamedTuple, TypedDict -from typing_extensions import NotRequired +from typing_extensions import NotRequired, TypeAlias -JSONValue = t.Union[str, int, float, bool, None, dict[str, t.Any], list[t.Any]] +JSONValue: TypeAlias = ( + 'dict[str, "JSONValue"] | list["JSONValue"] | str | int | float | bool | None' +) JSONDict = dict[str, JSONValue] -class TimeDeltaParams(t.TypedDict): +class TimeDeltaParams(TypedDict): weeks: NotRequired[int] days: NotRequired[int] hours: NotRequired[int] @@ -16,3 +19,13 @@ class TimeDeltaParams(t.TypedDict): seconds: NotRequired[int] milliseconds: NotRequired[int] microseconds: NotRequired[int] + + +class JobToDefer(NamedTuple): + queue_name: str + task_name: str + priority: int + lock: str | None + queueing_lock: str | None + args: JSONDict + scheduled_at: datetime.datetime | None diff --git a/procrastinate/utils.py b/procrastinate/utils.py index d438fd359..49daa7050 100644 --- a/procrastinate/utils.py +++ b/procrastinate/utils.py @@ -23,7 +23,6 @@ TypeVar, ) -import attr import dateutil.parser from asgiref import sync @@ -158,6 +157,10 @@ def 
utcnow() -> datetime.datetime: return datetime.datetime.now(tz=datetime.timezone.utc) +def utcmax() -> datetime.datetime: + return datetime.datetime.max.replace(tzinfo=datetime.timezone.utc) + + def parse_datetime(raw: str) -> datetime.datetime: try: # this parser is the stricter one, so we try it first @@ -208,162 +211,46 @@ async def _inner_coro() -> U: return _inner_coro().__await__() -class EndMain(Exception): - pass - - -@attr.dataclass() -class ExceptionRecord: - task: asyncio.Task - exc: Exception - - -async def run_tasks( - main_coros: Iterable[Coroutine], - side_coros: Iterable[Coroutine] | None = None, - graceful_stop_callback: Callable[[], Any] | None = None, -): +async def cancel_and_capture_errors(tasks: list[asyncio.Task]): """ - Run multiple coroutines in parallel: the main coroutines and the side - coroutines. Side coroutines are expected to run until they get cancelled. - Main corountines are expected to return at some point. By default, this - function will return None, but on certain circumstances, (see below) it can - raise a `RunTaskError`. A callback `graceful_stop_callback` will be called - if provided to ask the main coroutines to gracefully stop in case either - one of them or one of the side coroutines raise. 
- - - If all coroutines from main_coros return and there is no exception in the - coroutines from either `main_coros` or `side_coros`: - - coroutines from `side_coros` are cancelled and awaited - - the function return None - - - If any corountine from `main_coros` or `side_coros` raises an exception: - - `graceful_stop_callback` is called (the idea is that it should ask - coroutines from `main_coros` to exit gracefully) - - the function then wait for main_coros to finish, registering any - additional exception - - coroutines from `side_coros` are cancelled and awaited, registering any - additional exception - - all exceptions from coroutines in both `main_coros` and `side_coros` - are logged - - the function raises `RunTaskError` - - It's not expected that coroutines from `side_coros` return. If this - happens, the function will not react in a specific way. - - When a `RunTaskError` is raised because of one or more underlying - exceptions, one exception is the `__cause__` (the first main or side - coroutine that fails in the input iterables order, which will probably not - the chronologically the first one to be raised). All exceptions are logged. + Cancel all tasks and capture any error returned by any of those tasks (except the CancellationError itself) """ - # Ensure all passed coros are futures (in our case, Tasks). This means that - # all the coroutines start executing now. - # `name` argument to create_task only exist on python 3.8+ - main_tasks = [asyncio.create_task(coro, name=coro.__name__) for coro in main_coros] - side_tasks = [ - asyncio.create_task(coro, name=coro.__name__) for coro in side_coros or [] - ] - for task in main_tasks + side_tasks: - name = task.get_name() - logger.debug( - f"Started {name}", - extra={ - "action": f"{name}_start", - }, - ) - # Note that asyncio.gather() has 2 modes of execution: - # - asyncio.gather(*aws) - # Interrupts the gather at the first exception, and raises this - # exception. 
Otherwise, return a list containing return values for all - # coroutines - # - asyncio.gather(*aws, return_exceptions=True) - # Run every corouting until the end, return a list of either return - # values or raised exceptions (mixed). - - # The _main function will always raise: either an exception if one happens - # in the main tasks, or EndMain if every coroutine returned - async def _main(): - await asyncio.gather(*main_tasks) - raise EndMain - - exception_records: list[ExceptionRecord] = [] - try: - # side_tasks supposedly never finish, and _main always raises. - # Consequently, it's theoretically impossible to leave this try block - # without going through one of the except branches. - await asyncio.gather(_main(), *side_tasks) - except EndMain: - pass - except Exception as exc: - logger.error( - "Main coroutine error, initiating remaining coroutines stop. " - f"Cause: {exc!r}", - extra={ - "action": "run_tasks_stop_requested", - }, - ) - if graceful_stop_callback: - graceful_stop_callback() - - # Even if we asked the main tasks to stop, we still need to wait for - # them to actually stop. This may take some time. At this point, any - # additional exception will be registered but will not impact execution - # flow. - results = await asyncio.gather(*main_tasks, return_exceptions=True) - for task, result in zip(main_tasks, results): - if isinstance(result, Exception): - exception_records.append( - ExceptionRecord( - task=task, - exc=result, - ) - ) - else: - name = task.get_name() - logger.debug( - f"{name} finished execution", - extra={ - "action": f"{name}_stop", - }, - ) - - for task in side_tasks: - task.cancel() - try: - # task.cancel() says that the next time a task is executed, it will - # raise, but we need to give control back to the task for it to - # actually recieve the exception. 
- await task - except asyncio.CancelledError: - name = task.get_name() - logger.debug( - f"Stopped {name}", - extra={ - "action": f"{name}_stop", - }, - ) - except Exception as exc: - exception_records.append( - ExceptionRecord( - task=task, - exc=exc, - ) - ) - - for exception_record in exception_records: - name = exception_record.task.get_name() - message = f"{name} error: {exception_record.exc!r}" - action = f"{name}_error" + def log_task_exception(task: asyncio.Task, error: BaseException): logger.exception( - message, + f"{task.get_name()} error: {error!r}", + exc_info=error, extra={ - "action": action, + "action": f"{task.get_name()}_error", }, ) - if exception_records: - raise exceptions.RunTaskError from exception_records[0].exc + for task in tasks: + task.cancel() + + await asyncio.gather(*tasks, return_exceptions=True) + + for task in (task for task in tasks if task.done() and not task.cancelled()): + error = task.exception() + if error: + log_task_exception(task, error=error) + else: + logger.debug(f"Cancelled task {task.get_name()}") + + +async def wait_any(*coros_or_futures: Coroutine | asyncio.Future): + """Starts and wait on the first coroutine to complete and return it + Other pending coroutines are either cancelled or left running""" + futures = set(asyncio.ensure_future(fut) for fut in coros_or_futures) + + _, pending = await asyncio.wait( + futures, + return_when=asyncio.FIRST_COMPLETED, + ) + + for task in pending: + task.cancel() + await asyncio.gather(*pending, return_exceptions=True) def add_namespace(name: str, namespace: str) -> str: @@ -438,4 +325,14 @@ def async_context_decorator(func: Callable) -> Callable: def datetime_from_timedelta_params(params: TimeDeltaParams) -> datetime.datetime: - return utcnow() + datetime.timedelta(**params) + try: + return utcnow() + datetime.timedelta(**params) + except OverflowError: + return utcmax() + + +def queues_display(queues: Iterable[str] | None) -> str: + if queues: + return f"queues {', 
'.join(queues)}" + else: + return "all queues" diff --git a/procrastinate/worker.py b/procrastinate/worker.py index 4a73fdffd..01351ae86 100644 --- a/procrastinate/worker.py +++ b/procrastinate/worker.py @@ -7,7 +7,6 @@ import logging import time from collections.abc import Awaitable, Iterable -from enum import Enum from typing import Any, Callable from procrastinate import ( @@ -16,27 +15,19 @@ job_context, jobs, periodic, + retry, signals, tasks, + types, utils, ) logger = logging.getLogger(__name__) - WORKER_NAME = "worker" -WORKER_TIMEOUT = 5.0 # seconds -WORKER_CONCURRENCY = 1 # parallel task(s) - - -class DeleteJobCondition(Enum): - """ - An enumeration with all the possible conditions to delete a job - """ - - NEVER = "never" #: Keep jobs in database after completion - SUCCESSFUL = "successful" #: Delete only successful jobs - ALWAYS = "always" #: Always delete jobs at completion +WORKER_CONCURRENCY = 1 # maximum number of parallel jobs +FETCH_JOB_POLLING_INTERVAL = 5.0 # seconds +ABORT_JOB_POLLING_INTERVAL = 5.0 # seconds class Worker: @@ -44,327 +35,560 @@ def __init__( self, app: app.App, queues: Iterable[str] | None = None, - name: str | None = None, + name: str | None = WORKER_NAME, concurrency: int = WORKER_CONCURRENCY, wait: bool = True, - timeout: float = WORKER_TIMEOUT, + fetch_job_polling_interval: float = FETCH_JOB_POLLING_INTERVAL, + abort_job_polling_interval: float = ABORT_JOB_POLLING_INTERVAL, + shutdown_graceful_timeout: float | None = None, listen_notify: bool = True, - delete_jobs: str | DeleteJobCondition = DeleteJobCondition.NEVER.value, + delete_jobs: str | jobs.DeleteJobCondition | None = None, additional_context: dict[str, Any] | None = None, install_signal_handlers: bool = True, + update_heartbeat_interval: float = 10.0, + stalled_worker_timeout: float = 30.0, ): self.app = app self.queues = queues - self.worker_name: str = name or WORKER_NAME + self.worker_name = name self.concurrency = concurrency - - self.timeout = timeout 
self.wait = wait + self.fetch_job_polling_interval = fetch_job_polling_interval + self.abort_job_polling_interval = abort_job_polling_interval self.listen_notify = listen_notify self.delete_jobs = ( - DeleteJobCondition(delete_jobs) + jobs.DeleteJobCondition(delete_jobs) if isinstance(delete_jobs, str) else delete_jobs - ) - - self.job_manager = self.app.job_manager + ) or jobs.DeleteJobCondition.NEVER + self.additional_context = additional_context self.install_signal_handlers = install_signal_handlers + self.update_heartbeat_interval = update_heartbeat_interval + self.stalled_worker_timeout = stalled_worker_timeout - if name: - self.logger = logger.getChild(name) + if self.worker_name: + self.logger = logger.getChild(self.worker_name) else: self.logger = logger - # Handling the info about the currently running task. - self.base_context: job_context.JobContext = job_context.JobContext( - app=app, - worker_name=self.worker_name, - worker_queues=self.queues, - additional_context=additional_context.copy() if additional_context else {}, - ) - self.current_contexts: dict[int, job_context.JobContext] = {} - self.stop_requested = False - self.notify_event: asyncio.Event | None = None + self.worker_id: int | None = None - def context_for_worker( - self, worker_id: int, reset=False, **kwargs - ) -> job_context.JobContext: - """ - Retrieves the context for sub-sworker ``worker_id``. If not found, or ``reset`` - is True, context is recreated from ``self.base_context``. Additionnal parameters - are used to update the context. The resulting context is kept and will be - returned for later calls. 
- """ - if reset or worker_id not in self.current_contexts: - context = self.base_context - kwargs["worker_id"] = worker_id - kwargs["additional_context"] = self.base_context.additional_context.copy() - else: - context = self.current_contexts[worker_id] - - if kwargs: - context = context.evolve(**kwargs) - self.current_contexts[worker_id] = context - - return context + self._loop_task: asyncio.Future | None = None + self._new_job_event = asyncio.Event() + self._running_jobs: dict[asyncio.Task, job_context.JobContext] = {} + self._job_semaphore = asyncio.Semaphore(self.concurrency) + self._stop_event = asyncio.Event() + self.shutdown_graceful_timeout = shutdown_graceful_timeout + self._job_ids_to_abort: dict[int, job_context.AbortReason] = dict() - async def listener(self): - assert self.notify_event - return await self.job_manager.listen_for_jobs( - event=self.notify_event, - queues=self.queues, + def stop(self): + if self._stop_event.is_set(): + return + self.logger.info( + "Stop requested", + extra=self._log_extra( + context=None, action="stopping_worker", job_result=None + ), ) - async def periodic_deferrer(self): + self._stop_event.set() + + async def _periodic_deferrer(self): deferrer = periodic.PeriodicDeferrer( registry=self.app.periodic_registry, **self.app.periodic_defaults, ) return await deferrer.worker() - async def run(self) -> None: - self.notify_event = asyncio.Event() - self.stop_requested = False + def find_task(self, task_name: str) -> tasks.Task: + try: + return self.app.tasks[task_name] + except KeyError as exc: + raise exceptions.TaskNotFound from exc - self.logger.info( - f"Starting worker on {self.base_context.queues_display}", - extra=self.base_context.log_extra( - action="start_worker", queues=self.queues - ), - ) - context = contextlib.nullcontext() - if self.install_signal_handlers: - context = signals.on_stop(self.stop) - - with context: - side_coros = [self.periodic_deferrer()] - if self.wait and self.listen_notify: - 
side_coros.append(self.listener()) - - await utils.run_tasks( - main_coros=( - self.single_worker(worker_id=worker_id) - for worker_id in range(self.concurrency) - ), - side_coros=side_coros, - graceful_stop_callback=self.stop, + def _log_extra( + self, + action: str, + context: job_context.JobContext | None, + job_result: job_context.JobResult | None, + **kwargs: Any, + ) -> types.JSONDict: + worker: types.JSONDict = { + "name": self.worker_name, + "worker_id": self.worker_id, + "job_id": context.job.id if context else None, + "queues": list(self.queues or []), + } + extra: types.JSONDict = { + "action": action, + "worker": worker, + } + if context: + extra["job"] = context.job.log_context() + + return { + **extra, + **(job_result.as_dict() if job_result else {}), + **kwargs, + } + + async def _persist_job_status( + self, + job: jobs.Job, + status: jobs.Status, + retry_decision: retry.RetryDecision | None, + context: job_context.JobContext, + job_result: job_context.JobResult | None, + ): + if retry_decision: + await self.app.job_manager.retry_job( + job=job, + retry_at=retry_decision.retry_at, + lock=retry_decision.lock, + priority=retry_decision.priority, + queue=retry_decision.queue, + ) + else: + delete_job = { + jobs.DeleteJobCondition.ALWAYS: True, + jobs.DeleteJobCondition.NEVER: False, + jobs.DeleteJobCondition.SUCCESSFUL: status == jobs.Status.SUCCEEDED, + }[self.delete_jobs] + await self.app.job_manager.finish_job( + job=job, status=status, delete_job=delete_job ) - self.logger.info( - f"Stopped worker on {self.base_context.queues_display}", - extra=self.base_context.log_extra(action="stop_worker", queues=self.queues), - ) - self.notify_event = None - - async def single_worker(self, worker_id: int): - current_timeout = self.timeout * (worker_id + 1) - while not self.stop_requested: - job = await self.job_manager.fetch_job(self.queues) - if job: - await self.process_job(job=job, worker_id=worker_id) - else: - if not self.wait or self.stop_requested: - 
break - await self.wait_for_job(timeout=current_timeout) - current_timeout = self.timeout * self.concurrency + assert job.id + self._job_ids_to_abort.pop(job.id, None) - async def wait_for_job(self, timeout: float): - assert self.notify_event self.logger.debug( - f"Waiting for new jobs on {self.base_context.queues_display}", - extra=self.base_context.log_extra( - action="waiting_for_jobs", queues=self.queues + f"Acknowledged job completion {job.call_string}", + extra=self._log_extra( + action="finish_task", + context=context, + status=status, + job_result=job_result, ), ) - self.notify_event.clear() - try: - await asyncio.wait_for(self.notify_event.wait(), timeout=timeout) - except asyncio.TimeoutError: - pass + + def _log_job_outcome( + self, + status: jobs.Status, + context: job_context.JobContext, + job_result: job_context.JobResult | None, + job_retry: exceptions.JobRetry | None, + exc_info: bool | BaseException = False, + ): + if status == jobs.Status.SUCCEEDED: + log_action, log_title = "job_success", "Success" + elif status == jobs.Status.ABORTED and not job_retry: + log_action, log_title = "job_aborted", "Aborted" + elif status == jobs.Status.ABORTED and job_retry: + log_action, log_title = "job_aborted_retry", "Aborted, to retry" + elif job_retry: + log_action, log_title = "job_error_retry", "Error, to retry" else: - self.notify_event.clear() + log_action, log_title = "job_error", "Error" + + text = f"Job {context.job.call_string} ended with status: {log_title}, " + # in practice we should always have a start and end timestamp here + # but in theory the JobResult class allows it to be None + if job_result and job_result.start_timestamp and job_result.end_timestamp: + duration = job_result.end_timestamp - job_result.start_timestamp + text += f"lasted {duration:.3f} s" + if job_result and job_result.result: + text += f" - Result: {job_result.result}"[:250] + + extra = self._log_extra( + context=context, action=log_action, job_result=job_result + ) + 
log_level = ( + logging.ERROR + if status == jobs.Status.FAILED and not job_retry + else logging.INFO + ) + logger.log(log_level, text, extra=extra, exc_info=exc_info) - async def process_job(self, job: jobs.Job, worker_id: int = 0) -> None: - context = self.context_for_worker(worker_id=worker_id, job=job) + async def _process_job(self, context: job_context.JobContext): + """ + Processes a given job and persists its status + """ + task = self.app.tasks.get(context.job.task_name) + job_retry = None + exc_info = False + retry_decision = None + job = context.job - self.logger.debug( - f"Loaded job info, about to start job {job.call_string}", - extra=context.log_extra(action="loaded_job_info"), - ) + job_result = job_context.JobResult(start_timestamp=context.start_timestamp) - status, retry_decision = None, None try: - await self.run_job(job=job, worker_id=worker_id) - status = jobs.Status.SUCCEEDED - except exceptions.JobAborted: - status = jobs.Status.ABORTED - - except exceptions.JobError as e: - status = jobs.Status.FAILED - if e.retry_exception: - retry_decision = e.retry_exception.retry_decision - if e.critical and e.__cause__: - raise e.__cause__ - - except exceptions.TaskNotFound as exc: - status = jobs.Status.FAILED - self.logger.exception( - f"Task was not found: {exc}", - extra=context.log_extra(action="task_not_found", exception=str(exc)), + if not task: + raise exceptions.TaskNotFound + + self.logger.debug( + f"Loaded job info, about to start job {job.call_string}", + extra=self._log_extra( + context=context, action="loaded_job_info", job_result=job_result + ), + ) + + self.logger.info( + f"Starting job {job.call_string}", + extra=self._log_extra( + context=context, action="start_job", job_result=job_result + ), ) - finally: - if retry_decision: - await self.job_manager.retry_job( - job=job, - retry_at=retry_decision.retry_at, - priority=retry_decision.priority, - queue=retry_decision.queue, - lock=retry_decision.lock, - ) - else: - assert status is not 
None - delete_job = { - DeleteJobCondition.ALWAYS: True, - DeleteJobCondition.NEVER: False, - DeleteJobCondition.SUCCESSFUL: status == jobs.Status.SUCCEEDED, - }[self.delete_jobs] + exc_info: bool | BaseException = False - await self.job_manager.finish_job( - job=job, status=status, delete_job=delete_job + async def ensure_async() -> Callable[..., Awaitable]: + await_func: Callable[..., Awaitable] + if inspect.iscoroutinefunction(task.func): + await_func = task + else: + await_func = functools.partial(utils.sync_to_async, task) + + job_args = [context] if task.pass_context else [] + task_result = await await_func(*job_args, **job.task_kwargs) + # In some cases, the task function might be a synchronous function + # that returns an awaitable without actually being a + # coroutinefunction. In that case, in the await above, we haven't + # actually called the task, but merely generated the awaitable that + # implements the task. In that case, we want to wait this awaitable. + # It's easy enough to be in that situation that the best course of + # action is probably to await the awaitable. + # It's not even sure it's worth emitting a warning + if inspect.isawaitable(task_result): + task_result = await task_result + + return task_result + + job_result.result = await ensure_async() + + except BaseException as e: + exc_info = e + + # aborted job can be retried if it is caused by a shutdown. 
+ if not (isinstance(e, exceptions.JobAborted)) or ( + context.abort_reason() == job_context.AbortReason.SHUTDOWN + ): + job_retry = ( + task.get_retry_exception(exception=e, job=job) if task else None ) + retry_decision = job_retry.retry_decision if job_retry else None + if isinstance(e, exceptions.TaskNotFound): + self.logger.exception( + f"Task was not found: {e}", + extra=self._log_extra( + context=context, + action="task_not_found", + exception=str(e), + job_result=job_result, + ), + ) + finally: + job_result.end_timestamp = time.time() + + if isinstance(exc_info, exceptions.JobAborted) or isinstance( + exc_info, asyncio.CancelledError + ): + status = jobs.Status.ABORTED + elif exc_info: + status = jobs.Status.FAILED + else: + status = jobs.Status.SUCCEEDED + + self._log_job_outcome( + status=status, + context=context, + job_result=job_result, + job_retry=job_retry, + exc_info=exc_info, + ) - self.logger.debug( - f"Acknowledged job completion {job.call_string}", - extra=context.log_extra(action="finish_task", status=status), + persist_job_status_task = asyncio.create_task( + self._persist_job_status( + job=job, + status=status, + retry_decision=retry_decision, + context=context, + job_result=job_result, + ) ) - # Remove job information from the current context - self.context_for_worker(worker_id=worker_id, reset=True) + try: + await asyncio.shield(persist_job_status_task) + except asyncio.CancelledError: + await persist_job_status_task + raise + + async def _fetch_and_process_jobs(self): + """Fetch and process jobs until there is no job left or asked to stop""" + while not self._stop_event.is_set(): + acquire_sem_task = asyncio.create_task(self._job_semaphore.acquire()) + job = None + try: + await utils.wait_any(acquire_sem_task, self._stop_event.wait()) + if self._stop_event.is_set(): + break - def find_task(self, task_name: str) -> tasks.Task: - try: - return self.app.tasks[task_name] - except KeyError as exc: - raise exceptions.TaskNotFound from exc + 
assert self.worker_id is not None + job = await self.app.job_manager.fetch_job( + queues=self.queues, worker_id=self.worker_id + ) + finally: + if (not job or self._stop_event.is_set()) and acquire_sem_task.done(): + self._job_semaphore.release() + self._new_job_event.clear() + + if not job: + break + + job_id = job.id + + context = job_context.JobContext( + app=self.app, + worker_name=self.worker_name, + worker_queues=self.queues, + additional_context=self.additional_context.copy() + if self.additional_context + else {}, + job=job, + abort_reason=lambda: self._job_ids_to_abort.get(job_id) + if job_id + else None, + start_timestamp=time.time(), + ) + job_task = asyncio.create_task( + self._process_job(context), + name=f"process job {job.task_name}[{job.id}]", + ) + self._running_jobs[job_task] = context - async def run_job(self, job: jobs.Job, worker_id: int) -> None: - task_name = job.task_name + def on_job_complete(task: asyncio.Task): + del self._running_jobs[task] + self._job_semaphore.release() - task = self.find_task(task_name=task_name) + job_task.add_done_callback(on_job_complete) - context = self.context_for_worker(worker_id=worker_id, task=task) + async def run(self): + """ + Run the worker + This will run forever until asked to stop/cancelled, or until no more job is available is configured not to wait + """ + logger.debug("Pruning stalled workers with old heartbeats") + pruned_workers = await self.app.job_manager.prune_stalled_workers( + self.stalled_worker_timeout + ) + if pruned_workers: + logger.debug(f"Pruned stalled workers: {', '.join(str(pruned_workers))}") - start_time = time.time() - context.job_result.start_timestamp = start_time + self.worker_id = await self.app.job_manager.register_worker() + logger.debug(f"Registered worker {self.worker_id} in the database") - self.logger.info( - f"Starting job {job.call_string}", - extra=context.log_extra(action="start_job"), - ) - job_args = [] - if task.pass_context: - job_args.append(context) - - # 
Initialise logging variables - task_result = None - log_title = "Error" - log_action = "job_error" - log_level = logging.ERROR - exc_info: bool | BaseException = False - - await_func: Callable[..., Awaitable] - if inspect.iscoroutinefunction(task.func): - await_func = task - else: - await_func = functools.partial(utils.sync_to_async, task) + self.run_task = asyncio.current_task() + loop_task = asyncio.create_task(self._run_loop(), name="worker loop") try: - task_result = await await_func(*job_args, **job.task_kwargs) - # In some cases, the task function might be a synchronous function - # that returns an awaitable without actually being a - # coroutinefunction. In that case, in the await above, we haven't - # actually called the task, but merely generated the awaitable that - # implements the task. In that case, we want to wait this awaitable. - # It's easy enough to be in that situation that the best course of - # action is probably to await the awaitable. - # It's not even sure it's worth emitting a warning - if inspect.isawaitable(task_result): - task_result = await task_result - - except exceptions.JobAborted as e: - task_result = None - log_title = "Aborted" - log_action = "job_aborted" - log_level = logging.INFO - exc_info = e + # shield the loop task from cancellation + # instead, a stop event is set to enable graceful shutdown + await asyncio.shield(loop_task) + except asyncio.CancelledError: + # worker.run is cancelled, usually by cancelling app.run_worker_async + self.stop() + await loop_task raise - except BaseException as e: - task_result = None - log_title = "Error" - log_action = "job_error" - log_level = logging.ERROR - exc_info = e - critical = not isinstance(e, Exception) + async def _handle_notification( + self, *, channel: str, notification: jobs.Notification + ): + if notification["type"] == "job_inserted": + self._new_job_event.set() + elif notification["type"] == "abort_job_requested": + 
self._handle_abort_jobs_requested([notification["job_id"]]) + + async def _update_heartbeat(self): + while True: + logger.debug( + f"Waiting for {self.update_heartbeat_interval}s before updating worker heartbeat" + ) + await asyncio.sleep(self.update_heartbeat_interval) - assert job.id - status = await self.job_manager.get_job_status_async(job_id=job.id) + logger.debug(f"Updating heartbeat of worker {self.worker_id}") + assert self.worker_id is not None + await self.app.job_manager.update_heartbeat(self.worker_id) - if status == jobs.Status.ABORTING: - retry_exception = None - else: - retry_exception = task.get_retry_exception(exception=e, job=job) - if retry_exception: - log_title = "Error, to retry" - log_action = "job_error_retry" - log_level = logging.INFO + async def _poll_jobs_to_abort(self): + while True: + logger.debug( + f"waiting for {self.abort_job_polling_interval}s before querying jobs to abort" + ) + await asyncio.sleep(self.abort_job_polling_interval) + if not self._running_jobs: + logger.debug("Not querying jobs to abort because no job is running") + continue + try: + job_ids = await self.app.job_manager.list_jobs_to_abort_async() + self._handle_abort_jobs_requested(job_ids) + except Exception as error: + logger.exception( + f"poll_jobs_to_abort error: {error!r}", + exc_info=error, + extra={ + "action": "poll_jobs_to_abort_error", + }, + ) + # recover from errors and continue polling - raise exceptions.JobError( - retry_exception=retry_exception, critical=critical - ) from e + def _handle_abort_jobs_requested(self, job_ids: Iterable[int]): + running_job_ids = {c.job.id for c in self._running_jobs.values() if c.job.id} + new_job_ids_to_abort = (running_job_ids & set(job_ids)) - set( + self._job_ids_to_abort + ) + + for process_job_task, context in self._running_jobs.items(): + if context.job.id in new_job_ids_to_abort: + self._abort_job( + process_job_task, context, job_context.AbortReason.USER_REQUEST + ) + def _abort_job( + self, + 
process_job_task: asyncio.Task, + context: job_context.JobContext, + reason: job_context.AbortReason, + ): + assert context.job.id + self._job_ids_to_abort[context.job.id] = reason + + log_message: str + task = self.app.tasks.get(context.job.task_name) + if not task: + log_message = "Received a request to abort a job but the job has no associated task. No action to perform" + elif not asyncio.iscoroutinefunction(task.func): + log_message = "Received a request to abort a synchronous job. Job is responsible for aborting by checking context.should_abort" else: - log_title = "Success" - log_action = "job_success" - log_level = logging.INFO - exc_info = False - finally: - end_time = time.time() - duration = end_time - start_time - context.job_result.end_timestamp = end_time - context.job_result.result = task_result + log_message = "Received a request to abort an asynchronous job. Cancelling asyncio task" + process_job_task.cancel() + + self.logger.debug( + log_message, + extra=self._log_extra(action="abort_job", context=context, job_result=None), + ) + + async def _shutdown(self, side_tasks: list[asyncio.Task]): + """ + Gracefully shutdown the worker by cancelling side tasks + and waiting for all pending jobs. 
+ """ + await utils.cancel_and_capture_errors(side_tasks) - extra = context.log_extra(action=log_action) + now = time.time() + for context in self._running_jobs.values(): + duration = now - context.start_timestamp + self.logger.info( + f"Waiting for job to finish: worker: {context.job.call_string} (started {duration:.3f} s ago)", + extra=self._log_extra( + context=None, action="ending_job", job_result=None + ), + ) - text = ( - f"Job {job.call_string} ended with status: {log_title}, " - f"lasted {duration:.3f} s" + if self._running_jobs: + await asyncio.wait( + self._running_jobs, timeout=self.shutdown_graceful_timeout ) - if task_result: - text += f" - Result: {task_result}"[:250] - self.logger.log(log_level, text, extra=extra, exc_info=exc_info) - def stop(self): - # Ensure worker will stop after finishing their task - self.stop_requested = True - # Ensure workers currently waiting are awakened - if self.notify_event: - self.notify_event.set() + # As a reminder, tasks have a done callback that + # removes them from the self._running_jobs dict, + # so as the tasks stop, this dict will shrink. + if self._running_jobs: + self.logger.info( + f"{len(self._running_jobs)} jobs still running after graceful timeout. 
Aborting them", + extra=self._log_extra( + action="stop_worker", + queues=self.queues, + context=None, + job_result=None, + ), + ) + await self._abort_running_jobs() - # Logging + assert self.worker_id is not None + await self.app.job_manager.unregister_worker(self.worker_id) + logger.debug(f"Unregistered finished worker {self.worker_id} from the database") + self.worker_id = None self.logger.info( - "Stop requested", - extra=self.base_context.log_extra(action="stopping_worker"), + f"Stopped worker on {utils.queues_display(self.queues)}", + extra=self._log_extra( + action="stop_worker", queues=self.queues, context=None, job_result=None + ), ) - contexts = [ - context for context in self.current_contexts.values() if context.job + async def _abort_running_jobs(self): + for task, context in self._running_jobs.items(): + self._abort_job(task, context, job_context.AbortReason.SHUTDOWN) + + await asyncio.gather(*self._running_jobs, return_exceptions=True) + + def _start_side_tasks(self) -> list[asyncio.Task]: + """Start side tasks such as periodic deferrer and notification listener""" + side_tasks = [ + asyncio.create_task(self._update_heartbeat(), name="update_heartbeats"), + asyncio.create_task(self._periodic_deferrer(), name="deferrer"), + asyncio.create_task(self._poll_jobs_to_abort(), name="poll_jobs_to_abort"), ] - now = time.time() - for context in contexts: - self.logger.info( - "Waiting for job to finish: " - + context.job_description(current_timestamp=now), - extra=context.log_extra(action="ending_job"), + if self.listen_notify: + listener_coro = self.app.job_manager.listen_for_jobs( + on_notification=self._handle_notification, + queues=self.queues, ) + side_tasks.append(asyncio.create_task(listener_coro, name="listener")) + return side_tasks + + async def _run_loop(self): + """ + Run all side coroutines, then start fetching/processing jobs in a loop + """ + self.logger.info( + f"Starting worker on {utils.queues_display(self.queues)}", + extra=self._log_extra( 
+ action="start_worker", context=None, queues=self.queues, job_result=None + ), + ) + self._new_job_event.clear() + self._stop_event.clear() + self._running_jobs = {} + self._job_semaphore = asyncio.Semaphore(self.concurrency) + side_tasks = self._start_side_tasks() + + context = ( + signals.on_stop(self.stop) + if self.install_signal_handlers + else contextlib.nullcontext() + ) + + try: + with context: + await self._fetch_and_process_jobs() + if not self.wait: + self.logger.info( + "No job found. Stopping worker because wait=False", + extra=self._log_extra( + context=None, + action="stop_worker", + queues=self.queues, + job_result=None, + ), + ) + self._stop_event.set() + + while not self._stop_event.is_set(): + # wait for a new job notification, a stop even or the next polling interval + await utils.wait_any( + self._new_job_event.wait(), + asyncio.sleep(self.fetch_job_polling_interval), + self._stop_event.wait(), + ) + await self._fetch_and_process_jobs() + finally: + await self._shutdown(side_tasks=side_tasks) diff --git a/pyproject.toml b/pyproject.toml index 2b27b1c9c..2cf38e5cf 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,133 +1,127 @@ [build-system] -requires = ["poetry-core", "poetry-dynamic-versioning"] -build-backend = "poetry_dynamic_versioning.backend" +requires = ["hatchling", "uv-dynamic-versioning"] +build-backend = "hatchling.build" -[tool.poetry] +[tool.hatch.version] +source = "uv-dynamic-versioning" + +[tool.uv-dynamic-versioning] +pattern = "default-unprefixed" + +[tool.hatch.build.targets.sdist] +include = ["procrastinate"] +exclude = ["procrastinate/demos"] + +[tool.hatch.build.targets.wheel] +exclude = ["procrastinate/demos"] + +[project] name = "procrastinate" -version = "0.0.0" +dynamic = ["version"] description = "Postgres-based distributed task processing library" -authors = ["Joachim Jablon", "Eric Lemoine", "Kai Schlamp"] -license = "MIT License" +license-files = ["LICENSE.md"] classifiers = [ "Development Status :: 4 - 
Beta", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", ] +authors = [ + { name = "Joachim Jablon", email = "ewjoachim@gmail.com" }, + { name = "Eric Lemoine" }, + { name = "Kai Schlamp" }, +] readme = "README.md" keywords = ["postgres", "task-queue"] -homepage = "https://procrastinate.readthedocs.io/" -repository = "https://github.com/procrastinate-org/procrastinate/" -documentation = "https://procrastinate.readthedocs.io/" - -[tool.poetry.scripts] -procrastinate = 'procrastinate.cli:main' +requires-python = ">=3.9" +dependencies = [ + "psycopg[pool]", + "asgiref", + "attrs", + "contextlib2; python_version < '3.10'", + "croniter", + "python-dateutil", + "typing-extensions", +] -[tool.poetry.dependencies] -python = "^3.9" -aiopg = { version = "*", optional = true } -anyio = "*" -asgiref = "*" -attrs = "*" -contextlib2 = { version = "*", python = "<3.10" } -croniter = "*" -django = { version = ">=2.2", optional = true } -psycopg = { extras = ["pool"], version = "*" } -psycopg2-binary = { version = "*", optional = true } -python-dateutil = "*" -sqlalchemy = { version = "^2.0", optional = true } -sphinx = { version = "*", optional = true } - -[tool.poetry.extras] -django = ["django"] -sqlalchemy = ["sqlalchemy"] +[project.optional-dependencies] +django = ["django>=2.2"] +sqlalchemy = ["sqlalchemy~=2.0"] aiopg = ["aiopg", "psycopg2-binary"] psycopg2 = ["psycopg2-binary"] sphinx = ["sphinx"] -[tool.poetry.group.types] -optional = true - -[tool.poetry.group.types.dependencies] -django-stubs = "*" - -[tool.poetry.group.release.dependencies] -dunamai = "*" - -[tool.poetry.group.lint_format.dependencies] -ruff = "*" - -[tool.poetry.group.pg_implem.dependencies] -aiopg = "*" -sqlalchemy = { extras = ["mypy"], version = "*" } -psycopg2-binary = "*" -psycopg = [ - { version = "*", extras = [ - "binary", - "pool", - ], markers = "sys_platform != 'darwin' or platform_machine != 'arm64'" }, - { version = "*", extras = [ - "binary", - "pool", - ], 
markers = "sys_platform == 'darwin' and platform_machine == 'arm64'", python = ">=3.10" }, - { version = "*", extras = [ - "pool", - ], markers = "sys_platform == 'darwin' and platform_machine == 'arm64'", python = "<3.10" }, -] +[project.urls] +homepage = "https://procrastinate.readthedocs.io/" +source = "https://github.com/procrastinate-org/procrastinate/" +documentation = "https://procrastinate.readthedocs.io/" +issues = "https://github.com/procrastinate-org/procrastinate/issues" +changelog = "https://github.com/procrastinate-org/procrastinate/releases" -[tool.poetry.group.django.dependencies] -django = [ - { version = "4.2.*", python = "<3.10" }, - { version = "*", python = ">=3.10" }, -] +[project.scripts] +procrastinate = 'procrastinate.cli:main' -[tool.poetry.group.test.dependencies] -pytest-asyncio = "*" -pytest-cov = "*" -pytest-django = "*" -pytest-mock = "*" -migra = "*" -# migra depends on schemainspect, which has an implicit dependency on setuptools -# (pkg_resources). -setuptools = { version = "*" } - -[tool.poetry.group.docs.dependencies] -django = ">=2.2" -furo = "*" -Sphinx = "*" -sphinx-copybutton = "*" -sphinx-github-changelog = "*" -sphinxcontrib-programoutput = "*" -myst-parser = "*" - -[tool.poetry-dynamic-versioning] -enable = true -pattern = '(?P\d+(\.\d+)*)([-._]?((?P[a-zA-Z]+)[-._]?(?P\d+)?))?$' +[tool.uv] +cache-keys = [{ git = { commit = true, tags = true } }] +required-version = ">=0.5.21" +default-groups = ["release", "lint_format", "pg_implem", "test", "docs"] + +[dependency-groups] +types = ["django-stubs"] +release = ["dunamai"] +lint_format = ["ruff", "django-upgrade"] +pg_implem = [ + "aiopg", + "sqlalchemy", + "psycopg2-binary", + "psycopg[binary,pool]; sys_platform != 'darwin' or platform_machine != 'arm64'", + "psycopg[binary,pool]; sys_platform == 'darwin' and platform_machine == 'arm64' and python_version >= '3.10'", + "psycopg[pool]; sys_platform == 'darwin' and platform_machine == 'arm64' and python_version < '3.10'", +] 
+test = [ + "pytest-asyncio", + "pytest-benchmark", + "pytest-cov", + "pytest-django", + "pytest-mock", + "migra", + # migra depends on schemainspect, which has an implicit dependency on setuptools + # (pkg_resources). + "setuptools", +] +docs = [ + "django>=2.2", + "furo", + "Sphinx", + "sphinx-copybutton", + "sphinx-github-changelog", + "sphinxcontrib-programoutput", + "sphinxcontrib-mermaid", + "myst-parser", +] +dev = ["ruff", "pyright", "doc8"] [tool.pytest.ini_options] -addopts = [ - "--cov-report=term-missing", - "--cov-report=html", - "--cov-branch", - "--cov=procrastinate", - "-vv", - "--strict-markers", - "-rfE", - "--reuse-db", -] +addopts = ["-vv", "--strict-markers", "-rfE", "--reuse-db", "-m not benchmark"] testpaths = [ "tests/unit", "tests/integration", "tests/acceptance", "tests/migration", + "tests/benchmarks", ] +# https://adamj.eu/tech/2025/01/08/django-silence-exception-ignored-outputwrapper/ +# https://code.djangoproject.com/ticket/36056 filterwarnings = """ error + ignore:.+django.core.management.base.OutputWrapper:pytest.PytestUnraisableExceptionWarning ignore:unclosed.+:ResourceWarning + ignore:pkg_resources is deprecated as an API:UserWarning """ asyncio_mode = "auto" asyncio_default_fixture_loop_scope = "function" +django_find_project = false DJANGO_SETTINGS_MODULE = "tests.acceptance.django_settings" - +pythonpath = ["."] [tool.coverage.run] relative_files = true @@ -146,9 +140,8 @@ exclude_lines = [ "[ ]+\\.\\.\\.$", ] - [tool.pyright] -exclude = ["tests", ".venv"] +exclude = ["tests", ".venv", "scripts"] [tool.ruff] target-version = "py39" @@ -176,3 +169,15 @@ required-imports = ["from __future__ import annotations"] [tool.doc8] max-line-length = 88 ignore-path = "docs/_build,.venv" + +[tool.sync-pre-commit-with-uv.pyright-python] +pypi_package_name = "pyright" +additional_dependencies_uv_params = [ + "--group=types", + "--no-group=release", + "--no-group=lint_format", + "--no-group=pg_implem", + "--no-group=test", + "--no-group=docs", 
+ "--all-extras", +] diff --git a/scripts/README.md b/scripts/README.md index da9c7d83d..51f88fbcd 100644 --- a/scripts/README.md +++ b/scripts/README.md @@ -3,6 +3,6 @@ Each individual file in this folder is a script, aimed to capture a command for the project. -These scripts are expected to be run as-is, not in poetry or a virtualenv. +These scripts are expected to be run as-is, not in uv or a virtualenv. See [CONTRIBUTING.md](../CONTRIBUTING.md) for more details. diff --git a/scripts/bootstrap b/scripts/bootstrap index 3aadf7063..cc0789964 100755 --- a/scripts/bootstrap +++ b/scripts/bootstrap @@ -1,25 +1,25 @@ #!/usr/bin/env bash set -eu -# This script will take care of installing pipx for you (mainly on debian-based -# installations). It should not be run in a virtual environment. pipx, poetry -# and pre-commit are all tools that manage their own virtual environements, and -# are useful as tools to have around, and unlikely to cause version clashes -# between projects. +# This script will take care of installing uv (though if you're in a real +# computer and not a container or anything, you may rather want to install it +# differently, e.g. via brew). +# pre-commit and nox are all tools that manage their own virtual environements, +# and are useful as tools to have around, and unlikely to cause version clashes +# between projects, so we're installing them too, via uv, if they're not around. -if ! which pre-commit || ! which poetry; then - if ! which pipx; then - python3 -m pip install --user pipx - python3 -m pipx ensurepath +if ! which uv || ! which pre-commit || ! which nox; then + if ! which uv; then + python3 -m pip install --user uv fi if ! which pre-commit; then - pipx install pre-commit + uv tool install pre-commit fi - if ! which poetry; then - pipx install poetry + if ! 
which nox; then + uv tool install nox fi fi pre-commit install -poetry install --extras "django sqlalchemy" +uv sync --all-extras --all-groups diff --git a/scripts/docs b/scripts/docs index 803a29c41..0b0d48f73 100755 --- a/scripts/docs +++ b/scripts/docs @@ -1,4 +1,4 @@ #!/usr/bin/env bash set -eux -poetry run sphinx-build -EW docs docs/_build/html "$@" +uv run sphinx-build -EW docs docs/_build/html "$@" diff --git a/scripts/tests b/scripts/tests index a661bfdd9..7de780d79 100755 --- a/scripts/tests +++ b/scripts/tests @@ -1,4 +1,4 @@ #!/usr/bin/env bash set -eux -poetry run pytest "$@" +uv run pytest "$@" diff --git a/tests/acceptance/app.py b/tests/acceptance/app.py index 898a35acc..06a8e0699 100644 --- a/tests/acceptance/app.py +++ b/tests/acceptance/app.py @@ -74,6 +74,11 @@ def product_task(a, b): print(a * b) +@app.task() +def echo_task(value): + print(value) + + nb_tries = 0 diff --git a/tests/acceptance/django_settings.py b/tests/acceptance/django_settings.py index 97a4d761c..304451421 100644 --- a/tests/acceptance/django_settings.py +++ b/tests/acceptance/django_settings.py @@ -5,7 +5,7 @@ SECRET_KEY = "test" DATABASES = { "default": { - "ENGINE": "django.db.backends.postgresql_psycopg2", + "ENGINE": "django.db.backends.postgresql", "NAME": os.environ.get("PGDATABASE", "procrastinate"), "TEST": {"NAME": "procrastinate_django_test"}, }, diff --git a/tests/acceptance/test_async.py b/tests/acceptance/test_async.py index 953337d43..66bed75e4 100644 --- a/tests/acceptance/test_async.py +++ b/tests/acceptance/test_async.py @@ -8,6 +8,7 @@ from procrastinate import app as app_module from procrastinate.contrib import aiopg from procrastinate.exceptions import JobAborted +from procrastinate.job_context import JobContext from procrastinate.jobs import Status @@ -23,7 +24,7 @@ async def async_app(request, psycopg_connector, connection_params): yield app -async def test_defer(async_app): +async def test_defer(async_app: app_module.App): sum_results = [] 
product_results = [] @@ -46,7 +47,33 @@ async def product_task(a, b): assert product_results == [12] -async def test_cancel(async_app): +@pytest.mark.skip_before_version("3.2.0") +async def test_batch_defer(async_app: app_module.App): + sum_results = [] + product_results = [] + + @async_app.task(queue="default", name="sum_task") + def sum_task(a, b): + sum_results.append(a + b) + + @async_app.task(queue="default", name="product_task") + async def product_task(a, b): + product_results.append(a * b) + + await sum_task.batch_defer_async({"a": 1, "b": 2}, {"a": 3, "b": 4}) + await sum_task.configure().batch_defer_async({"a": 5, "b": 6}, {"a": 7, "b": 8}) + await async_app.configure_task(name="sum_task").batch_defer_async( + {"a": 9, "b": 10}, {"a": 11, "b": 12} + ) + await product_task.batch_defer_async({"a": 3, "b": 4}, {"a": 5, "b": 6}) + + await async_app.run_worker_async(queues=["default"], wait=False) + + assert sum_results == [3, 7, 11, 15, 19, 23] + assert product_results == [12, 30] + + +async def test_cancel(async_app: app_module.App): sum_results = [] @async_app.task(queue="default", name="sum_task") @@ -62,7 +89,7 @@ async def sum_task(a, b): status = await async_app.job_manager.get_job_status_async(job_id) assert status == Status.CANCELLED - jobs = await async_app.job_manager.list_jobs_async() + jobs = list(await async_app.job_manager.list_jobs_async()) assert len(jobs) == 2 await async_app.run_worker_async(queues=["default"], wait=False) @@ -70,7 +97,7 @@ async def sum_task(a, b): assert sum_results == [7] -async def test_cancel_with_delete(async_app): +async def test_cancel_with_delete(async_app: app_module.App): sum_results = [] @async_app.task(queue="default", name="sum_task") @@ -83,7 +110,7 @@ async def sum_task(a, b): result = await async_app.job_manager.cancel_job_by_id_async(job_id, delete_job=True) assert result is True - jobs = await async_app.job_manager.list_jobs_async() + jobs = list(await async_app.job_manager.list_jobs_async()) assert 
len(jobs) == 1 await async_app.run_worker_async(queues=["default"], wait=False) @@ -91,7 +118,7 @@ async def sum_task(a, b): assert sum_results == [7] -async def test_no_job_to_cancel_found(async_app): +async def test_no_job_to_cancel_found(async_app: app_module.App): @async_app.task(queue="default", name="example_task") def example_task(): pass @@ -104,50 +131,157 @@ def example_task(): status = await async_app.job_manager.get_job_status_async(job_id) assert status == Status.TODO - jobs = await async_app.job_manager.list_jobs_async() + jobs = list(await async_app.job_manager.list_jobs_async()) assert len(jobs) == 1 -async def test_abort(async_app): - @async_app.task(queue="default", name="task1", pass_context=True) - async def task1(context): - while True: - await asyncio.sleep(0.02) - if await context.should_abort_async(): - raise JobAborted +@pytest.mark.parametrize("mode", ["listen", "poll"]) +async def test_abort_async_task(async_app: app_module.App, mode): + @async_app.task(queue="default", name="task1") + async def task1(): + await asyncio.sleep(0.5) - @async_app.task(queue="default", name="task2", pass_context=True) - def task2(context): + job_id = await task1.defer_async() + + abort_job_polling_interval = 0.1 + + worker_task = asyncio.create_task( + async_app.run_worker_async( + queues=["default"], + wait=False, + abort_job_polling_interval=abort_job_polling_interval, + listen_notify=True if mode == "listen" else False, + ) + ) + + await asyncio.sleep(0.05) + result = await async_app.job_manager.cancel_job_by_id_async(job_id, abort=True) + assert result is True + + # when listening for notifications, job should cancel within ms + # if notifications are disabled, job will only cancel after abort_job_polling_interval + await asyncio.wait_for( + worker_task, timeout=0.1 if mode == "listen" else abort_job_polling_interval * 2 + ) + + status = await async_app.job_manager.get_job_status_async(job_id) + assert status == Status.ABORTED + + 
+@pytest.mark.parametrize("mode", ["listen", "poll"]) +async def test_abort_sync_task(async_app: app_module.App, mode): + @async_app.task(queue="default", name="task1", pass_context=True) + def task1(context): while True: time.sleep(0.02) if context.should_abort(): raise JobAborted - job1_id = await task1.defer_async() - job2_id = await task2.defer_async() + job_id = await task1.defer_async() + + abort_job_polling_interval = 0.1 worker_task = asyncio.create_task( - async_app.run_worker_async(queues=["default"], wait=False) + async_app.run_worker_async( + queues=["default"], + wait=False, + abort_job_polling_interval=abort_job_polling_interval, + listen_notify=True if mode == "listen" else False, + ) ) await asyncio.sleep(0.05) - result = await async_app.job_manager.cancel_job_by_id_async(job1_id, abort=True) - assert result is True - - await asyncio.sleep(0.05) - result = await async_app.job_manager.cancel_job_by_id_async(job2_id, abort=True) + result = await async_app.job_manager.cancel_job_by_id_async(job_id, abort=True) assert result is True - await worker_task + # when listening for notifications, job should cancel within ms + # if notifications are disabled, job will only cancel after abort_job_polling_interval + await asyncio.wait_for( + worker_task, timeout=0.1 if mode == "listen" else abort_job_polling_interval * 2 + ) - status = await async_app.job_manager.get_job_status_async(job1_id) + status = await async_app.job_manager.get_job_status_async(job_id) assert status == Status.ABORTED - status = await async_app.job_manager.get_job_status_async(job2_id) - assert status == Status.ABORTED +async def test_concurrency(async_app: app_module.App): + results = [] + + @async_app.task(queue="default", name="appender") + async def appender(a: int): + await asyncio.sleep(0.1) + results.append(a) + + deferred_tasks = [appender.defer_async(a=i) for i in range(1, 101)] + for task in deferred_tasks: + await task + + # with 20 concurrent workers, 100 tasks should take about 
100/20 x 0.1 = 0.5s + # if there is no concurrency, it will take well over 2 seconds and fail + + start_time = time.time() + try: + await asyncio.wait_for( + async_app.run_worker_async(concurrency=20, wait=False), timeout=2 + ) + except asyncio.TimeoutError: + pytest.fail( + "Failed to process all jobs within 2 seconds. Is the concurrency respected?" + ) + duration = time.time() - start_time + + assert duration >= 0.5, ( + "processing jobs faster than expected. Is the concurrency respected?" + ) + + assert len(results) == 100, "Unexpected number of job executions" + + +async def test_polling(async_app: app_module.App): + @async_app.task(queue="default", name="sum") + async def sum(a: int, b: int): + return a + b + + # rely on polling to fetch new jobs + worker_task = asyncio.create_task( + async_app.run_worker_async( + concurrency=1, + wait=True, + listen_notify=False, + fetch_job_polling_interval=0.3, + ) + ) + + # long enough for worker to wait until next polling + await asyncio.sleep(0.1) + + job_id = await sum.defer_async(a=5, b=4) + + await asyncio.sleep(0.1) + + job_status = await async_app.job_manager.get_job_status_async(job_id=job_id) + + assert job_status == Status.TODO, "Job fetched faster than expected." + + await asyncio.sleep(0.2) + + job_status = await async_app.job_manager.get_job_status_async(job_id=job_id) -async def test_retry_when_aborting(async_app): + assert job_status == Status.SUCCEEDED, "Job should have been fetched and processed." 
+ + try: + worker_task.cancel() + await asyncio.wait_for( + worker_task, + timeout=0.5, + ) + except asyncio.CancelledError: + pass + except asyncio.TimeoutError: + pytest.fail("Failed to stop worker") + + +async def test_retry_when_aborting(async_app: app_module.App): attempts = 0 @async_app.task(queue="default", name="task1", pass_context=True, retry=True) @@ -164,3 +298,167 @@ async def example_task(context): status = await async_app.job_manager.get_job_status_async(job_id) assert status == Status.FAILED assert attempts == 1 + + +async def test_stop_worker(async_app: app_module.App): + results = [] + + @async_app.task(name="appender") + async def appender(a: int): + await asyncio.sleep(0.1) + results.append(a) + + job_ids: list[int] = [] + + job_ids.append(await appender.defer_async(a=1)) + job_ids.append(await appender.defer_async(a=2)) + + run_task = asyncio.create_task(async_app.run_worker_async(concurrency=2, wait=True)) + await asyncio.sleep(0.5) + + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await asyncio.wait_for(run_task, 1) + + for job_id in job_ids: + status = await async_app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED + + +async def test_stop_worker_aborts_async_jobs_past_shutdown_graceful_timeout( + async_app: app_module.App, +): + slow_job_cancelled = False + + @async_app.task(queue="default", name="fast_job") + async def fast_job(): + pass + + @async_app.task(queue="default", name="slow_job") + async def slow_job(): + nonlocal slow_job_cancelled + try: + await asyncio.sleep(2) + except asyncio.CancelledError: + slow_job_cancelled = True + raise + + fast_job_id = await fast_job.defer_async() + slow_job_id = await slow_job.defer_async() + + run_task = asyncio.create_task( + async_app.run_worker_async(wait=False, shutdown_graceful_timeout=0.3) + ) + await asyncio.sleep(0.05) + + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await run_task + + fast_job_status = await 
async_app.job_manager.get_job_status_async(fast_job_id) + slow_job_status = await async_app.job_manager.get_job_status_async(slow_job_id) + assert fast_job_status == Status.SUCCEEDED + assert slow_job_status == Status.ABORTED + + assert slow_job_cancelled + + +async def test_stop_worker_retries_async_jobs_past_shutdown_graceful_timeout( + async_app: app_module.App, +): + slow_job_cancelled = False + + @async_app.task(queue="default", name="slow_job", retry=1) + async def slow_job(): + nonlocal slow_job_cancelled + try: + await asyncio.sleep(2) + except asyncio.CancelledError: + slow_job_cancelled = True + raise + + slow_job_id = await slow_job.defer_async() + + run_task = asyncio.create_task( + async_app.run_worker_async(wait=False, shutdown_graceful_timeout=0.3) + ) + await asyncio.sleep(0.05) + + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await run_task + + slow_job_status = await async_app.job_manager.get_job_status_async(slow_job_id) + assert slow_job_cancelled + assert slow_job_status == Status.TODO + + +async def test_stop_worker_aborts_sync_jobs_past_shutdown_graceful_timeout( + async_app: app_module.App, +): + slow_job_cancelled = False + + @async_app.task(queue="default", name="fast_job") + async def fast_job(): + pass + + @async_app.task(queue="default", name="slow_job", pass_context=True) + def slow_job(context: JobContext): + nonlocal slow_job_cancelled + while True: + time.sleep(0.01) + if context.should_abort(): + slow_job_cancelled = True + raise JobAborted() + + fast_job_id = await fast_job.defer_async() + slow_job_id = await slow_job.defer_async() + + run_task = asyncio.create_task( + async_app.run_worker_async(wait=False, shutdown_graceful_timeout=0.3) + ) + await asyncio.sleep(0.05) + + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await run_task + + await asyncio.sleep(0.05) + + fast_job_status = await async_app.job_manager.get_job_status_async(fast_job_id) + slow_job_status = await 
async_app.job_manager.get_job_status_async(slow_job_id) + assert fast_job_status == Status.SUCCEEDED + assert slow_job_status == Status.ABORTED + + assert slow_job_cancelled + + +async def test_stop_worker_retries_sync_jobs_past_shutdown_graceful_timeout( + async_app: app_module.App, +): + slow_job_cancelled = False + + @async_app.task(queue="default", name="slow_job", retry=1, pass_context=True) + def slow_job(context: JobContext): + nonlocal slow_job_cancelled + while True: + time.sleep(0.05) + if context.should_abort(): + slow_job_cancelled = True + raise JobAborted() + + slow_job_id = await slow_job.defer_async() + + run_task = asyncio.create_task( + async_app.run_worker_async(wait=False, shutdown_graceful_timeout=0.3) + ) + await asyncio.sleep(0.05) + + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await run_task + + slow_job_status = await async_app.job_manager.get_job_status_async(slow_job_id) + assert slow_job_status == Status.TODO + + assert slow_job_cancelled diff --git a/tests/acceptance/test_nominal.py b/tests/acceptance/test_nominal.py index 27f32a29a..17c00fe0e 100644 --- a/tests/acceptance/test_nominal.py +++ b/tests/acceptance/test_nominal.py @@ -3,12 +3,25 @@ import signal import subprocess import time +from typing import Protocol import pytest +class RunningWorker(Protocol): + def __call__( + self, *args: str, name: str = "worker", app: str = "app" + ) -> subprocess.Popen[str]: ... + + +class Worker(Protocol): + def __call__( + self, *args: str, sleep: int = 1, app: str = "app" + ) -> tuple[str, str]: ... 
+ + @pytest.fixture -def worker(running_worker): +def worker(running_worker) -> Worker: def func(*queues, sleep=1, app="app"): process = running_worker(*queues, app=app) time.sleep(sleep) @@ -19,7 +32,7 @@ def func(*queues, sleep=1, app="app"): @pytest.fixture -def running_worker(process_env): +def running_worker(process_env) -> RunningWorker: def func(*queues, name="worker", app="app"): return subprocess.Popen( [ @@ -64,7 +77,7 @@ def test_nominal(defer, worker, app): assert stdout.strip() == "20" defer("two_fails") - stdout, stderr = worker(app=app) + stdout, stderr = worker(app=app, sleep=1.5) print(stdout, stderr) assert "Print something to stdout" in stdout assert stderr.count("Exception: This should fail") == 2 @@ -118,27 +131,31 @@ def test_lock(defer, running_worker): """ In this test, we launch 2 workers in two parallel threads, and ask them both to process tasks with the same lock. We check that the second task is - not started before the first one was finished. + not started before the first one was finished, irrespective of the task priority """ defer( "sleep_and_write", - ["--lock", "a"], + ["--lock", "a", "--priority", "1"], sleep=0.5, write_before="before-1", write_after="after-1", ) + + # Run the 2 workers concurrently + process1 = running_worker(name="worker1") + process2 = running_worker(name="worker2") + time.sleep(0.1) + defer( "sleep_and_write", - ["--lock", "a"], + ["--lock", "a", "--priority", "2"], sleep=0.001, write_before="before-2", write_after="after-2", ) - # Run the 2 workers concurrently - process1 = running_worker(name="worker1") - process2 = running_worker(name="worker2") - time.sleep(2) + + time.sleep(1) # And stop them process1.send_signal(signal.SIGINT) process2.send_signal(signal.SIGINT) @@ -154,10 +171,10 @@ def test_lock(defer, running_worker): lines = dict( line.split()[1:] for line in stdout.splitlines() if line.startswith("->") ) - lines = sorted(lines, key=lines.get) + sorted_lines = sorted(lines, key=lambda x: lines[x]) # 
Check that it all happened in order - assert lines == ["before-1", "after-1", "before-2", "after-2"] + assert sorted_lines == ["before-1", "after-1", "before-2", "after-2"] # If locks didnt work, we would have # ["before-1", "before-2", "after-2", "after-1"] @@ -185,7 +202,7 @@ def test_queueing_lock(defer, running_worker): ) -def test_periodic_deferrer(worker): +def test_periodic_deferrer(worker: Worker): # We're launching a worker that executes a periodic task every second, and # letting it run for 2.5 s. It should execute the task 3 times, and print to stdout: # 0 @@ -196,7 +213,6 @@ def test_periodic_deferrer(worker): print(stdout) print(stderr) - # We're making a dict from the output results = [ [int(a) for a in e[5:].split()] for e in stdout.splitlines() @@ -205,3 +221,18 @@ def test_periodic_deferrer(worker): assert [row[0] for row in results][:2] == [0, 1] assert [row[1] for row in results][:2] == [7, 7] assert results[1][2] == results[0][2] + 1 + + +@pytest.mark.skip_before_version("3.2.3") +def test_priority_order(defer, worker): + # Defer two jobs for the same task, one with higher priority + defer("echo_task", ["--priority", "5", "--lock", "a"], value="low") + defer("echo_task", ["--priority", "10", "--lock", "a"], value="high") + + stdout, stderr = worker() + print(stdout, stderr) + + # The job with the highest priority ("high") should be processed first + lines = stdout.splitlines() + assert lines[0] == "high" + assert lines[1] == "low" diff --git a/tests/acceptance/test_shell.py b/tests/acceptance/test_shell.py index 1194c3283..1613bc10c 100644 --- a/tests/acceptance/test_shell.py +++ b/tests/acceptance/test_shell.py @@ -96,19 +96,19 @@ async def test_shell(read, write, defer): await write("list_queues") assert await read() == [ - "default: 3 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborting: 0, aborted: 0)", - "other: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborting: 0, aborted: 0)", + "default: 3 
jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborted: 0)", + "other: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborted: 0)", ] await write("list_tasks") assert await read() == [ - "ns:tests.acceptance.app.sum_task: 3 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborting: 0, aborted: 0)", - "tests.acceptance.app.increment_task: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborting: 0, aborted: 0)", + "ns:tests.acceptance.app.sum_task: 3 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborted: 0)", + "tests.acceptance.app.increment_task: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborted: 0)", ] await write("list_locks") assert await read() == [ - "a: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", - "b: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborting: 0, aborted: 0)", - "lock: 2 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborting: 0, aborted: 0)", + "a: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", + "b: 1 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 1, aborted: 0)", + "lock: 2 jobs (todo: 0, doing: 0, succeeded: 0, failed: 0, cancelled: 2, aborted: 0)", ] diff --git a/tests/acceptance/test_sync.py b/tests/acceptance/test_sync.py index 1de3be071..38124e2e7 100644 --- a/tests/acceptance/test_sync.py +++ b/tests/acceptance/test_sync.py @@ -33,7 +33,7 @@ async def async_app(not_opened_psycopg_connector): # Even if we test the purely sync parts, we'll still need an async worker to execute # the tasks -async def test_defer(sync_app, async_app): +async def test_defer(sync_app: procrastinate.App, async_app: procrastinate.App): sum_results = [] product_results = [] @@ -58,7 +58,37 @@ async def product_task(a, b): assert product_results == [12] -async def test_nested_sync_to_async(sync_app, async_app): 
+@pytest.mark.skip_before_version("3.2.0") +async def test_batch_defer(sync_app: procrastinate.App, async_app: procrastinate.App): + sum_results = [] + product_results = [] + + @sync_app.task(queue="default", name="sum_task") + def sum_task(a, b): + sum_results.append(a + b) + + @sync_app.task(queue="default", name="product_task") + async def product_task(a, b): + product_results.append(a * b) + + sum_task.batch_defer({"a": 1, "b": 2}, {"a": 3, "b": 4}) + sum_task.configure().batch_defer({"a": 5, "b": 6}, {"a": 7, "b": 8}) + sync_app.configure_task(name="sum_task").batch_defer( + {"a": 9, "b": 10}, {"a": 11, "b": 12} + ) + product_task.batch_defer({"a": 3, "b": 4}, {"a": 5, "b": 6}) + + # We need to run the async app to execute the tasks + async_app.tasks = sync_app.tasks + await async_app.run_worker_async(queues=["default"], wait=False) + + assert sum_results == [3, 7, 11, 15, 19, 23] + assert product_results == [12, 30] + + +async def test_nested_sync_to_async( + sync_app: procrastinate.App, async_app: procrastinate.App +): sum_results = [] @sync_app.task(queue="default", name="sum_task") @@ -81,7 +111,9 @@ def _inner_sum_task_sync(a, b): assert sum_results == [3] -async def test_sync_task_runs_in_parallel(sync_app, async_app): +async def test_sync_task_runs_in_parallel( + sync_app: procrastinate.App, async_app: procrastinate.App +): results = [] @sync_app.task(queue="default", name="sync_task_1") @@ -105,7 +137,7 @@ def sync_task_2(): assert results == [0, 0, 1, 1, 2, 2] -async def test_cancel(sync_app, async_app): +async def test_cancel(sync_app: procrastinate.App, async_app: procrastinate.App): sum_results = [] @sync_app.task(queue="default", name="sum_task") @@ -131,7 +163,7 @@ def sum_task(a, b): assert sum_results == [7] -def test_no_job_to_cancel_found(sync_app): +def test_no_job_to_cancel_found(sync_app: procrastinate.App): @sync_app.task(queue="default", name="example_task") def example_task(): pass diff --git a/tests/benchmarks/__init__.py 
b/tests/benchmarks/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/benchmarks/conftest.py b/tests/benchmarks/conftest.py new file mode 100644 index 000000000..4a6d1a142 --- /dev/null +++ b/tests/benchmarks/conftest.py @@ -0,0 +1,20 @@ +from __future__ import annotations + +import asyncio + +import pytest + + +@pytest.fixture +def aio_benchmark(benchmark): + def _wrapper(func, *args, **kwargs): + if asyncio.iscoroutinefunction(func): + event_loop = asyncio.get_event_loop() + + @benchmark + def _(): + return event_loop.run_until_complete(func(*args, **kwargs)) + else: + benchmark(func, *args, **kwargs) + + return _wrapper diff --git a/tests/benchmarks/test_benchmark_async.py b/tests/benchmarks/test_benchmark_async.py new file mode 100644 index 000000000..5857cb524 --- /dev/null +++ b/tests/benchmarks/test_benchmark_async.py @@ -0,0 +1,47 @@ +from __future__ import annotations + +import pytest + +from procrastinate import app as app_module +from procrastinate.contrib import aiopg + + +@pytest.fixture(params=["psycopg_connector", "aiopg_connector"]) +async def async_app(request, psycopg_connector, connection_params): + app = app_module.App( + connector={ + "psycopg_connector": psycopg_connector, + "aiopg_connector": aiopg.AiopgConnector(**connection_params), + }[request.param] + ) + async with app.open_async(): + yield app + + +@pytest.mark.benchmark +def test_benchmark_1000_async_jobs(aio_benchmark, async_app: app_module.App): + @async_app.task(queue="default", name="simple_task") + async def simple_task(): + pass + + async def defer_and_process_jobs(): + for _ in range(1000): + await simple_task.defer_async() + + await async_app.run_worker_async(queues=["default"], wait=False) + + aio_benchmark(defer_and_process_jobs) + + +@pytest.mark.benchmark +def test_benchmark_1000_async_batch_jobs(aio_benchmark, async_app: app_module.App): + @async_app.task(queue="default", name="simple_task") + async def simple_task(): + pass + + async def 
defer_and_process_jobs(): + await simple_task.batch_defer_async(*[{} for _ in range(1000)]) + + await async_app.run_worker_async(queues=["default"], wait=False) + + aio_benchmark(defer_and_process_jobs) diff --git a/tests/benchmarks/test_benchmark_sync.py b/tests/benchmarks/test_benchmark_sync.py new file mode 100644 index 000000000..09c5bf467 --- /dev/null +++ b/tests/benchmarks/test_benchmark_sync.py @@ -0,0 +1,59 @@ +from __future__ import annotations + +import pytest + +import procrastinate +from procrastinate.contrib import psycopg2 + + +@pytest.fixture(params=["sync_psycopg_connector", "psycopg2_connector"]) +async def sync_app(request, sync_psycopg_connector, connection_params): + app = procrastinate.App( + connector={ + "sync_psycopg_connector": sync_psycopg_connector, + "psycopg2_connector": psycopg2.Psycopg2Connector(**connection_params), + }[request.param] + ) + + with app.open(): + yield app + + +@pytest.fixture +async def async_app(not_opened_psycopg_connector): + app = procrastinate.App(connector=not_opened_psycopg_connector) + async with app.open_async(): + yield app + + +@pytest.mark.benchmark +def test_benchmark_1000_sync_jobs( + aio_benchmark, sync_app: procrastinate.App, async_app: procrastinate.App +): + @sync_app.task(queue="default", name="sum_task") + def simple_task(): + pass + + async def defer_and_process_jobs(): + for _ in range(1000): + simple_task.defer() + + await async_app.run_worker_async(queues=["default"], wait=False) + + aio_benchmark(defer_and_process_jobs) + + +@pytest.mark.benchmark +def test_benchmark_1000_sync_batch_jobs( + aio_benchmark, sync_app: procrastinate.App, async_app: procrastinate.App +): + @sync_app.task(queue="default", name="sum_task") + def simple_task(): + pass + + async def defer_and_process_jobs(): + simple_task.batch_defer(*[{} for _ in range(1000)]) + + await async_app.run_worker_async(queues=["default"], wait=False) + + aio_benchmark(defer_and_process_jobs) diff --git a/tests/conftest.py 
b/tests/conftest.py index 836c610a7..2f41d32ea 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -8,12 +8,17 @@ import random import signal as stdlib_signal import string +import sys import uuid +from pathlib import Path +import django +import packaging.version import psycopg import psycopg.conninfo import psycopg.sql import pytest +from django.core.management.base import OutputWrapper from procrastinate import app as app_module from procrastinate import blueprints, builtin_tasks, jobs, schema, testing @@ -29,6 +34,70 @@ # that conflicts with our own "app" fixture pytest_plugins = ["sphinx.testing.fixtures"] +# Silence “Exception ignored in ... OutputWrapper”: +# ValueError: I/O operation on closed file. +# https://adamj.eu/tech/2025/01/08/django-silence-exception-ignored-outputwrapper/ +# https://code.djangoproject.com/ticket/36056 +if django.VERSION < (5, 2): + orig_unraisablehook = sys.unraisablehook + + def unraisablehook(unraisable): + print("A" * 30, unraisable) + if ( + unraisable.exc_type is ValueError + and unraisable.exc_value is not None + and unraisable.exc_value.args == ("I/O operation on closed file.",) + and isinstance(unraisable.object, OutputWrapper) + ): + print("B" * 30, "ignored") + return + orig_unraisablehook(unraisable) + + sys.unraisablehook = unraisablehook + + +def pytest_addoption(parser: pytest.Parser) -> None: + parser.addoption( + "--migrate-until", + action="store", + help="Migrate until a specific migration (including it), " + "otherwise the full schema is applied", + ) + + parser.addoption( + "--latest-version", + action="store", + help="Tells pytest what the latest version is so that " + "@pytest.mark.skip_before_version works", + ) + + +def pytest_configure(config): + # register an additional marker + config.addinivalue_line( + "markers", + "skip_before_version(version: str): mark test to run only on versions " + "strictly higher than param. 
Useful for acceptance tests running on the " + "stable version", + ) + + +def pytest_runtest_setup(item): + required_version = next( + (mark.args[0] for mark in item.iter_markers(name="skip_before_version")), None + ) + latest_version_str = item.config.getoption("--latest-version") + if required_version is None or latest_version_str is None: + return + + latest_version = packaging.version.Version(latest_version_str) + required_version = packaging.version.Version(required_version) + + if latest_version < required_version: + pytest.skip( + f"Skipping test on version {latest_version}, requires {required_version}" + ) + def cursor_execute(cursor, query, *identifiers): if identifiers: @@ -79,13 +148,27 @@ def _(dbname, template=None): @pytest.fixture(scope="session") -def setup_db(): +def setup_db(request: pytest.FixtureRequest): dbname = "procrastinate_test_template" db_create(dbname=dbname) connector = testing.InMemoryConnector() - with db_executor(dbname) as execute: - execute(schema.SchemaManager(connector=connector).get_schema()) + migrate_until = request.config.getoption("--migrate-until") + if migrate_until is None: + with db_executor(dbname) as execute: + execute(schema.SchemaManager(connector=connector).get_schema()) + else: + assert isinstance(migrate_until, str) + schema_manager = schema.SchemaManager(connector=connector) + migrations_path = Path(schema_manager.get_migrations_path()) + migrations = sorted(migrations_path.glob("*.sql")) + for migration in migrations: + with migration.open() as f: + with db_executor(dbname) as execute: + execute(f.read()) + + if migration.name == migrate_until: + break yield dbname @@ -126,7 +209,9 @@ def not_opened_sync_psycopg_connector(psycopg_connection_params): @pytest.fixture -async def psycopg_connector(not_opened_psycopg_connector): +async def psycopg_connector( + not_opened_psycopg_connector: async_psycopg_connector_module.PsycopgConnector, +): await not_opened_psycopg_connector.open_async() yield 
not_opened_psycopg_connector await not_opened_psycopg_connector.close_async() @@ -168,13 +253,13 @@ def reset_builtin_task_names(): @pytest.fixture -def not_opened_app(connector, reset_builtin_task_names): +def not_opened_app(connector, reset_builtin_task_names) -> app_module.App: return app_module.App(connector=connector) @pytest.fixture -def app(not_opened_app): - with not_opened_app.open() as app: +async def app(not_opened_app: app_module.App): + async with not_opened_app.open_async() as app: yield app @@ -184,7 +269,7 @@ def blueprint(): @pytest.fixture -def job_manager(app): +def job_manager(app: app_module.App): return app.job_manager diff --git a/tests/integration/contrib/aiopg/conftest.py b/tests/integration/contrib/aiopg/conftest.py index 353b773c5..701b6379f 100644 --- a/tests/integration/contrib/aiopg/conftest.py +++ b/tests/integration/contrib/aiopg/conftest.py @@ -27,5 +27,5 @@ async def _(*, open: bool = True, **kwargs): @pytest.fixture -async def aiopg_connector(aiopg_connector_factory): +async def aiopg_connector(aiopg_connector_factory) -> aiopg.AiopgConnector: return await aiopg_connector_factory() diff --git a/tests/integration/contrib/aiopg/test_aiopg_connector.py b/tests/integration/contrib/aiopg/test_aiopg_connector.py index ad7adf745..72ba6df4e 100644 --- a/tests/integration/contrib/aiopg/test_aiopg_connector.py +++ b/tests/integration/contrib/aiopg/test_aiopg_connector.py @@ -8,6 +8,7 @@ import attr import pytest +from procrastinate import exceptions, manager from procrastinate.contrib.aiopg import aiopg_connector as aiopg from procrastinate.contrib.psycopg2 import psycopg2_connector @@ -78,6 +79,42 @@ def decode(dct): assert result["json"] == {"a": 1, "b": Param(p=2)} +async def test_wrap_exceptions(aiopg_connector): + await aiopg_connector.execute_query_async( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + 
'{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + with pytest.raises(exceptions.UniqueViolation) as excinfo: + await aiopg_connector.execute_query_async( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + assert excinfo.value.constraint_name == manager.QUEUEING_LOCK_CONSTRAINT + assert excinfo.value.queueing_lock == "same_queueing_lock" + + async def test_execute_query(aiopg_connector): assert ( await aiopg_connector.execute_query_async( @@ -156,15 +193,26 @@ async def test_get_connection_no_psycopg2_adapter_registration( async def test_listen_notify(aiopg_connector): channel = "somechannel" event = asyncio.Event() + received_args: list[dict] = [] + + async def handle_notification(*, channel: str, payload: str): + event.set() + received_args.append({"channel": channel, "payload": payload}) task = asyncio.ensure_future( - aiopg_connector.listen_notify(channels=[channel], event=event) + aiopg_connector.listen_notify( + channels=[channel], on_notification=handle_notification + ) ) try: - await event.wait() - event.clear() - await aiopg_connector.execute_query_async(f"""NOTIFY "{channel}" """) + await asyncio.sleep(0.1) + await aiopg_connector.execute_query_async( + f"""NOTIFY "{channel}", 'somepayload' """ + ) await asyncio.wait_for(event.wait(), timeout=1) + args = received_args.pop() + assert args["channel"] == "somechannel" + assert args["payload"] == "somepayload" except asyncio.TimeoutError: pytest.fail("Notify not received within 1 sec") finally: @@ -174,9 +222,15 @@ async def test_listen_notify(aiopg_connector): async def test_loop_notify_stop_when_connection_closed_old_aiopg(aiopg_connector): # We want to make sure that the when the connection is closed, the loop end. 
event = asyncio.Event() + + async def handle_notification(channel: str, payload: str): + event.set() + await aiopg_connector.open_async() async with aiopg_connector._pool.acquire() as connection: - coro = aiopg_connector._loop_notify(event=event, connection=connection) + coro = aiopg_connector._loop_notify( + on_notification=handle_notification, connection=connection + ) await asyncio.sleep(0.1) # Currently, the the connection closes, the notifies queue is not # awaken. This test validates the "normal" stopping condition, there is @@ -192,9 +246,15 @@ async def test_loop_notify_stop_when_connection_closed_old_aiopg(aiopg_connector async def test_loop_notify_stop_when_connection_closed(aiopg_connector): # We want to make sure that the when the connection is closed, the loop end. event = asyncio.Event() + + async def handle_notification(channel: str, payload: str): + event.set() + await aiopg_connector.open_async() async with aiopg_connector._pool.acquire() as connection: - coro = aiopg_connector._loop_notify(event=event, connection=connection) + coro = aiopg_connector._loop_notify( + on_notification=handle_notification, connection=connection + ) await asyncio.sleep(0.1) # Currently, the the connection closes, the notifies queue is not # awaken. This test validates the "normal" stopping condition, there is @@ -211,11 +271,15 @@ async def test_loop_notify_timeout(aiopg_connector): # We want to make sure that when the listen starts, we don't listen forever. If the # connection closes, we eventually finish the coroutine. 
event = asyncio.Event() + + async def handle_notification(channel: str, payload: str): + event.set() + await aiopg_connector.open_async() async with aiopg_connector._pool.acquire() as connection: task = asyncio.ensure_future( aiopg_connector._loop_notify( - event=event, connection=connection, timeout=0.01 + on_notification=handle_notification, connection=connection, timeout=0.01 ) ) await asyncio.sleep(0.1) @@ -234,6 +298,7 @@ async def test_destructor(connection_params, capsys): await connector.open_async() await connector.execute_query_async("SELECT 1") + assert connector._pool assert len(connector._pool._free) == 1 # "del connector" causes a ResourceWarning from aiopg.Pool if the diff --git a/tests/integration/contrib/django/test_django_connector.py b/tests/integration/contrib/django/test_django_connector.py index f523f6613..850cccf9d 100644 --- a/tests/integration/contrib/django/test_django_connector.py +++ b/tests/integration/contrib/django/test_django_connector.py @@ -24,6 +24,15 @@ def foo(): foo.defer() +def test_wrap_exceptions_batch__integrity_error(db): + @procrastinate.contrib.django.app.task(queueing_lock="bar") + def bar(): + pass + + with pytest.raises(exceptions.AlreadyEnqueued): + bar.batch_defer({}, {}) + + def test_get_sync_connector(django_connector): assert django_connector.get_sync_connector() is django_connector diff --git a/tests/integration/contrib/django/test_management_command.py b/tests/integration/contrib/django/test_management_command.py index 71ac4b7c5..403a2c148 100644 --- a/tests/integration/contrib/django/test_management_command.py +++ b/tests/integration/contrib/django/test_management_command.py @@ -10,7 +10,7 @@ def test_procrastinate_command(capsys): with pytest.raises(SystemExit): call_command("procrastinate", "--help") - out, err = capsys.readouterr() + out, _err = capsys.readouterr() assert "usage: procrastinate" in out assert "{worker,defer,healthchecks,shell}" in out diff --git 
a/tests/integration/contrib/django/test_models.py b/tests/integration/contrib/django/test_models.py index 49615d51c..d3b302f67 100644 --- a/tests/integration/contrib/django/test_models.py +++ b/tests/integration/contrib/django/test_models.py @@ -28,6 +28,8 @@ def test_procrastinate_job(db): "scheduled_at": None, "attempts": 0, "queueing_lock": None, + "abort_requested": False, + "worker_id": None, } @@ -43,6 +45,8 @@ def test_procrastinate_job__property(db): scheduled_at=datetime.datetime(2021, 1, 1, tzinfo=datetime.timezone.utc), attempts=0, queueing_lock="baz", + abort_requested=False, + worker_id=None, ) assert job.procrastinate_job == jobs_module.Job( id=1, @@ -75,6 +79,7 @@ def test_procrastinate_job__create__with_setting(db, settings): scheduled_at=datetime.datetime.now(datetime.timezone.utc), attempts=0, queueing_lock="baz", + abort_requested=False, ) diff --git a/tests/integration/contrib/psycopg2/test_psycopg2_connector.py b/tests/integration/contrib/psycopg2/test_psycopg2_connector.py index 93295b6ff..b9b0597ba 100644 --- a/tests/integration/contrib/psycopg2/test_psycopg2_connector.py +++ b/tests/integration/contrib/psycopg2/test_psycopg2_connector.py @@ -6,6 +6,7 @@ import psycopg2.errors import pytest +from procrastinate import exceptions, manager from procrastinate.contrib import psycopg2 as proc_psycopg2 @@ -83,6 +84,42 @@ def test_json_loads(psycopg2_connector_factory, mocker): assert connector.json_loads is loads +async def test_wrap_exceptions(psycopg2_connector): + psycopg2_connector.execute_query( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + with pytest.raises(exceptions.UniqueViolation) as excinfo: + psycopg2_connector.execute_query( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 
'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + assert excinfo.value.constraint_name == manager.QUEUEING_LOCK_CONSTRAINT + assert excinfo.value.queueing_lock == "same_queueing_lock" + + def test_execute_query(psycopg2_connector): psycopg2_connector.execute_query("COMMENT ON TABLE \"procrastinate_jobs\" IS 'foo'") result = psycopg2_connector.execute_query_one( diff --git a/tests/integration/contrib/sphinx/test-root/index.rst b/tests/integration/contrib/sphinx/test-root/index.rst index 0f287a494..db38bb360 100644 --- a/tests/integration/contrib/sphinx/test-root/index.rst +++ b/tests/integration/contrib/sphinx/test-root/index.rst @@ -1,5 +1,5 @@ Tasks ===== -.. automodule:: procrastinate_demos.demo_async.tasks +.. automodule:: procrastinate.demos.demo_async.tasks :members: diff --git a/tests/integration/contrib/sqlalchemy/test_psycopg2_connector.py b/tests/integration/contrib/sqlalchemy/test_psycopg2_connector.py index 23989733c..c9ee9bd28 100644 --- a/tests/integration/contrib/sqlalchemy/test_psycopg2_connector.py +++ b/tests/integration/contrib/sqlalchemy/test_psycopg2_connector.py @@ -6,6 +6,7 @@ import psycopg2.errors import pytest +from procrastinate import exceptions, manager from procrastinate.contrib.sqlalchemy import SQLAlchemyPsycopg2Connector @@ -91,6 +92,42 @@ def test_json_loads(sqlalchemy_psycopg2_connector_factory, mocker): assert connector.json_loads is loads +async def test_wrap_exceptions(sqlalchemy_psycopg2_connector): + sqlalchemy_psycopg2_connector.execute_query( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + with pytest.raises(exceptions.UniqueViolation) as excinfo: + 
sqlalchemy_psycopg2_connector.execute_query( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + assert excinfo.value.constraint_name == manager.QUEUEING_LOCK_CONSTRAINT + assert excinfo.value.queueing_lock == "same_queueing_lock" + + def test_execute_query(sqlalchemy_psycopg2_connector): sqlalchemy_psycopg2_connector.execute_query( "COMMENT ON TABLE \"procrastinate_jobs\" IS 'foo'" diff --git a/tests/integration/test_cli.py b/tests/integration/test_cli.py index b54817382..9b3cdd991 100644 --- a/tests/integration/test_cli.py +++ b/tests/integration/test_cli.py @@ -8,7 +8,7 @@ import pytest -from procrastinate import __version__, cli, exceptions, worker +from procrastinate import __version__, cli, exceptions, jobs @dataclasses.dataclass @@ -69,7 +69,7 @@ async def test_worker(entrypoint, cli_app, mocker): cli_app.run_worker_async = mocker.AsyncMock() result = await entrypoint( "worker " - "--queues a,b --name=w1 --timeout=8.3 " + "--queues a,b --name=w1 --fetch-job-polling-interval=8.3 --abort-job-polling-interval=20 " "--one-shot --concurrency=10 --no-listen-notify --delete-jobs=always" ) @@ -79,10 +79,11 @@ async def test_worker(entrypoint, cli_app, mocker): concurrency=10, name="w1", queues=["a", "b"], - timeout=8.3, + fetch_job_polling_interval=8.3, + abort_job_polling_interval=20, wait=False, listen_notify=False, - delete_jobs=worker.DeleteJobCondition.ALWAYS, + delete_jobs=jobs.DeleteJobCondition.ALWAYS, ) @@ -167,6 +168,8 @@ def mytask(a): "status": "todo", "task_name": "hello", "priority": 0, + "abort_requested": False, + "worker_id": None, } } @@ -192,6 +195,8 @@ def mytask(a): "status": "todo", "task_name": "hello", "priority": 5, + "abort_requested": False, + "worker_id": None, } } @@ -222,6 +227,8 @@ def mytask(a): "status": "todo", 
"task_name": "hello", "priority": 0, + "abort_requested": False, + "worker_id": None, } } @@ -251,6 +258,8 @@ def mytask(a): "status": "todo", "task_name": "hello", "priority": 0, + "abort_requested": False, + "worker_id": None, } assert ( now + datetime.timedelta(seconds=9) @@ -281,7 +290,7 @@ async def test_defer_queueing_lock_ignore(entrypoint, cli_app, connector): def mytask(a): pass - cli_app.configure_task(name="hello", queueing_lock="houba").defer(a=1) + await cli_app.configure_task(name="hello", queueing_lock="houba").defer_async(a=1) result = await entrypoint( """defer --queueing-lock=houba --ignore-already-enqueued hello {"a":2}""" @@ -315,6 +324,8 @@ async def test_defer_unknown(entrypoint, cli_app, connector): "status": "todo", "task_name": "hello", "priority": 0, + "abort_requested": False, + "worker_id": None, } } diff --git a/tests/integration/test_manager.py b/tests/integration/test_manager.py index 8c9173f53..ed6575dc8 100644 --- a/tests/integration/test_manager.py +++ b/tests/integration/test_manager.py @@ -5,7 +5,7 @@ import pytest -from procrastinate import exceptions, jobs, manager +from procrastinate import exceptions, jobs, manager, utils from .. 
import conftest @@ -31,10 +31,15 @@ def deferred_job_factory(deferred_job_factory, pg_job_manager): @pytest.fixture -def fetched_job_factory(deferred_job_factory, pg_job_manager): +async def worker_id(pg_job_manager): + return await pg_job_manager.register_worker() + + +@pytest.fixture +def fetched_job_factory(deferred_job_factory, pg_job_manager, worker_id): async def factory(**kwargs): job = await deferred_job_factory(**kwargs) - fetched_job = await pg_job_manager.fetch_job(queues=None) + fetched_job = await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) # to make sure we do fetch the job we just deferred assert fetched_job.id == job.id return fetched_job @@ -51,51 +56,55 @@ async def factory(**kwargs): ], ) async def test_fetch_job( - pg_job_manager, - deferred_job_factory, - job_kwargs, - fetch_queues, + pg_job_manager, deferred_job_factory, job_kwargs, fetch_queues, worker_id ): # Now add the job we're testing job = await deferred_job_factory(**job_kwargs) - expected_job = job.evolve(status="doing") - assert await pg_job_manager.fetch_job(queues=fetch_queues) == expected_job + expected_job = job.evolve(status="doing", worker_id=worker_id) + assert ( + await pg_job_manager.fetch_job(queues=fetch_queues, worker_id=worker_id) + == expected_job + ) -async def test_fetch_job_not_fetching_started_job(pg_job_manager, fetched_job_factory): +async def test_fetch_job_not_fetching_started_job( + pg_job_manager, fetched_job_factory, worker_id +): # Add a first started job await fetched_job_factory() - assert await pg_job_manager.fetch_job(queues=None) is None + assert await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) is None async def test_fetch_job_not_fetching_locked_job( - pg_job_manager, deferred_job_factory, fetched_job_factory + pg_job_manager, deferred_job_factory, fetched_job_factory, worker_id ): await fetched_job_factory(lock="lock_1") await deferred_job_factory(lock="lock_1") - assert await pg_job_manager.fetch_job(queues=None) is 
None + assert await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) is None async def test_fetch_job_respect_lock_aborting_job( - pg_job_manager, deferred_job_factory, fetched_job_factory + pg_job_manager, deferred_job_factory, fetched_job_factory, worker_id ): job = await fetched_job_factory(lock="lock_1") await deferred_job_factory(lock="lock_1") await pg_job_manager.cancel_job_by_id_async(job.id, abort=True) - assert await pg_job_manager.fetch_job(queues=None) is None + assert await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) is None async def test_fetch_job_spacial_case_none_lock( - pg_job_manager, deferred_job_factory, fetched_job_factory + pg_job_manager, deferred_job_factory, fetched_job_factory, worker_id ): await fetched_job_factory(lock=None) job = await deferred_job_factory(lock=None) - assert (await pg_job_manager.fetch_job(queues=None)).id == job.id + assert ( + await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) + ).id == job.id @pytest.mark.parametrize( @@ -108,11 +117,13 @@ async def test_fetch_job_spacial_case_none_lock( ], ) async def test_fetch_job_no_result( - pg_job_manager, deferred_job_factory, job_kwargs, fetch_queues + pg_job_manager, deferred_job_factory, job_kwargs, fetch_queues, worker_id ): await deferred_job_factory(**job_kwargs) - assert await pg_job_manager.fetch_job(queues=fetch_queues) is None + assert ( + await pg_job_manager.fetch_job(queues=fetch_queues, worker_id=worker_id) is None + ) @pytest.mark.parametrize( @@ -123,7 +134,7 @@ async def test_fetch_job_no_result( {"nb_seconds": 1800, "task_name": "task_1"}, ], ) -async def test_get_stalled_jobs_yes( +async def test_get_stalled_jobs_by_started__yes( pg_job_manager, fetched_job_factory, psycopg_connector, filter_args ): job = await fetched_job_factory(queue="queue_a", task_name="task_1") @@ -134,7 +145,8 @@ async def test_get_stalled_jobs_yes( f"WHERE job_id={job.id}" ) - result = await pg_job_manager.get_stalled_jobs(**filter_args) + 
with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + result = await pg_job_manager.get_stalled_jobs(**filter_args) assert result == [job] @@ -146,7 +158,7 @@ async def test_get_stalled_jobs_yes( {"nb_seconds": 1800, "task_name": "task_2"}, ], ) -async def test_get_stalled_jobs_no( +async def test_get_stalled_jobs_by_started__no( pg_job_manager, fetched_job_factory, psycopg_connector, filter_args ): job = await fetched_job_factory(queue="queue_a", task_name="task_1") @@ -157,10 +169,214 @@ async def test_get_stalled_jobs_no( f"WHERE job_id={job.id}" ) + with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + result = await pg_job_manager.get_stalled_jobs(**filter_args) + assert result == [] + + +async def test_get_stalled_jobs_by_started__retries__no( + pg_job_manager, fetched_job_factory, psycopg_connector +): + job = await fetched_job_factory(queue="queue_a", task_name="task_1") + + # We fake previous tries + await psycopg_connector.execute_query_async( + f"UPDATE procrastinate_events SET at=now() - INTERVAL '1 hour'" + f"WHERE job_id={job.id} AND type='deferred'" + ) + await psycopg_connector.execute_query_async( + f"INSERT INTO procrastinate_events (job_id, type, at) VALUES" + f"({job.id}, 'started', now() - INTERVAL '1 hour')" + ) + await psycopg_connector.execute_query_async( + f"INSERT INTO procrastinate_events (job_id, type, at) VALUES" + f"({job.id}, 'deferred_for_retry', now() - INTERVAL '59 minutes')" + ) + events = await psycopg_connector.execute_query_all_async( + f"SELECT at, type FROM procrastinate_events " + f"WHERE job_id={job.id} ORDER BY at ASC" + ) + + # Sanity checks: We're in the situation where 1h ago, the job has been deferred, + # started, it failed so it was retried, and it just started again now. 
+ assert [e["type"] for e in events] == [ + "deferred", + "started", + "deferred_for_retry", + "started", + ] + + # It should not be considered stalled + with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + result = await pg_job_manager.get_stalled_jobs(nb_seconds=1800) + assert result == [] + + +async def test_get_stalled_jobs_by_started__retries__yes( + pg_job_manager, fetched_job_factory, psycopg_connector +): + job = await fetched_job_factory(queue="queue_a", task_name="task_1") + + # We fake previous tries + await psycopg_connector.execute_query_async( + f"UPDATE procrastinate_events SET at=now() - INTERVAL '1 hour'" + f"WHERE job_id={job.id} AND type='deferred'" + ) + await psycopg_connector.execute_query_async( + f"UPDATE procrastinate_events SET at=now() - INTERVAL '40 minutes'" + f"WHERE job_id={job.id} AND type='started'" + ) + await psycopg_connector.execute_query_async( + f"INSERT INTO procrastinate_events (job_id, type, at) VALUES" + f"({job.id}, 'started', now() - INTERVAL '1 hour')" + ) + await psycopg_connector.execute_query_async( + f"INSERT INTO procrastinate_events (job_id, type, at) VALUES" + f"({job.id}, 'deferred_for_retry', now() - INTERVAL '59 minutes')" + ) + events = await psycopg_connector.execute_query_all_async( + f"SELECT at, type FROM procrastinate_events " + f"WHERE job_id={job.id} ORDER BY at ASC" + ) + + # Sanity checks: We're in the situation where 1h ago, the job has been deferred, + # started, it failed so it was retried, and it started again 40 minutes ago. 
+ assert [e["type"] for e in events] == [ + "deferred", + "started", + "deferred_for_retry", + "started", + ] + + # It should not be considered stalled + with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + result = await pg_job_manager.get_stalled_jobs(nb_seconds=1800) + assert result == [job] + + +@pytest.mark.parametrize( + "filter_args", + [ + {"queue": "queue_a"}, + {"task_name": "task_1"}, + ], +) +async def test_get_stalled_jobs_by_heartbeat__yes( + pg_job_manager, fetched_job_factory, psycopg_connector, filter_args +): + job = await fetched_job_factory(queue="queue_a", task_name="task_1") + + # We fake the worker heartbeat + await psycopg_connector.execute_query_async( + "UPDATE procrastinate_workers SET last_heartbeat=last_heartbeat - INTERVAL '35 minutes' " + f"WHERE id='{job.worker_id}'" + ) + + result = await pg_job_manager.get_stalled_jobs(**filter_args) + assert result == [job] + + +@pytest.mark.parametrize( + "filter_args", + [ + {"queue": "queue_b"}, + {"task_name": "task_2"}, + ], +) +async def test_get_stalled_jobs_by_heartbeat__no( + pg_job_manager, fetched_job_factory, psycopg_connector, filter_args +): + job = await fetched_job_factory(queue="queue_a", task_name="task_1") + + # We fake the worker heartbeat + await psycopg_connector.execute_query_async( + "UPDATE procrastinate_workers SET last_heartbeat=last_heartbeat - INTERVAL '35 minutes' " + f"WHERE id='{job.worker_id}'" + ) + result = await pg_job_manager.get_stalled_jobs(**filter_args) assert result == [] +async def test_get_stalled_jobs_by_heartbeat__pruned_worker( + pg_job_manager, fetched_job_factory, psycopg_connector +): + job = await fetched_job_factory(queue="queue_a", task_name="task_1") + + # We fake a stalled and pruned worker + await psycopg_connector.execute_query_async( + f"DELETE FROM procrastinate_workers WHERE id='{job.worker_id}'" + ) + + result = await pg_job_manager.get_stalled_jobs() + pruned_job = job.evolve(worker_id=None) + assert result == [pruned_job] + 
+ +async def test_register_and_unregister_worker(pg_job_manager, psycopg_connector): + worker_id = await pg_job_manager.register_worker() + assert worker_id is not None + + rows = await psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + assert len(rows) == 1 + assert rows[0]["id"] == worker_id + assert abs((rows[0]["last_heartbeat"] - utils.utcnow()).total_seconds()) < 0.1 + + await pg_job_manager.unregister_worker(worker_id=worker_id) + + rows = await psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + assert len(rows) == 0 + + +async def test_update_heartbeat(pg_job_manager, psycopg_connector, worker_id): + rows = await psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + first_heartbeat = rows[0]["last_heartbeat"] + + await pg_job_manager.update_heartbeat(worker_id=worker_id) + + rows = await psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + assert len(rows) == 1 + assert rows[0]["id"] == worker_id + assert first_heartbeat < rows[0]["last_heartbeat"] < utils.utcnow() + + +async def test_prune_stalled_workers(pg_job_manager, psycopg_connector, worker_id): + rows = await psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + assert len(rows) == 1 + + pruned_workers = await pg_job_manager.prune_stalled_workers( + seconds_since_heartbeat=1800 + ) + assert pruned_workers == [] + + # We fake the heartbeat to be 35 minutes old + await psycopg_connector.execute_query_async( + "UPDATE procrastinate_workers " + "SET last_heartbeat=last_heartbeat - INTERVAL '35 minutes' " + f"WHERE id='{worker_id}'" + ) + + pruned_workers = await pg_job_manager.prune_stalled_workers( + seconds_since_heartbeat=1800 + ) + assert pruned_workers == [worker_id] + + rows = await 
psycopg_connector.execute_query_all_async( + f"SELECT * FROM procrastinate_workers WHERE id={worker_id}" + ) + assert len(rows) == 0 + + async def test_delete_old_jobs_job_todo( get_all, pg_job_manager, @@ -198,7 +414,7 @@ async def test_delete_old_jobs_job_doing( @pytest.mark.parametrize( - "status, nb_hours, queue, include_error, expected_job_count", + "status, nb_hours, queue, include_failed, expected_job_count", [ # nb_hours (jobs.Status.SUCCEEDED, 1, None, False, 0), @@ -208,7 +424,7 @@ async def test_delete_old_jobs_job_doing( (jobs.Status.SUCCEEDED, 3, "queue_a", False, 1), (jobs.Status.SUCCEEDED, 1, "queue_b", False, 1), (jobs.Status.SUCCEEDED, 1, "queue_b", False, 1), - # include_error + # include_failed (jobs.Status.FAILED, 1, None, False, 1), (jobs.Status.FAILED, 1, None, True, 0), ], @@ -220,7 +436,7 @@ async def test_delete_old_jobs_parameters( status, nb_hours, queue, - include_error, + include_failed, expected_job_count, fetched_job_factory, ): @@ -236,7 +452,7 @@ async def test_delete_old_jobs_parameters( ) await pg_job_manager.delete_old_jobs( - nb_hours=nb_hours, queue=queue, include_error=include_error + nb_hours=nb_hours, queue=queue, include_failed=include_failed ) jobs_count = len(await get_all("procrastinate_jobs", "id")) assert jobs_count == expected_job_count @@ -271,7 +487,7 @@ async def test_finish_job_wrong_initial_status( await pg_job_manager.finish_job( job=job, status=jobs.Status.FAILED, delete_job=delete_job ) - assert 'Job was not found or not in "doing", "todo" or "aborting" status' in str( + assert 'Job was not found or not in "doing" or "todo" status' in str( excinfo.value.__cause__ ) @@ -291,21 +507,23 @@ async def test_finish_job_wrong_end_status( ) -async def test_retry_job(pg_job_manager, fetched_job_factory): +async def test_retry_job(pg_job_manager, fetched_job_factory, worker_id): job1 = await fetched_job_factory(queue="queue_a") await pg_job_manager.retry_job( job=job1, 
retry_at=datetime.datetime.now(datetime.timezone.utc) ) - job2 = await pg_job_manager.fetch_job(queues=None) + job2 = await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) assert job2.id == job1.id assert job2.attempts == job1.attempts + 1 assert job2.priority == job1.priority == 0 -async def test_retry_job_with_additional_params(pg_job_manager, fetched_job_factory): +async def test_retry_job_with_additional_params( + pg_job_manager, fetched_job_factory, worker_id +): job1 = await fetched_job_factory(queue="queue_a") await pg_job_manager.retry_job( @@ -316,7 +534,7 @@ async def test_retry_job_with_additional_params(pg_job_manager, fetched_job_fact lock="some_lock", ) - job2 = await pg_job_manager.fetch_job(queues=None) + job2 = await pg_job_manager.fetch_job(queues=None, worker_id=worker_id) assert job2.id == job1.id assert job2.attempts == 1 @@ -372,6 +590,58 @@ async def test_defer_job(pg_job_manager, get_all, job_factory): ] +async def test_batch_defer_jobs(pg_job_manager, get_all, job_factory): + queue = "marsupilami" + new_jobs = await pg_job_manager.batch_defer_jobs_async( + jobs=[ + job_factory( + id=0, + queue=queue, + task_name="bob", + lock="sher", + queueing_lock="houba1", + task_kwargs={"a": 1, "b": 2}, + ), + job_factory( + id=0, + queue=queue, + task_name="bob", + lock="sher", + queueing_lock="houba2", + task_kwargs={"a": 3, "b": 4}, + ), + ] + ) + + result = await get_all( + "procrastinate_jobs", + "id", + "args", + "status", + "lock", + "queueing_lock", + "task_name", + ) + assert result == [ + { + "id": new_jobs[0].id, + "args": {"a": 1, "b": 2}, + "status": "todo", + "lock": "sher", + "queueing_lock": "houba1", + "task_name": "bob", + }, + { + "id": new_jobs[1].id, + "args": {"a": 3, "b": 4}, + "status": "todo", + "lock": "sher", + "queueing_lock": "houba2", + "task_name": "bob", + }, + ] + + async def test_defer_job_violate_queueing_lock(pg_job_manager, job_factory): await pg_job_manager.defer_job_async( job_factory( @@ -379,7 +649,7 
@@ async def test_defer_job_violate_queueing_lock(pg_job_manager, job_factory): queue="queue_a", task_name="task_1", lock="lock_1", - queueing_lock="queueing_lock", + queueing_lock="same_queueing_lock", task_kwargs={"a": "b"}, ) ) @@ -390,13 +660,58 @@ async def test_defer_job_violate_queueing_lock(pg_job_manager, job_factory): queue="queue_a", task_name="task_2", lock="lock_2", - queueing_lock="queueing_lock", + queueing_lock="same_queueing_lock", task_kwargs={"c": "d"}, ) ) cause = excinfo.value.__cause__ assert isinstance(cause, exceptions.UniqueViolation) - assert cause.constraint_name == "procrastinate_jobs_queueing_lock_idx" + + # TODO: When QUEUEING_LOCK_CONSTRAINT_LEGACY in manager.py is removed, we can + # also remove the check for the old constraint name "procrastinate_jobs_queueing_lock_idx" + assert cause.constraint_name in [ + "procrastinate_jobs_queueing_lock_idx", + "procrastinate_jobs_queueing_lock_idx_v1", + ] + assert cause.queueing_lock == "same_queueing_lock" + + +async def test_batch_defer_jobs_violate_queueing_lock( + pg_job_manager, get_all, job_factory +): + with pytest.raises(exceptions.AlreadyEnqueued) as excinfo: + await pg_job_manager.batch_defer_jobs_async( + [ + job_factory( + id=1, + queue="queue_a", + task_name="task_1", + lock="lock_1", + queueing_lock="same_queueing_lock", + task_kwargs={"a": "b"}, + ), + job_factory( + id=2, + queue="queue_a", + task_name="task_2", + lock="lock_2", + queueing_lock="same_queueing_lock", + task_kwargs={"c": "d"}, + ), + ] + ) + cause = excinfo.value.__cause__ + assert isinstance(cause, exceptions.UniqueViolation) + assert cause.queueing_lock == "same_queueing_lock" + + # TODO: When QUEUEING_LOCK_CONSTRAINT_LEGACY in manager.py is removed, we can + # also remove the check for the old constraint name "procrastinate_jobs_queueing_lock_idx" + assert cause.constraint_name in [ + "procrastinate_jobs_queueing_lock_idx", + "procrastinate_jobs_queueing_lock_idx_v1", + ] + + assert await 
get_all("procrastinate_jobs", "id") == [] async def test_check_connection(pg_job_manager): @@ -408,7 +723,7 @@ def test_check_connection_sync(pg_job_manager): @pytest.fixture -async def fixture_jobs(pg_job_manager, job_factory): +async def fixture_jobs(pg_job_manager, job_factory, worker_id): j1 = job_factory( queue="q1", lock="lock1", @@ -448,7 +763,7 @@ async def fixture_jobs(pg_job_manager, job_factory): task_kwargs={"key": "d"}, ) j4 = await pg_job_manager.defer_job_async(job=j4) - await pg_job_manager.fetch_job(queues=["q3"]) + await pg_job_manager.fetch_job(queues=["q3"], worker_id=worker_id) return [j1, j2, j3, j4] @@ -484,7 +799,6 @@ async def test_list_queues_dict(fixture_jobs, pg_job_manager): "succeeded": 0, "failed": 1, "cancelled": 0, - "aborting": 0, "aborted": 0, } @@ -514,7 +828,6 @@ async def test_list_tasks_dict(fixture_jobs, pg_job_manager): "succeeded": 0, "failed": 1, "cancelled": 0, - "aborting": 0, "aborted": 0, } diff --git a/tests/integration/test_psycopg_connector.py b/tests/integration/test_psycopg_connector.py index b36cbe8a2..09c3ff540 100644 --- a/tests/integration/test_psycopg_connector.py +++ b/tests/integration/test_psycopg_connector.py @@ -8,7 +8,7 @@ import attr import pytest -from procrastinate import exceptions, psycopg_connector, sync_psycopg_connector +from procrastinate import exceptions, manager, psycopg_connector, sync_psycopg_connector @pytest.fixture @@ -79,6 +79,42 @@ def decode(dct): assert result["json"] == {"a": 1, "b": Param(p=2)} +async def test_wrap_exceptions(psycopg_connector): + await psycopg_connector.execute_query_async( + """SELECT procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + with pytest.raises(exceptions.UniqueViolation) as excinfo: + await psycopg_connector.execute_query_async( + """SELECT 
procrastinate_defer_jobs_v1( + ARRAY[ + ROW( + 'queue'::character varying, + 'foo'::character varying, + 0::integer, + NULL::text, + 'same_queueing_lock'::text, + '{}'::jsonb, + NULL::timestamptz + ) + ]::procrastinate_job_to_defer_v1[] + ) AS id;""" + ) + assert excinfo.value.constraint_name == manager.QUEUEING_LOCK_CONSTRAINT + assert excinfo.value.queueing_lock == "same_queueing_lock" + + async def test_execute_query(psycopg_connector): assert ( await psycopg_connector.execute_query_async( @@ -97,20 +133,6 @@ async def test_execute_query(psycopg_connector): assert result == [{"obj_description": "foo"}] -async def test_wrap_exceptions(psycopg_connector): - await psycopg_connector.execute_query_async( - """SELECT procrastinate_defer_job( - 'queue', 'foo', 0, NULL, 'lock', '{}', NULL - ) AS id;""" - ) - with pytest.raises(exceptions.UniqueViolation): - await psycopg_connector.execute_query_async( - """SELECT procrastinate_defer_job( - 'queue', 'foo', 0, NULL, 'lock', '{}', NULL - ) AS id;""" - ) - - async def test_execute_query_sync(psycopg_connector): @asgiref.sync.sync_to_async() def sync(): @@ -166,15 +188,26 @@ async def test_close_async(psycopg_connector): async def test_listen_notify(psycopg_connector): channel = "somechannel" event = asyncio.Event() + received_args: list[dict] = [] + + async def handle_notification(*, channel: str, payload: str): + event.set() + received_args.append({"channel": channel, "payload": payload}) task = asyncio.ensure_future( - psycopg_connector.listen_notify(channels=[channel], event=event) + psycopg_connector.listen_notify( + channels=[channel], on_notification=handle_notification + ) ) try: - await asyncio.wait_for(event.wait(), timeout=0.2) - event.clear() - await psycopg_connector.execute_query_async(f"""NOTIFY "{channel}" """) + await asyncio.sleep(0.1) + await psycopg_connector.execute_query_async( + f"""NOTIFY "{channel}", 'somepayload' """ + ) await asyncio.wait_for(event.wait(), timeout=1) + args = received_args.pop() + 
assert args["channel"] == "somechannel" + assert args["payload"] == "somepayload" except asyncio.TimeoutError: pytest.fail("Notify not received within 1 sec") finally: @@ -193,10 +226,14 @@ async def configure(connection): async def test_loop_notify_stop_when_connection_closed(psycopg_connector): # We want to make sure that the when the connection is closed, the loop end. - event = asyncio.Event() + async def handle_notification(channel: str, payload: str): + pass + await psycopg_connector.open_async() async with psycopg_connector._async_pool.connection() as connection: - coro = psycopg_connector._loop_notify(event=event, connection=connection) + coro = psycopg_connector._loop_notify( + on_notification=handle_notification, connection=connection + ) await psycopg_connector._async_pool.close() assert connection.closed @@ -210,11 +247,18 @@ async def test_loop_notify_stop_when_connection_closed(psycopg_connector): async def test_loop_notify_timeout(psycopg_connector): # We want to make sure that when the listen starts, we don't listen forever. If the # connection closes, we eventually finish the coroutine. 
+ event = asyncio.Event() + + async def handle_notification(channel: str, payload: str): + event.set() + await psycopg_connector.open_async() async with psycopg_connector._async_pool.connection() as connection: task = asyncio.ensure_future( - psycopg_connector._loop_notify(event=event, connection=connection) + psycopg_connector._loop_notify( + on_notification=handle_notification, connection=connection + ) ) assert not task.done() diff --git a/tests/integration/test_wait_stop.py b/tests/integration/test_wait_stop.py index d1041a700..adf26d79f 100644 --- a/tests/integration/test_wait_stop.py +++ b/tests/integration/test_wait_stop.py @@ -8,21 +8,20 @@ from procrastinate import worker as worker_module -async def test_wait_for_activity(psycopg_connector): +async def test_wait_for_activity_cancelled(psycopg_connector): """ - Testing that a new event interrupts the wait + Testing that the work can be cancelled """ pg_app = app.App(connector=psycopg_connector) - worker = worker_module.Worker(app=pg_app, timeout=2) - worker.notify_event = asyncio.Event() - task = asyncio.ensure_future(worker.single_worker(worker_id=0)) + worker = worker_module.Worker(app=pg_app, fetch_job_polling_interval=2) + task = asyncio.ensure_future(worker.run()) await asyncio.sleep(0.2) # should be enough so that we're waiting - worker.stop_requested = True - worker.notify_event.set() + task.cancel() try: - await asyncio.wait_for(task, timeout=0.2) + with pytest.raises(asyncio.CancelledError): + await asyncio.wait_for(task, timeout=0.2) except asyncio.TimeoutError: pytest.fail("Failed to stop worker within .2s") @@ -32,18 +31,11 @@ async def test_wait_for_activity_timeout(psycopg_connector): Testing that we timeout if nothing happens """ pg_app = app.App(connector=psycopg_connector) - worker = worker_module.Worker(app=pg_app, timeout=2) - worker.notify_event = asyncio.Event() - task = asyncio.ensure_future(worker.single_worker(worker_id=0)) - try: - await asyncio.sleep(0.2) # should be enough so that 
we're waiting - - worker.stop_requested = True - - with pytest.raises(asyncio.TimeoutError): - await asyncio.wait_for(task, timeout=0.2) - finally: - worker.notify_event.set() + worker = worker_module.Worker(app=pg_app, fetch_job_polling_interval=2) + task = asyncio.ensure_future(worker.run()) + await asyncio.sleep(0.2) # should be enough so that we're waiting + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(task, timeout=0.2) async def test_wait_for_activity_stop_from_signal(psycopg_connector, kill_own_pid): @@ -51,7 +43,7 @@ async def test_wait_for_activity_stop_from_signal(psycopg_connector, kill_own_pi Testing than ctrl+c interrupts the wait """ pg_app = app.App(connector=psycopg_connector) - worker = worker_module.Worker(app=pg_app, timeout=2) + worker = worker_module.Worker(app=pg_app, fetch_job_polling_interval=2) task = asyncio.ensure_future(worker.run()) await asyncio.sleep(0.2) # should be enough so that we're waiting @@ -65,10 +57,10 @@ async def test_wait_for_activity_stop_from_signal(psycopg_connector, kill_own_pi async def test_wait_for_activity_stop(psycopg_connector): """ - Testing than calling job_manager.stop() interrupts the wait + Testing than calling worker.stop() interrupts the wait """ pg_app = app.App(connector=psycopg_connector) - worker = worker_module.Worker(app=pg_app, timeout=2) + worker = worker_module.Worker(app=pg_app, fetch_job_polling_interval=2) task = asyncio.ensure_future(worker.run()) await asyncio.sleep(0.2) # should be enough so that we're waiting diff --git a/tests/integration/test_worker.py b/tests/integration/test_worker.py deleted file mode 100644 index a55e23f33..000000000 --- a/tests/integration/test_worker.py +++ /dev/null @@ -1,106 +0,0 @@ -from __future__ import annotations - -import asyncio -import contextlib -import signal - -import pytest - -from procrastinate import worker - - -@contextlib.asynccontextmanager -async def running_worker(app): - running_worker = worker.Worker(app=app, 
queues=["some_queue"]) - task = asyncio.ensure_future(running_worker.run()) - running_worker.task = task - yield running_worker - running_worker.stop() - await asyncio.wait_for(task, timeout=0.5) - - -async def test_run(app, caplog): - caplog.set_level("DEBUG") - - done = asyncio.Event() - - @app.task(queue="some_queue") - def t(): - done.set() - - async with running_worker(app): - await t.defer_async() - - try: - await asyncio.wait_for(done.wait(), timeout=0.5) - except asyncio.TimeoutError: - pytest.fail("Failed to launch task withing .5s") - - assert [q[0] for q in app.connector.queries] == [ - "defer_job", - "fetch_job", - "finish_job", - ] - - logs = {(r.action, r.levelname) for r in caplog.records} - # remove the periodic_deferrer_no_task log record because that makes the test flaky - assert { - ("about_to_defer_job", "DEBUG"), - ("job_defer", "INFO"), - ("loaded_job_info", "DEBUG"), - ("start_job", "INFO"), - ("job_success", "INFO"), - ("finish_task", "DEBUG"), - } <= logs - - -async def test_run_log_current_job_when_stopping(app, caplog): - caplog.set_level("DEBUG") - - async with running_worker(app) as worker: - - @app.task(queue="some_queue") - async def t(): - worker.stop() - - await t.defer_async() - - try: - await asyncio.wait_for(worker.task, timeout=0.5) - except asyncio.TimeoutError: - pytest.fail("Failed to launch task within .5s") - - # We want to make sure that the log that names the current running task fired. 
- logs = " ".join(r.message for r in caplog.records) - assert "Stop requested" in logs - assert ( - "Waiting for job to finish: worker 0: tests.integration.test_worker.t[1]()" - in logs - ) - - -async def test_run_no_listen_notify(app): - running_worker = worker.Worker(app=app, queues=["some_queue"], listen_notify=False) - task = asyncio.ensure_future(running_worker.run()) - try: - await asyncio.sleep(0.01) - assert app.connector.notify_event is None - finally: - running_worker.stop() - await asyncio.wait_for(task, timeout=0.5) - - -async def test_run_no_signal_handlers(app, kill_own_pid): - running_worker = worker.Worker( - app=app, queues=["some_queue"], install_signal_handlers=False - ) - - task = asyncio.ensure_future(running_worker.run()) - try: - with pytest.raises(KeyboardInterrupt): - await asyncio.sleep(0.01) - # Test that handlers are NOT installed - kill_own_pid(signal=signal.SIGINT) - finally: - running_worker.stop() - await asyncio.wait_for(task, timeout=0.5) diff --git a/tests/migration/test_migration.py b/tests/migration/test_migration.py index 379bad076..ecb50c6d6 100644 --- a/tests/migration/test_migration.py +++ b/tests/migration/test_migration.py @@ -2,8 +2,10 @@ import contextlib import pathlib +import subprocess import warnings +import packaging.version import pytest from django.core import management from django.db import connection @@ -98,3 +100,105 @@ def test_django_migrations_run_properly(django_db): def test_no_missing_django_migration(django_db): management.call_command("makemigrations", "procrastinate", dry_run=True, check=True) + + +@pytest.fixture(scope="module") +def latest_version() -> packaging.version.Version: + try: + subprocess.check_call(["git", "fetch", "--tags"]) + out = subprocess.check_output(["git", "tag", "--list"], text=True) + except subprocess.CalledProcessError as exc: + raise ValueError("Cannot fetch latest tag") from exc + + return max(packaging.version.Version(tag) for tag in out.splitlines()) + + +migration_files 
= sorted( + (pathlib.Path(__file__).parents[2] / "procrastinate" / "sql" / "migrations").glob( + "*.sql" + ) +) + + +@pytest.fixture(scope="module") +def new_migrations(latest_version) -> set[pathlib.Path]: + # git diff latest_version..HEAD --name-only --diff-filter=A --no-renames -- procrastinate/sql/migrations + + try: + out = subprocess.check_output( + [ + "git", + "diff", + f"{latest_version}..HEAD", + "--name-only", + "--diff-filter=A", + "--no-renames", + "--", + "procrastinate/sql/migrations", + ], + text=True, + stderr=subprocess.PIPE, + ) + except subprocess.CalledProcessError as exc: + raise ValueError(f"Cannot fetch new migrations: {exc.stderr}") from exc + + return {pathlib.Path(path) for path in out.splitlines()} + + +@pytest.mark.parametrize( + "migration", [pytest.param(m, id=m.name) for m in migration_files] +) +def test_migration_properly_named( + migration: pathlib.Path, + latest_version: packaging.version.Version, + new_migrations: set[pathlib.Path], +): + # migration is: + # pathlib.Path("..." 
/ "03.00.00_01_pre_cancel_notification.sql") + + migration_name_parts = migration.stem.split("_", 3) + version_str, index_str, *pre_post_name = migration_name_parts + + mig_version = packaging.version.Version(version_str) + + next_minor = packaging.version.Version( + f"{latest_version.major}.{latest_version.minor + 1}.0" + ) + + if migration.name in {m.name for m in new_migrations}: + assert mig_version == next_minor, ( + f"New migration {migration.name} should be named with {next_minor} but is {mig_version}" + ) + else: + assert mig_version <= latest_version, ( + f"Migration {migration.name} should be named with at most {latest_version} but is {mig_version}" + ) + + # All migrations before 3.0.0 are pre migrations + if mig_version < packaging.version.Version("3.0.0"): + pre_post = "pre" + name = pre_post_name[0] + else: + pre_post, name = pre_post_name + + index = int(index_str) + if pre_post == "pre": + assert 1 <= index < 50, ( + f"Pre migration {migration.name} should have an index between 1 and 49, but is {index}" + ) + elif pre_post == "post": + assert 50 <= index < 100, ( + f"Post migration {migration.name} should have an index of at least 50, but is {index}" + ) + else: + assert False, f"Invalid migration name: expecting 'pre' or 'post': {pre_post}" + + assert name == name.lower(), ( + f"Migration {migration.name} should be lower case, but is {name}" + ) + assert "-" not in name, ( + f"Migration {migration.name} should not contain dashes, but is {name}" + ) + assert " " not in name, ( + f"Migration {migration.name} should not contain spaces, but is {name}" + ) diff --git a/tests/unit/contrib/django/test_cli.py b/tests/unit/contrib/django/test_cli.py index 269d07969..4b366a235 100644 --- a/tests/unit/contrib/django/test_cli.py +++ b/tests/unit/contrib/django/test_cli.py @@ -26,9 +26,9 @@ def test_procrastinate_command(): def assert_no_action_named_args(parser): for action in parser._actions: - assert getattr(action, "dest", "") != ( - "args" - ), f"'args' 
found in {parser.prog}\n{error}" + assert getattr(action, "dest", "") != ("args"), ( + f"'args' found in {parser.prog}\n{error}" + ) if isinstance(action, argparse._SubParsersAction): for subparser in action.choices.values(): assert_no_action_named_args(subparser) diff --git a/tests/unit/test_app.py b/tests/unit/test_app.py index 8118abd02..1f3aeb1bb 100644 --- a/tests/unit/test_app.py +++ b/tests/unit/test_app.py @@ -2,6 +2,7 @@ import asyncio import collections +from typing import cast import pytest @@ -28,7 +29,7 @@ def test_app_task_dont_read_function_attributes(app: app_module.App): def wrapped(): return "foo" - wrapped.pass_context = True + wrapped.pass_context = True # type: ignore task = app.task(wrapped) assert task.pass_context is False @@ -47,14 +48,14 @@ def test_app_register(app: app_module.App): assert app.tasks["bla"] == task -def test_app_worker(app, mocker): +def test_app_worker(app: app_module.App, mocker): Worker = mocker.patch("procrastinate.worker.Worker") - app.worker_defaults["timeout"] = 12 + app.worker_defaults["fetch_job_polling_interval"] = 12 app._worker(queues=["yay"], name="w1", wait=False) Worker.assert_called_once_with( - queues=["yay"], app=app, name="w1", timeout=12, wait=False + queues=["yay"], app=app, name="w1", fetch_job_polling_interval=12, wait=False ) @@ -95,6 +96,7 @@ async def my_task(a): result.append(a) task = asyncio.create_task(app.run_worker_async()) + await asyncio.sleep(0.01) await my_task.defer_async(a=1) await asyncio.sleep(0.01) task.cancel() @@ -104,6 +106,26 @@ async def my_task(a): assert result == [1] +async def test_app_run_worker_async_abort(app: app_module.App): + result = [] + + @app.task + async def my_task(a): + await asyncio.sleep(3) + result.append(a) + + task = asyncio.create_task(app.run_worker_async(shutdown_graceful_timeout=0.1)) + await my_task.defer_async(a=1) + await asyncio.sleep(0.01) + task.cancel() + with pytest.raises(asyncio.CancelledError): + # this wait_for is just here to fail the test 
faster + await asyncio.wait_for(task, timeout=1) + pytest.fail("Expected the worker to be force stopped") + + assert result == [] + + def test_from_path(mocker): load = mocker.patch("procrastinate.utils.load_from_path") assert app_module.App.from_path("dotted.path") is load.return_value @@ -253,7 +275,7 @@ def test_check_stack_is_called(mocker, connector): called = [] class MyApp(app_module.App): - def _check_stack(self): + def _check_stack(self): # pyright: ignore[reportIncompatibleMethodOverride] called.append(True) return "foo" @@ -286,16 +308,16 @@ def bar(): assert app.periodic_registry is new_app.periodic_registry -def test_replace_connector(app): +def test_replace_connector(app: app_module.App): @app.task(name="foo") def foo(): pass foo.defer() - assert len(app.connector.jobs) == 1 + assert len(cast(testing.InMemoryConnector, app.connector).jobs) == 1 new_connector = testing.InMemoryConnector() with app.replace_connector(new_connector): - assert len(app.connector.jobs) == 0 + assert len(cast(testing.InMemoryConnector, app.connector).jobs) == 0 - assert len(app.connector.jobs) == 1 + assert len(cast(testing.InMemoryConnector, app.connector).jobs) == 1 diff --git a/tests/unit/test_blueprints.py b/tests/unit/test_blueprints.py index 188cb6e4f..c677dbe25 100644 --- a/tests/unit/test_blueprints.py +++ b/tests/unit/test_blueprints.py @@ -219,7 +219,6 @@ def test_blueprint_task_explicit(blueprint: blueprints.Blueprint, mocker): def my_task(context: JobContext): return "foo" - assert my_task(JobContext()) == "foo" assert blueprint.tasks["foobar"].name == "foobar" assert blueprint.tasks["foobar"].queue == "bar" assert blueprint.tasks["foobar"].lock == "sher" diff --git a/tests/unit/test_builtin_tasks.py b/tests/unit/test_builtin_tasks.py index 8849f9878..716ffdf99 100644 --- a/tests/unit/test_builtin_tasks.py +++ b/tests/unit/test_builtin_tasks.py @@ -1,18 +1,28 @@ from __future__ import annotations +import time +from typing import cast + from procrastinate import 
builtin_tasks, job_context +from procrastinate.app import App +from procrastinate.testing import InMemoryConnector -async def test_remove_old_jobs(app): +async def test_remove_old_jobs(app: App, job_factory): + job = job_factory() await builtin_tasks.remove_old_jobs( - job_context.JobContext(app=app), + job_context.JobContext( + app=app, job=job, abort_reason=lambda: None, start_timestamp=time.time() + ), max_hours=2, queue="queue_a", - remove_error=True, + remove_failed=True, remove_cancelled=True, remove_aborted=True, ) - assert app.connector.queries == [ + + connector = cast(InMemoryConnector, app.connector) + assert connector.queries == [ ( "delete_old_jobs", { diff --git a/tests/unit/test_cli.py b/tests/unit/test_cli.py index 4e90a90b0..38a03a314 100644 --- a/tests/unit/test_cli.py +++ b/tests/unit/test_cli.py @@ -8,7 +8,7 @@ import pytest -from procrastinate import app, cli, connector, exceptions, worker +from procrastinate import app, cli, connector, exceptions, jobs from procrastinate.connector import BaseConnector @@ -53,7 +53,7 @@ def test_main(mocker): ), ( ["worker", "--delete-jobs", "never"], - {"command": "worker", "delete_jobs": worker.DeleteJobCondition.NEVER}, + {"command": "worker", "delete_jobs": jobs.DeleteJobCondition.NEVER}, ), (["defer", "x"], {"command": "defer", "task": "x"}), (["defer", "x", "{}"], {"command": "defer", "task": "x", "json_args": "{}"}), @@ -281,7 +281,7 @@ def get_sync_connector(self) -> BaseConnector: cli.load_app("foobar") -async def test_shell_single_command(app, capsys): +async def test_shell_single_command(app: app.App, capsys): @app.task(name="foobar") def mytask(a): pass @@ -295,7 +295,7 @@ def mytask(a): assert out == "#1 foobar on default - [todo]\n" -async def test_shell_interactive_command(app, capsys, mocker): +async def test_shell_interactive_command(app: app.App, capsys, mocker): @app.task(name="foobar") def mytask(a): pass diff --git a/tests/unit/test_connector.py b/tests/unit/test_connector.py index 
e561b6787..a63db018f 100644 --- a/tests/unit/test_connector.py +++ b/tests/unit/test_connector.py @@ -30,7 +30,7 @@ async def test_close_async(connector): ["execute_query_async", {"query": ""}], ["execute_query_one_async", {"query": ""}], ["execute_query_all_async", {"query": ""}], - ["listen_notify", {"event": None, "channels": []}], + ["listen_notify", {"on_notification": None, "channels": []}], ], ) async def test_missing_app_async(method_name, kwargs): diff --git a/tests/unit/test_job_context.py b/tests/unit/test_job_context.py index 0d3a5fe0d..72ed06f33 100644 --- a/tests/unit/test_job_context.py +++ b/tests/unit/test_job_context.py @@ -1,14 +1,16 @@ from __future__ import annotations +import time + import pytest from procrastinate import job_context +from procrastinate.app import App @pytest.mark.parametrize( "job_result, expected", [ - (job_context.JobResult(), None), (job_context.JobResult(start_timestamp=10), 20), (job_context.JobResult(start_timestamp=10, end_timestamp=15), 5), ], @@ -20,7 +22,6 @@ def test_job_result_duration(job_result, expected): @pytest.mark.parametrize( "job_result, expected", [ - (job_context.JobResult(), {}), ( job_context.JobResult(start_timestamp=10), { @@ -44,81 +45,29 @@ def test_job_result_as_dict(job_result, expected, mocker): assert job_result.as_dict() == expected -@pytest.mark.parametrize( - "queues, result", [(None, "all queues"), (["foo", "bar"], "queues foo, bar")] -) -def test_queues_display(queues, result): - context = job_context.JobContext(worker_queues=queues) - assert context.queues_display == result - - -def test_evolve(): - context = job_context.JobContext(worker_name="a") - assert context.evolve(worker_name="b").worker_name == "b" - - -def test_log_extra(): - context = job_context.JobContext( - worker_name="a", worker_id=2, additional_context={"ha": "ho"} - ) - - assert context.log_extra(action="foo", bar="baz") == { - "action": "foo", - "bar": "baz", - "worker": {"name": "a", "id": 2, "queues": None}, - } - - 
-def test_log_extra_job(job_factory): +def test_evolve(app: App, job_factory): job = job_factory() - context = job_context.JobContext(worker_name="a", worker_id=2, job=job) - - assert context.log_extra(action="foo") == { - "action": "foo", - "job": job.log_context(), - "worker": {"name": "a", "id": 2, "queues": None}, - } - - -def test_job_description_no_job(job_factory): - descr = job_context.JobContext(worker_name="a", worker_id=2).job_description( - current_timestamp=0 + context = job_context.JobContext( + start_timestamp=time.time(), + app=app, + job=job, + worker_name="a", + abort_reason=lambda: None, ) - assert descr == "worker 2: no current job" - + assert context.evolve(worker_name="b").worker_name == "b" -def test_job_description_job_no_time(job_factory): - job = job_factory(task_name="some_task", id=12, task_kwargs={"a": "b"}) - descr = job_context.JobContext( - worker_name="a", worker_id=2, job=job - ).job_description(current_timestamp=0) - assert descr == "worker 2: some_task[12](a='b')" +def test_task(app: App, job_factory): + @app.task(name="my_task") + def my_task(a, b): + return a + b -def test_job_description_job_time(job_factory): - job = job_factory(task_name="some_task", id=12, task_kwargs={"a": "b"}) - descr = job_context.JobContext( - worker_name="a", - worker_id=2, + job = job_factory(task_name="my_task") + context = job_context.JobContext( + start_timestamp=time.time(), + app=app, job=job, - job_result=job_context.JobResult(start_timestamp=20.0), - ).job_description(current_timestamp=30.0) - assert descr == "worker 2: some_task[12](a='b') (started 10.000 s ago)" - - -async def test_should_abort(app, job_factory): - await app.job_manager.defer_job_async(job=job_factory()) - job = await app.job_manager.fetch_job(queues=None) - await app.job_manager.cancel_job_by_id_async(job.id, abort=True) - context = job_context.JobContext(app=app, job=job) - assert context.should_abort() is True - assert await context.should_abort_async() is True - - -async 
def test_should_not_abort(app, job_factory): - await app.job_manager.defer_job_async(job=job_factory()) - job = await app.job_manager.fetch_job(queues=None) - await app.job_manager.cancel_job_by_id_async(job.id) - context = job_context.JobContext(app=app, job=job) - assert context.should_abort() is False - assert await context.should_abort_async() is False + worker_name="a", + abort_reason=lambda: None, + ) + assert context.task == my_task diff --git a/tests/unit/test_jobs.py b/tests/unit/test_jobs.py index 6c776772b..3c6d5de8c 100644 --- a/tests/unit/test_jobs.py +++ b/tests/unit/test_jobs.py @@ -42,6 +42,8 @@ def test_job_get_context(job_factory, scheduled_at, context_scheduled_at): "scheduled_at": context_scheduled_at, "attempts": 42, "call_string": "mytask[12](a='b')", + "abort_requested": False, + "worker_id": None, } @@ -79,6 +81,8 @@ async def test_job_deferrer_defer_async(job_factory, job_manager, connector): "scheduled_at": None, "status": "todo", "task_name": "mytask", + "abort_requested": False, + "worker_id": None, } } diff --git a/tests/unit/test_manager.py b/tests/unit/test_manager.py index 3d14f2073..634bc8ac8 100644 --- a/tests/unit/test_manager.py +++ b/tests/unit/test_manager.py @@ -5,11 +5,16 @@ import pytest -from procrastinate import exceptions, jobs, manager +from procrastinate import exceptions, jobs, manager, utils from .. 
import conftest +@pytest.fixture +async def worker_id(job_manager): + return await job_manager.register_worker() + + async def test_manager_defer_job(job_manager, job_factory, connector): job = await job_manager.defer_job_async( job=job_factory( @@ -35,10 +40,68 @@ async def test_manager_defer_job(job_manager, job_factory, connector): "scheduled_at": None, "status": "todo", "task_name": "bla", + "abort_requested": False, + "worker_id": None, } } +async def test_manager_batch_defer_jobs(job_manager, job_factory, connector): + jobs = await job_manager.batch_defer_jobs_async( + jobs=[ + job_factory( + task_kwargs={"a": "b"}, + queue="marsupilami", + task_name="bla", + priority=5, + lock="sher", + ), + job_factory( + task_kwargs={"a": "c"}, + queue="marsupilami", + task_name="bla", + priority=7, + lock="sher", + ), + ] + ) + + assert len(jobs) == 2 + assert jobs[0].id == 1 + assert jobs[1].id == 2 + + assert connector.jobs == { + 1: { + "args": {"a": "b"}, + "attempts": 0, + "id": 1, + "lock": "sher", + "queueing_lock": None, + "queue_name": "marsupilami", + "priority": 5, + "scheduled_at": None, + "status": "todo", + "task_name": "bla", + "abort_requested": False, + "worker_id": None, + }, + 2: { + "args": {"a": "c"}, + "attempts": 0, + "id": 2, + "lock": "sher", + "queueing_lock": None, + "queue_name": "marsupilami", + "priority": 7, + "scheduled_at": None, + "status": "todo", + "task_name": "bla", + "abort_requested": False, + "worker_id": None, + }, + } + + async def test_manager_defer_job_no_lock(job_manager, job_factory, connector): await job_manager.defer_job_async(job=job_factory()) @@ -48,7 +111,7 @@ async def test_manager_defer_job_no_lock(job_manager, job_factory, connector): async def test_manager_defer_job_connector_exception( mocker, job_manager, job_factory, connector ): - connector.execute_query_one_async = mocker.Mock( + connector.execute_query_all_async = mocker.Mock( side_effect=exceptions.ConnectorException ) @@ -59,9 +122,10 @@ async def 
test_manager_defer_job_connector_exception( async def test_manager_defer_job_unique_violation_exception( mocker, job_manager, job_factory, connector ): - connector.execute_query_one_async = mocker.Mock( + connector.execute_query_all_async = mocker.Mock( side_effect=exceptions.UniqueViolation( - constraint_name="procrastinate_jobs_queueing_lock_idx" + constraint_name="procrastinate_jobs_queueing_lock_idx_v1", + queueing_lock="some_queueing_lock", ) ) @@ -72,8 +136,10 @@ async def test_manager_defer_job_unique_violation_exception( async def test_manager_defer_job_unique_violation_exception_other_constraint( mocker, job_manager, job_factory, connector ): - connector.execute_query_one_async = mocker.Mock( - side_effect=exceptions.UniqueViolation(constraint_name="some_other_constraint") + connector.execute_query_all_async = mocker.Mock( + side_effect=exceptions.UniqueViolation( + constraint_name="some_other_constraint", queueing_lock=None + ) ) with pytest.raises(exceptions.ConnectorException): @@ -83,9 +149,10 @@ async def test_manager_defer_job_unique_violation_exception_other_constraint( async def test_manager_defer_job_unique_violation_exception_sync( mocker, job_manager, job_factory, connector ): - connector.execute_query_one = mocker.Mock( + connector.execute_query_all = mocker.Mock( side_effect=exceptions.UniqueViolation( - constraint_name="procrastinate_jobs_queueing_lock_idx" + constraint_name="procrastinate_jobs_queueing_lock_idx_v1", + queueing_lock="some_queueing_lock", ) ) @@ -96,49 +163,116 @@ async def test_manager_defer_job_unique_violation_exception_sync( async def test_manager_defer_job_unique_violation_exception_other_constraint_sync( mocker, job_manager, job_factory, connector ): - connector.execute_query_one = mocker.Mock( - side_effect=exceptions.UniqueViolation(constraint_name="some_other_constraint") + connector.execute_query_all = mocker.Mock( + side_effect=exceptions.UniqueViolation( + constraint_name="some_other_constraint", queueing_lock=None 
+ ) ) with pytest.raises(exceptions.ConnectorException): job_manager.defer_job(job=job_factory(task_kwargs={"a": "b"})) -async def test_fetch_job_no_suitable_job(job_manager): - assert await job_manager.fetch_job(queues=None) is None +async def test_fetch_job_no_suitable_job(job_manager, worker_id): + assert await job_manager.fetch_job(queues=None, worker_id=worker_id) is None -async def test_fetch_job(job_manager, job_factory): +async def test_fetch_job(job_manager, job_factory, worker_id): job = job_factory(id=None) await job_manager.defer_job_async(job=job) - expected_job = job.evolve(id=1, status="doing") - assert await job_manager.fetch_job(queues=None) == expected_job + expected_job = job.evolve(id=1, status="doing", worker_id=worker_id) + assert await job_manager.fetch_job(queues=None, worker_id=worker_id) == expected_job -async def test_get_stalled_jobs_not_stalled(job_manager, job_factory): +async def test_get_stalled_jobs_by_started_not_stalled(job_manager, job_factory): job = job_factory(id=1) await job_manager.defer_job_async(job=job) - assert await job_manager.get_stalled_jobs(nb_seconds=1000) == [] + with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + assert await job_manager.get_stalled_jobs(nb_seconds=1000) == [] -async def test_get_stalled_jobs_stalled(job_manager, job_factory, connector): +async def test_get_stalled_jobs_by_started_stalled( + job_manager, job_factory, connector, worker_id +): job = job_factory() await job_manager.defer_job_async(job=job) - await job_manager.fetch_job(queues=None) + await job_manager.fetch_job(queues=None, worker_id=worker_id) connector.events[1][-1]["at"] = conftest.aware_datetime(2000, 1, 1) - expected_job = job.evolve(id=1, status="doing") - assert await job_manager.get_stalled_jobs(nb_seconds=1000) == [expected_job] + expected_job = job.evolve(id=1, status="doing", worker_id=worker_id) + with pytest.warns(DeprecationWarning, match=".*nb_seconds.*"): + assert await 
job_manager.get_stalled_jobs(nb_seconds=1000) == [expected_job] + + +async def test_get_stalled_jobs_by_heartbeat_not_stalled(job_manager, job_factory): + job = job_factory(id=1) + await job_manager.defer_job_async(job=job) + assert await job_manager.get_stalled_jobs() == [] + + +async def test_get_stalled_jobs_by_heartbeat_stalled( + job_manager, job_factory, connector, worker_id +): + job = job_factory() + await job_manager.defer_job_async(job=job) + await job_manager.fetch_job(queues=None, worker_id=worker_id) + connector.workers = {1: conftest.aware_datetime(2000, 1, 1)} + expected_job = job.evolve(id=1, status="doing", worker_id=worker_id) + assert await job_manager.get_stalled_jobs() == [expected_job] + + +async def test_register_and_unregister_worker(job_manager, connector): + then = utils.utcnow() + assert connector.workers == {} + worker_id = await job_manager.register_worker() + assert worker_id is not None + + assert len(connector.workers) == 1 + assert worker_id in connector.workers + assert then < connector.workers[worker_id] < utils.utcnow() + + await job_manager.unregister_worker(worker_id=1) + + assert connector.workers == {} + + +async def test_update_heartbeat(job_manager, connector, worker_id): + first_heartbeat = connector.workers[worker_id] + + await job_manager.update_heartbeat(worker_id=worker_id) + + assert len(connector.workers) == 1 + assert worker_id in connector.workers + assert first_heartbeat < connector.workers[worker_id] < utils.utcnow() + + +async def test_prune_stalled_workers(job_manager, connector, worker_id): + assert len(connector.workers) == 1 + + pruned_workers = await job_manager.prune_stalled_workers( + seconds_since_heartbeat=1800 + ) + assert pruned_workers == [] + + # We fake the heartbeat to be 35 minutes old + heartbeat = connector.workers[worker_id] + connector.workers[worker_id] = heartbeat - datetime.timedelta(minutes=35) + + pruned_workers = await job_manager.prune_stalled_workers( + seconds_since_heartbeat=1800 + 
) + assert pruned_workers == [worker_id] + assert connector.workers == {} @pytest.mark.parametrize( - "include_error, statuses", + "include_failed, statuses", [(False, ["succeeded"]), (True, ["succeeded", "failed"])], ) async def test_delete_old_jobs( - job_manager, job_factory, connector, include_error, statuses, mocker + job_manager, job_factory, connector, include_failed, statuses, mocker ): await job_manager.delete_old_jobs( - nb_hours=5, queue="marsupilami", include_error=include_error + nb_hours=5, queue="marsupilami", include_failed=include_failed ) assert connector.queries == [ ( @@ -225,12 +359,12 @@ async def test_delete_cancelled_todo_job_async(job_manager, job_factory, connect assert len(connector.jobs) == 0 -async def test_cancel_doing_job(job_manager, job_factory, connector): +async def test_cancel_doing_job(job_manager, job_factory, connector, worker_id): job = job_factory(id=1) await job_manager.defer_job_async(job=job) - await job_manager.fetch_job(queues=None) + await job_manager.fetch_job(queues=None, worker_id=worker_id) - cancelled = job_manager.cancel_job_by_id(job_id=1) + cancelled = await job_manager.cancel_job_by_id_async(job_id=1) assert not cancelled assert connector.queries[-1] == ( "cancel_job", @@ -239,18 +373,19 @@ async def test_cancel_doing_job(job_manager, job_factory, connector): assert connector.jobs[1]["status"] == "doing" -async def test_abort_doing_job(job_manager, job_factory, connector): +async def test_abort_doing_job(job_manager, job_factory, connector, worker_id): job = job_factory(id=1) await job_manager.defer_job_async(job=job) - await job_manager.fetch_job(queues=None) + await job_manager.fetch_job(queues=None, worker_id=worker_id) - cancelled = job_manager.cancel_job_by_id(job_id=1, abort=True) + cancelled = await job_manager.cancel_job_by_id_async(job_id=1, abort=True) assert cancelled assert connector.queries[-1] == ( "cancel_job", {"job_id": 1, "abort": True, "delete_job": False}, ) - assert 
connector.jobs[1]["status"] == "aborting" + assert connector.jobs[1]["status"] == "doing" + assert connector.jobs[1]["abort_requested"] is True def test_get_job_status(job_manager, job_factory, connector): @@ -260,13 +395,13 @@ def test_get_job_status(job_manager, job_factory, connector): assert job_manager.get_job_status(job_id=1) == jobs.Status.TODO -async def test_get_job_status_async(job_manager, job_factory, connector): +async def test_get_job_status_async(job_manager, job_factory, connector, worker_id): job = job_factory(id=1) await job_manager.defer_job_async(job=job) assert await job_manager.get_job_status_async(job_id=1) == jobs.Status.TODO - await job_manager.fetch_job(queues=None) + await job_manager.fetch_job(queues=None, worker_id=worker_id) assert await job_manager.get_job_status_async(job_id=1) == jobs.Status.DOING @@ -293,22 +428,22 @@ async def test_retry_job(job_manager, job_factory, connector): @pytest.mark.parametrize( "queues, channels", [ - (None, ["procrastinate_any_queue"]), - (["a", "b"], ["procrastinate_queue#a", "procrastinate_queue#b"]), + (None, ["procrastinate_any_queue_v1"]), + (["a", "b"], ["procrastinate_queue_v1#a", "procrastinate_queue_v1#b"]), ], ) async def test_listen_for_jobs(job_manager, connector, mocker, queues, channels): - event = mocker.Mock() + on_notification = mocker.Mock() - await job_manager.listen_for_jobs(queues=queues, event=event) - assert connector.notify_event is event + await job_manager.listen_for_jobs(queues=queues, on_notification=on_notification) + assert connector.on_notification assert connector.notify_channels == channels @pytest.fixture def configure(app): @app.task - def foo(timestamp): + def foo(timestamp: int): pass return foo.configure @@ -443,7 +578,7 @@ def test_retry_job_by_id(job_manager, connector, job_factory, dt): async def test_list_jobs_async(job_manager, job_factory): - job = job_manager.defer_job(job=job_factory()) + job = await job_manager.defer_job_async(job=job_factory()) assert 
await job_manager.list_jobs_async() == [job] @@ -455,7 +590,7 @@ def test_list_jobs(job_manager, job_factory): async def test_list_queues_async(job_manager, job_factory): - job_manager.defer_job(job=job_factory(queue="foo")) + await job_manager.defer_job_async(job=job_factory(queue="foo")) assert await job_manager.list_queues_async() == [ { @@ -466,7 +601,6 @@ async def test_list_queues_async(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] @@ -484,14 +618,13 @@ def test_list_queues_(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] async def test_list_tasks_async(job_manager, job_factory): - job_manager.defer_job(job=job_factory(task_name="foo")) + await job_manager.defer_job_async(job=job_factory(task_name="foo")) assert await job_manager.list_tasks_async() == [ { @@ -502,7 +635,6 @@ async def test_list_tasks_async(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] @@ -520,14 +652,13 @@ def test_list_tasks(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] async def test_list_locks_async(job_manager, job_factory): - job_manager.defer_job(job=job_factory(lock="foo")) + await job_manager.defer_job_async(job=job_factory(lock="foo")) assert await job_manager.list_locks_async() == [ { @@ -538,7 +669,6 @@ async def test_list_locks_async(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] @@ -556,7 +686,6 @@ def test_list_locks(job_manager, job_factory): "succeeded": 0, "failed": 0, "cancelled": 0, - "aborting": 0, "aborted": 0, } ] diff --git a/tests/unit/test_retry.py b/tests/unit/test_retry.py index b01b7a2c7..d462be4f2 100644 --- a/tests/unit/test_retry.py +++ b/tests/unit/test_retry.py @@ -94,6 +94,17 @@ def test_get_none_retry_decision(attempts, wait, linear_wait, exponential_wait, 
assert strategy.get_retry_decision(exception=Exception(), job=job_mock) is None +def test_get_retry_decision_does_not_overflow(mocker): + # 5 ** 20s exceeds year 9999, the maximum representable by datetime. + # `retry_at` should be clamped to the maximum instead of crashing. + strategy = retry_module.RetryStrategy(exponential_wait=5) + job_mock = mocker.Mock(attempts=20) + retry_decision = strategy.get_retry_decision(exception=Exception(), job=job_mock) + assert isinstance(retry_decision, RetryDecision) + assert retry_decision.retry_at + assert retry_decision.retry_at.year == 9999 + + def test_retry_exception(mocker): strategy = retry_module.RetryStrategy(retry_exceptions=[ValueError]) job_mock = mocker.Mock(attempts=0) diff --git a/tests/unit/test_shell.py b/tests/unit/test_shell.py index 3e2db2cdb..8019c56db 100644 --- a/tests/unit/test_shell.py +++ b/tests/unit/test_shell.py @@ -2,46 +2,55 @@ import pytest -from procrastinate import manager +from procrastinate import manager, testing, utils from procrastinate import shell as shell_module +from procrastinate import types as t from .. 
import conftest @pytest.fixture -def shell(connector): +def shell(connector: testing.InMemoryConnector): return shell_module.ProcrastinateShell(manager.JobManager(connector=connector)) -def test_exit(shell): +def test_exit(shell: shell_module.ProcrastinateShell): assert shell.do_exit("") is True -def test_EOF(shell): +def test_EOF(shell: shell_module.ProcrastinateShell): assert shell.do_EOF("") is True -def test_list_jobs(shell, connector, capsys): - connector.defer_job_one( - "task1", - 0, - "lock1", - "queueing_lock1", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue1", - ) - connector.defer_job_one( - "task2", - 0, - "lock2", - "queueing_lock2", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue2", +async def test_list_jobs( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] ) - shell.do_list_jobs("") + await utils.sync_to_async(shell.do_list_jobs, "") captured = capsys.readouterr() assert captured.out.splitlines() == [ "#1 task1 on queue1 - [todo]", @@ -57,32 +66,43 @@ def test_list_jobs(shell, connector, capsys): "queueing_lock": None, "lock": None, "status": None, + "worker_id": None, }, ) ] -def test_list_jobs_filters(shell, connector, capsys): - connector.defer_job_one( - "task1", - 0, - "lock1", - "queueing_lock1", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue1", - ) - connector.defer_job_one( - "task2", - 0, - "lock2", - "queueing_lock2", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue2", +async def test_list_jobs_filters( + shell: 
shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] ) - shell.do_list_jobs("id=2 queue=queue2 task=task2 lock=lock2 status=todo") + await utils.sync_to_async( + shell.do_list_jobs, "id=2 queue=queue2 task=task2 lock=lock2 status=todo" + ) captured = capsys.readouterr() assert captured.out.splitlines() == [ "#2 task2 on queue2 - [todo]", @@ -97,32 +117,41 @@ def test_list_jobs_filters(shell, connector, capsys): "queueing_lock": None, "lock": "lock2", "status": "todo", + "worker_id": None, }, ) ] -def test_list_jobs_details(shell, connector, capsys): - connector.defer_job_one( - "task1", - 5, - "lock1", - "queueing_lock1", - {"x": 11}, - conftest.aware_datetime(1000, 1, 1), - "queue1", - ) - connector.defer_job_one( - "task2", - 7, - "lock2", - "queueing_lock2", - {"y": 22}, - conftest.aware_datetime(2000, 1, 1), - "queue2", +async def test_list_jobs_details( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=5, + lock="lock1", + queueing_lock="queueing_lock1", + args={"x": 11}, + scheduled_at=conftest.aware_datetime(1000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=7, + lock="lock2", + queueing_lock="queueing_lock2", + args={"y": 22}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] ) - shell.do_list_jobs("details") + await utils.sync_to_async(shell.do_list_jobs, 
"details") captured = capsys.readouterr() assert captured.out.splitlines() == [ "#1 task1 on queue1 - [todo] (attempts=0, priority=5, scheduled_at=1000-01-01 " @@ -132,21 +161,49 @@ def test_list_jobs_details(shell, connector, capsys): ] -def test_list_jobs_empty(shell, connector, capsys): - shell.do_list_jobs("") +async def test_list_jobs_empty( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await utils.sync_to_async(shell.do_list_jobs, "") captured = capsys.readouterr() assert captured.out == "" -def test_list_queues(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_queues( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - shell.do_list_queues("") + await utils.sync_to_async(shell.do_list_queues, "") captured = capsys.readouterr() assert captured.out.splitlines() == [ - "queue1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", - "queue2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", + "queue1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", + "queue2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -156,14 +213,40 @@ def test_list_queues(shell, 
connector, capsys): ] -def test_list_queues_filters(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_queues_filters( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - shell.do_list_queues("queue=queue2 task=task2 lock=lock2 status=todo") + await utils.sync_to_async( + shell.do_list_queues, "queue=queue2 task=task2 lock=lock2 status=todo" + ) captured = capsys.readouterr() assert captured.out.splitlines() == [ - "queue2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", + "queue2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -178,21 +261,49 @@ def test_list_queues_filters(shell, connector, capsys): ] -def test_list_queues_empty(shell, connector, capsys): - shell.do_list_queues("") +async def test_list_queues_empty( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await utils.sync_to_async(shell.do_list_queues, "") captured = capsys.readouterr() assert captured.out == "" -def test_list_tasks(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_tasks( + shell: 
shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - shell.do_list_tasks("") + await utils.sync_to_async(shell.do_list_tasks, "") captured = capsys.readouterr() assert captured.out.splitlines() == [ - "task1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", - "task2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", + "task1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", + "task2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -202,14 +313,40 @@ def test_list_tasks(shell, connector, capsys): ] -def test_list_tasks_filters(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_tasks_filters( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - 
shell.do_list_tasks("queue=queue2 task=task2 lock=lock2 status=todo") + await utils.sync_to_async( + shell.do_list_tasks, "queue=queue2 task=task2 lock=lock2 status=todo" + ) captured = capsys.readouterr() assert captured.out.splitlines() == [ - "task2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", + "task2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -224,21 +361,49 @@ def test_list_tasks_filters(shell, connector, capsys): ] -def test_list_tasks_empty(shell, connector, capsys): - shell.do_list_tasks("") +async def test_list_tasks_empty( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await utils.sync_to_async(shell.do_list_tasks, "") captured = capsys.readouterr() assert captured.out == "" -def test_list_locks(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_locks( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - shell.do_list_locks("") + await utils.sync_to_async(shell.do_list_locks, "") captured = capsys.readouterr() assert captured.out.splitlines() == [ - "lock1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", - "lock2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, 
cancelled: 0, aborting: 0, aborted: 0)", + "lock1: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", + "lock2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -248,14 +413,40 @@ def test_list_locks(shell, connector, capsys): ] -def test_list_locks_filters(shell, connector, capsys): - connector.defer_job_one("task1", 0, "lock1", "queueing_lock1", {}, 0, "queue1") - connector.defer_job_one("task2", 0, "lock2", "queueing_lock2", {}, 0, "queue2") +async def test_list_locks_filters( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue1", + task_name="task1", + priority=0, + lock="lock1", + queueing_lock="queueing_lock1", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + t.JobToDefer( + queue_name="queue2", + task_name="task2", + priority=0, + lock="lock2", + queueing_lock="queueing_lock2", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] + ) - shell.do_list_locks("queue=queue2 task=task2 lock=lock2 status=todo") + await utils.sync_to_async( + shell.do_list_locks, "queue=queue2 task=task2 lock=lock2 status=todo" + ) captured = capsys.readouterr() assert captured.out.splitlines() == [ - "lock2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborting: 0, aborted: 0)", + "lock2: 1 jobs (todo: 1, doing: 0, succeeded: 0, failed: 0, cancelled: 0, aborted: 0)", ] assert connector.queries == [ ( @@ -270,48 +461,68 @@ def test_list_locks_filters(shell, connector, capsys): ] -def test_list_locks_empty(shell, connector, capsys): - shell.do_list_locks("") +async def test_list_locks_empty( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await utils.sync_to_async(shell.do_list_locks, "") captured = capsys.readouterr() 
assert captured.out == "" -def test_retry(shell, connector, capsys): - connector.defer_job_one( - "task", - 0, - "lock", - "queueing_lock", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue", +async def test_retry( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue", + task_name="task", + priority=0, + lock="lock", + queueing_lock="queueing_lock", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] ) - connector.set_job_status_run(1, "failed") + await connector.set_job_status_run(1, "failed") - shell.do_list_jobs("id=1") + await utils.sync_to_async(shell.do_list_jobs, "id=1") captured = capsys.readouterr() assert captured.out.strip() == "#1 task on queue - [failed]" - shell.do_retry("1") + await utils.sync_to_async(shell.do_retry, "1") captured = capsys.readouterr() assert captured.out.strip() == "#1 task on queue - [todo]" -def test_cancel(shell, connector, capsys): - connector.defer_job_one( - "task", - 0, - "lock", - "queueing_lock", - {}, - conftest.aware_datetime(2000, 1, 1), - "queue", +async def test_cancel( + shell: shell_module.ProcrastinateShell, + connector: testing.InMemoryConnector, + capsys: pytest.CaptureFixture, +): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="queue", + task_name="task", + priority=0, + lock="lock", + queueing_lock="queueing_lock", + args={}, + scheduled_at=conftest.aware_datetime(2000, 1, 1), + ), + ] ) - shell.do_list_jobs("id=1") + await utils.sync_to_async(shell.do_list_jobs, "id=1") captured = capsys.readouterr() assert captured.out.strip() == "#1 task on queue - [todo]" - shell.do_cancel("1") + await utils.sync_to_async(shell.do_cancel, "1") captured = capsys.readouterr() assert captured.out.strip() == "#1 task on queue - [cancelled]" diff --git a/tests/unit/test_sql.py b/tests/unit/test_sql.py index 157a9a03c..9396ed191 100644 --- 
a/tests/unit/test_sql.py +++ b/tests/unit/test_sql.py @@ -30,4 +30,4 @@ def test_parse_query_file(): def test_get_queries(): - assert {"defer_job", "fetch_job", "finish_job"} <= set(sql.get_queries()) + assert {"defer_jobs", "fetch_job", "finish_job"} <= set(sql.get_queries()) diff --git a/tests/unit/test_tasks.py b/tests/unit/test_tasks.py index c6e757651..940bee7f8 100644 --- a/tests/unit/test_tasks.py +++ b/tests/unit/test_tasks.py @@ -38,10 +38,51 @@ async def test_task_defer_async(app: App, connector): "status": "todo", "scheduled_at": None, "attempts": 0, + "abort_requested": False, + "worker_id": None, } } +async def test_task_batch_defer_async(app: App, connector): + task = tasks.Task(task_func, blueprint=app, queue="queue") + + await task.batch_defer_async({"a": 1}, {"b": 2}) + + # The lock is the only thing we can't predict + lock = connector.jobs[1]["lock"] + assert connector.jobs == { + 1: { + "id": 1, + "queue_name": "queue", + "priority": 0, + "task_name": "tests.unit.test_tasks.task_func", + "lock": lock, + "queueing_lock": None, + "args": {"a": 1}, + "status": "todo", + "scheduled_at": None, + "attempts": 0, + "abort_requested": False, + "worker_id": None, + }, + 2: { + "id": 2, + "queue_name": "queue", + "priority": 0, + "task_name": "tests.unit.test_tasks.task_func", + "lock": lock, + "queueing_lock": None, + "args": {"b": 2}, + "status": "todo", + "scheduled_at": None, + "attempts": 0, + "abort_requested": False, + "worker_id": None, + }, + } + + async def test_task_default_priority(app: App, connector): task = tasks.Task(task_func, blueprint=app, queue="queue", priority=7) diff --git a/tests/unit/test_testing.py b/tests/unit/test_testing.py index d50915330..9b6343b4f 100644 --- a/tests/unit/test_testing.py +++ b/tests/unit/test_testing.py @@ -1,42 +1,44 @@ from __future__ import annotations import asyncio +from unittest.mock import AsyncMock import pytest -from procrastinate import exceptions, utils +from procrastinate import exceptions, 
testing, utils +from procrastinate import types as t from .. import conftest -def test_reset(connector): +def test_reset(connector: testing.InMemoryConnector): connector.jobs = {1: {}} connector.reset() assert connector.jobs == {} -def test_generic_execute(connector): +async def test_generic_execute(connector: testing.InMemoryConnector): result = {} connector.reverse_queries = {"a": "b"} - def b(**kwargs): + async def b(**kwargs): result.update(kwargs) - connector.b_youpi = b + connector.b_youpi = b # type: ignore - connector.generic_execute("a", "youpi", i="j") + await connector.generic_execute("a", "youpi", i="j") assert result == {"i": "j"} -async def test_execute_query(connector, mocker): - connector.generic_execute = mocker.Mock() +async def test_execute_query(connector: testing.InMemoryConnector): + connector.generic_execute = AsyncMock() await connector.execute_query_async("a", b="c") connector.generic_execute.assert_called_with("a", "run", b="c") -async def test_execute_query_one(connector, mocker): - connector.generic_execute = mocker.Mock() +async def test_execute_query_one(connector: testing.InMemoryConnector): + connector.generic_execute = AsyncMock() assert ( await connector.execute_query_one_async("a", b="c") == connector.generic_execute.return_value @@ -44,8 +46,8 @@ async def test_execute_query_one(connector, mocker): connector.generic_execute.assert_called_with("a", "one", b="c") -async def test_execute_query_all_async(connector, mocker): - connector.generic_execute = mocker.Mock() +async def test_execute_query_all_async(connector: testing.InMemoryConnector): + connector.generic_execute = AsyncMock() assert ( await connector.execute_query_all_async("a", b="c") == connector.generic_execute.return_value @@ -53,19 +55,23 @@ async def test_execute_query_all_async(connector, mocker): connector.generic_execute.assert_called_with("a", "all", b="c") -def test_make_dynamic_query(connector): +def test_make_dynamic_query(connector: testing.InMemoryConnector): 
assert connector.make_dynamic_query("foo {bar}", bar="baz") == "foo baz" -def test_defer_job_one(connector): - job = connector.defer_job_one( - task_name="mytask", - priority=5, - lock="sher", - queueing_lock="houba", - args={"a": "b"}, - scheduled_at=None, - queue="marsupilami", +async def test_defer_one_job(connector: testing.InMemoryConnector): + jobs = await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=5, + lock="sher", + queueing_lock="houba", + args={"a": "b"}, + scheduled_at=None, + ) + ] ) assert connector.jobs == { @@ -80,66 +86,136 @@ def test_defer_job_one(connector): "status": "todo", "scheduled_at": None, "attempts": 0, + "abort_requested": False, + "worker_id": None, } } - assert connector.jobs[1] == job - - -def test_defer_job_one_multiple_times(connector): - connector.defer_job_one( - task_name="mytask", - priority=0, - lock=None, - queueing_lock=None, - args={}, - scheduled_at=None, - queue="default", + assert connector.jobs[1] == jobs[0] + + +async def test_defer_one_job_multiple_times(connector: testing.InMemoryConnector): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ) + ] ) - connector.defer_job_one( - task_name="mytask", - priority=0, - lock=None, - queueing_lock=None, - args={}, - scheduled_at=None, - queue="default", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ) + ] ) assert len(connector.jobs) == 2 -def test_defer_same_job_with_queueing_lock_second_time_after_first_one_succeeded( - connector, +async def test_defer_multiple_jobs_at_once(connector: testing.InMemoryConnector): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + 
queueing_lock=None, + args={}, + scheduled_at=None, + ), + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ), + ] + ) + assert len(connector.jobs) == 2 + + +async def test_defer_same_job_with_queueing_lock_second_time_after_first_one_succeeded( + connector: testing.InMemoryConnector, ): - job_data = { - "task_name": "mytask", - "priority": 0, - "lock": None, - "queueing_lock": "some-lock", - "args": {}, - "scheduled_at": None, - "queue": "default", - } + jobs_to_defer: list[t.JobToDefer] = [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock="some-lock", + args={}, + scheduled_at=None, + ) + ] # 1. Defer job with queueing-lock - job_row = connector.defer_job_one(**job_data) + job_rows = await connector.defer_jobs_all(jobs_to_defer) assert len(connector.jobs) == 1 # 2. Defering a second time should fail, as first one # still in state `todo` with pytest.raises(exceptions.UniqueViolation): - connector.defer_job_one(**job_data) + await connector.defer_jobs_all(jobs_to_defer) assert len(connector.jobs) == 1 # 3. Finish first job - connector.finish_job_run(job_id=job_row["id"], status="finished", delete_job=False) + await connector.finish_job_run( + job_id=job_rows[0]["id"], status="finished", delete_job=False + ) # 4. 
Defering a second time should work now, # as first job in state `finished` - connector.defer_job_one(**job_data) + await connector.defer_jobs_all(jobs_to_defer) assert len(connector.jobs) == 2 -def test_current_locks(connector): +async def test_defer_jobs_all_violates_queueing_lock( + connector: testing.InMemoryConnector, +): + with pytest.raises(exceptions.UniqueViolation): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock="queueing_lock", + args={}, + scheduled_at=None, + ), + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock="queueing_lock", + args={}, + scheduled_at=None, + ), + ] + ) + + assert len(connector.jobs) == 0 + + +def test_current_locks(connector: testing.InMemoryConnector): connector.jobs = { 1: {"status": "todo", "lock": "foo"}, 2: {"status": "doing", "lock": "yay"}, @@ -147,7 +223,7 @@ def test_current_locks(connector): assert connector.current_locks == {"yay"} -def test_finished_jobs(connector): +def test_finished_jobs(connector: testing.InMemoryConnector): connector.jobs = { 1: {"status": "todo"}, 2: {"status": "doing"}, @@ -157,7 +233,7 @@ def test_finished_jobs(connector): assert connector.finished_jobs == [{"status": "succeeded"}, {"status": "failed"}] -def test_select_stalled_jobs_all(connector): +async def test_select_stalled_jobs_by_started_all(connector: testing.InMemoryConnector): connector.jobs = { # We're not selecting this job because it's "succeeded" 1: { @@ -211,13 +287,81 @@ def test_select_stalled_jobs_all(connector): 6: [{"at": conftest.aware_datetime(2000, 1, 1)}], } - results = connector.select_stalled_jobs_all( + results = await connector.select_stalled_jobs_by_started_all( queue="marsupilami", task_name="mytask", nb_seconds=0 ) assert [job["id"] for job in results] == [5, 6] -def test_delete_old_jobs_run(connector): +async def test_select_stalled_jobs_by_heartbeat_all( + connector: 
testing.InMemoryConnector, +): + worker1_id = 1 + worker2_id = 2 + worker3_id = 3 + + connector.jobs = { + # We're not selecting this job because it's "succeeded" + 1: { + "id": 1, + "status": "succeeded", + "queue_name": "marsupilami", + "task_name": "mytask", + "worker_id": worker1_id, + }, + # This one because it's the wrong queue + 2: { + "id": 2, + "status": "doing", + "queue_name": "other_queue", + "task_name": "mytask", + "worker_id": worker1_id, + }, + # This one because of the task + 3: { + "id": 3, + "status": "doing", + "queue_name": "marsupilami", + "task_name": "my_other_task", + "worker_id": worker1_id, + }, + # This one because it's not stalled + 4: { + "id": 4, + "status": "doing", + "queue_name": "marsupilami", + "task_name": "mytask", + "worker_id": worker3_id, + }, + # We're taking this one. + 5: { + "id": 5, + "status": "doing", + "queue_name": "marsupilami", + "task_name": "mytask", + "worker_id": worker1_id, + }, + # And this one + 6: { + "id": 6, + "status": "doing", + "queue_name": "marsupilami", + "task_name": "mytask", + "worker_id": worker2_id, + }, + } + connector.workers = { + worker2_id: conftest.aware_datetime(2000, 1, 1), + worker3_id: conftest.aware_datetime(2100, 1, 1), + } + + results = await connector.select_stalled_jobs_by_heartbeat_all( + queue="marsupilami", task_name="mytask", seconds_since_heartbeat=0 + ) + assert [job["id"] for job in results] == [5, 6] + + +async def test_delete_old_jobs_run(connector: testing.InMemoryConnector): connector.jobs = { # We're not deleting this job because it's "doing" 1: {"id": 1, "status": "doing", "queue_name": "marsupilami"}, @@ -235,155 +379,209 @@ def test_delete_old_jobs_run(connector): 4: [{"type": "succeeded", "at": conftest.aware_datetime(2000, 1, 1)}], } - connector.delete_old_jobs_run( + await connector.delete_old_jobs_run( queue="marsupilami", statuses=("succeeded"), nb_hours=0 ) assert 4 not in connector.jobs -def test_fetch_job_one(connector): +async def 
test_fetch_job_one(connector: testing.InMemoryConnector): # This one will be selected, then skipped the second time because it's processing - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=None, - lock="a", - queueing_lock="a", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="a", + queueing_lock="a", + args={}, + scheduled_at=None, + ) + ] ) - # This one because it's the wrong queue - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="other_queue", - scheduled_at=None, - lock="b", - queueing_lock="b", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="other_queue", + task_name="mytask", + priority=0, + lock="b", + queueing_lock="b", + args={}, + scheduled_at=None, + ) + ] ) # This one because of the scheduled_at - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=conftest.aware_datetime(2100, 1, 1), - lock="c", - queueing_lock="c", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="c", + queueing_lock="c", + args={}, + scheduled_at=conftest.aware_datetime(2100, 1, 1), + ) + ] ) # This one because of the lock - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=None, - lock="a", - queueing_lock="d", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="a", + queueing_lock="d", + args={}, + scheduled_at=None, + ) + ] ) # We're taking this one. 
- connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=None, - lock="e", - queueing_lock="e", + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="e", + queueing_lock="e", + args={}, + scheduled_at=None, + ) + ] ) - assert connector.fetch_job_one(queues=["marsupilami"])["id"] == 1 - assert connector.fetch_job_one(queues=["marsupilami"])["id"] == 5 + connector.workers = {1: utils.utcnow()} + assert (await connector.fetch_job_one(queues=["marsupilami"], worker_id=1))[ + "id" + ] == 1 + assert (await connector.fetch_job_one(queues=["marsupilami"], worker_id=1))[ + "id" + ] == 5 -def test_fetch_job_one_prioritized(connector): + +async def test_fetch_job_one_prioritized(connector: testing.InMemoryConnector): # This one will be selected second as it has a lower priority - connector.defer_job_one( - task_name="mytask", - priority=5, - args={}, - queue="marsupilami", - scheduled_at=None, - lock=None, - queueing_lock=None, + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=5, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ) + ] ) - # This one will be selected first as it has a higher priority - connector.defer_job_one( - task_name="mytask", - priority=7, - args={}, - queue="marsupilami", - scheduled_at=None, - lock=None, - queueing_lock=None, + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=7, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ) + ] ) - assert connector.fetch_job_one(queues=None)["id"] == 2 - assert connector.fetch_job_one(queues=None)["id"] == 1 + connector.workers = {1: utils.utcnow()} + assert (await connector.fetch_job_one(queues=None, worker_id=1))["id"] == 2 + assert (await connector.fetch_job_one(queues=None, worker_id=1))["id"] == 1 -def 
test_fetch_job_one_none_lock(connector): + +async def test_fetch_job_one_none_lock(connector: testing.InMemoryConnector): """Testing that 2 jobs with locks "None" don't block one another""" - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="default", - scheduled_at=None, - lock=None, - queueing_lock=None, - ) - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="default", - scheduled_at=None, - lock=None, - queueing_lock=None, + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ), + t.JobToDefer( + queue_name="default", + task_name="mytask", + priority=0, + lock=None, + queueing_lock=None, + args={}, + scheduled_at=None, + ), + ] ) - assert connector.fetch_job_one(queues=None)["id"] == 1 - assert connector.fetch_job_one(queues=None)["id"] == 2 + connector.workers = {1: utils.utcnow()} + + assert (await connector.fetch_job_one(queues=None, worker_id=1))["id"] == 1 + assert (await connector.fetch_job_one(queues=None, worker_id=1))["id"] == 2 + + +async def test_finish_job_run(connector: testing.InMemoryConnector): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="sher", + queueing_lock="houba", + args={}, + scheduled_at=None, + ) + ] + ) + connector.workers = {1: utils.utcnow()} -def test_finish_job_run(connector): - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=None, - lock="sher", - queueing_lock="houba", - ) - job_row = connector.fetch_job_one(queues=None) + job_row = await connector.fetch_job_one(queues=None, worker_id=1) id = job_row["id"] - connector.finish_job_run(job_id=id, status="finished", delete_job=False) + await connector.finish_job_run(job_id=id, status="finished", delete_job=False) assert connector.jobs[id]["attempts"] == 1 
assert connector.jobs[id]["status"] == "finished" -def test_retry_job_run(connector): - connector.defer_job_one( - task_name="mytask", - priority=0, - args={}, - queue="marsupilami", - scheduled_at=None, - lock="sher", - queueing_lock="houba", +async def test_retry_job_run(connector: testing.InMemoryConnector): + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="marsupilami", + task_name="mytask", + priority=0, + lock="sher", + queueing_lock="houba", + args={}, + scheduled_at=None, + ) + ] ) - job_row = connector.fetch_job_one(queues=None) + + connector.workers = {1: utils.utcnow()} + + job_row = await connector.fetch_job_one(queues=None, worker_id=1) id = job_row["id"] retry_at = conftest.aware_datetime(2000, 1, 1) - connector.retry_job_run( + await connector.retry_job_run( job_id=id, retry_at=retry_at, new_priority=3, @@ -400,29 +598,76 @@ def test_retry_job_run(connector): assert len(connector.events[id]) == 4 -def test_apply_schema_run(connector): +async def test_apply_schema_run(connector: testing.InMemoryConnector): # If we don't crash, it's enough - connector.apply_schema_run() + await connector.apply_schema_run() -def test_listen_for_jobs_run(connector): +async def test_listen_for_jobs_run(connector: testing.InMemoryConnector): # If we don't crash, it's enough - connector.listen_for_jobs_run() + await connector.listen_for_jobs_run() -async def test_defer_no_notify(connector): +async def test_defer_no_notify(connector: testing.InMemoryConnector): # This test is there to check that if the deferred queue doesn't match the # listened queue, the testing connector doesn't notify. 
+ event = asyncio.Event() - await connector.listen_notify(event=event, channels="some_other_channel") - connector.defer_job_one( - task_name="foo", - priority=0, - lock="bar", - args={}, - scheduled_at=None, - queue="baz", - queueing_lock="houba", + + async def on_notification(*, channel: str, payload: str): + event.set() + + await connector.listen_notify( + on_notification=on_notification, channels="some_other_channel" + ) + await connector.defer_jobs_all( + [ + t.JobToDefer( + queue_name="baz", + task_name="foo", + priority=0, + lock="bar", + queueing_lock="houba", + args={}, + scheduled_at=None, + ) + ] ) assert not event.is_set() + + +async def test_register_worker(connector: testing.InMemoryConnector): + then = utils.utcnow() + assert connector.workers == {} + row = await connector.register_worker_one() + assert then <= connector.workers[row["worker_id"]] <= utils.utcnow() + + +async def test_unregister_worker(connector: testing.InMemoryConnector): + connector.workers = {1: utils.utcnow()} + await connector.unregister_worker_run(worker_id=1) + assert connector.workers == {} + + +async def test_update_heartbeat_run(connector: testing.InMemoryConnector): + dt = conftest.aware_datetime(2000, 1, 1) + connector.workers = {1: dt} + await connector.update_heartbeat_run(worker_id=1) + assert dt < connector.workers[1] <= utils.utcnow() + + +async def test_prune_stalled_workers_all(connector: testing.InMemoryConnector): + connector.workers = { # type: ignore + "worker1": conftest.aware_datetime(2000, 1, 1), + "worker2": conftest.aware_datetime(2000, 1, 1), + "worker3": conftest.aware_datetime(2100, 1, 1), + } + pruned_workers = await connector.prune_stalled_workers_all( + seconds_since_heartbeat=0 + ) + assert pruned_workers == [ + {"worker_id": "worker1"}, + {"worker_id": "worker2"}, + ] + assert connector.workers == {"worker3": conftest.aware_datetime(2100, 1, 1)} diff --git a/tests/unit/test_utils.py b/tests/unit/test_utils.py index 8fe7702c3..c28b044f5 100644 --- 
a/tests/unit/test_utils.py +++ b/tests/unit/test_utils.py @@ -3,6 +3,7 @@ import asyncio import datetime import functools +import logging import sys import time import types @@ -164,161 +165,6 @@ def __(): return _ -async def test_run_tasks(finished, coro, short, caplog): - caplog.set_level("ERROR") - # Two functions in main coros, both go through their ends - await utils.run_tasks(main_coros=[coro(1), coro(2, sleep=0.01)]) - assert finished == {1, 2} - - assert caplog.records == [] - - -async def test_run_tasks_graceful_stop_callback_not_called( - launched, coro, callback, short -): - # A graceful_stop_callback is provided but isn't used because the main - # coros return on their own. - await utils.run_tasks(main_coros=[coro(1)], graceful_stop_callback=callback(2)) - assert launched == {1} - - -async def test_run_tasks_graceful_stop_callback_called(launched, coro, callback, short): - # A main function is provided, but it crashes. This time, the graceful callback - # is called. - with pytest.raises(exceptions.RunTaskError): - await utils.run_tasks( - main_coros=[coro(1, exc=ZeroDivisionError)], - graceful_stop_callback=callback(2), - ) - assert launched == {1, 2} - - -async def test_run_tasks_graceful_stop_callback_called_side( - launched, finished, coro, callback, short -): - # Two main coros provided, one crashes and one succeeds. 
The - # graceful_stop_callback is called and the coro that succeeds is awaited - # until it returns - with pytest.raises(exceptions.RunTaskError): - await utils.run_tasks( - main_coros=[coro(1, sleep=0.01), coro(2, exc=ZeroDivisionError)], - graceful_stop_callback=callback(3), - ) - assert launched == {1, 2, 3} - assert finished == {1, 2} - - -async def test_run_tasks_side_coro(launched, finished, coro, short): - # When all the main coros have returned, the remaining side coros are - # cancelled - await utils.run_tasks(main_coros=[coro(1), coro(2)], side_coros=[coro(3, sleep=1)]) - assert launched == {1, 2, 3} - assert finished == {1, 2} - - -async def test_run_tasks_side_coro_crash(launched, finished, coro, short): - # There's a main and a side. The side crashes. Main is still awaited and - # the unction raises - with pytest.raises(exceptions.RunTaskError) as exc_info: - await utils.run_tasks( - main_coros=[coro(1, sleep=0.01)], - side_coros=[coro(2, exc=ZeroDivisionError)], - ) - assert launched == {1, 2} - assert finished == {1, 2} - assert isinstance(exc_info.value.__cause__, ZeroDivisionError) - - -async def test_run_tasks_main_coro_crash(launched, finished, coro, short): - # There's a main and a side. The main crashes. Side is cancelled, and the - # function raises - with pytest.raises(exceptions.RunTaskError) as exc_info: - await utils.run_tasks( - main_coros=[coro(1, exc=ZeroDivisionError)], - side_coros=[coro(2, sleep=1)], - ) - assert launched == {1, 2} - assert finished == {1} - assert isinstance(exc_info.value.__cause__, ZeroDivisionError) - - -async def test_run_tasks_main_coro_one_crashes(launched, finished, coro, short): - # 2 mains. One main crashes. The other finishes, and then the function fails. 
- with pytest.raises(exceptions.RunTaskError) as exc_info: - await utils.run_tasks( - main_coros=[coro(1, exc=ZeroDivisionError), coro(2, sleep=0.001)], - ) - assert launched == {1, 2} - assert finished == {1, 2} - assert isinstance(exc_info.value.__cause__, ZeroDivisionError) - - -async def test_run_tasks_main_coro_both_crash(launched, finished, coro, short): - # 2 mains. The 2 crash. The reported error is for the first one. - with pytest.raises(exceptions.RunTaskError) as exc_info: - await utils.run_tasks( - main_coros=[ - coro(1, sleep=0.001, exc=ValueError), - coro(2, exc=ZeroDivisionError), - ], - ) - assert launched == {1, 2} - assert finished == {1, 2} - assert isinstance(exc_info.value.__cause__, ValueError) - - -@pytest.fixture -def count_logs(caplog): - """Count how many logs match all the arguments""" - caplog.set_level("DEBUG") - - def _(**kwargs): - return sum( - all((getattr(record, key, None) == value) for key, value in kwargs.items()) - for record in caplog.records - ) - - return _ - - -async def test_run_tasks_logs(coro, short, count_logs): - # 2 mains. The 2 crash. The reported error is for the first one. 
- with pytest.raises(exceptions.RunTaskError): - await utils.run_tasks( - main_coros=[ - coro(1, exc=ZeroDivisionError("foo")), - coro(2), - ], - side_coros=[ - coro(3, exc=RuntimeError("bar")), - coro(4), - ], - ) - assert 4 == count_logs( - levelname="DEBUG", - message="Started func", - action="func_start", - ) - - assert 1 == count_logs( - levelname="DEBUG", - message="func finished execution", - action="func_stop", - ) - - assert 1 == count_logs( - levelname="ERROR", - message="func error: ZeroDivisionError('foo')", - action="func_error", - ) - - assert 1 == count_logs( - levelname="ERROR", - message="func error: RuntimeError('bar')", - action="func_error", - ) - - def test_utcnow(mocker): dt = mocker.patch("datetime.datetime") assert utils.utcnow() == dt.now.return_value @@ -380,7 +226,7 @@ async def close(): awaited.append("closed") context = utils.AwaitableContext(open_coro=open, close_coro=close, return_value=1) - context.awaited = awaited + context.awaited = awaited # type: ignore return context @@ -504,3 +350,43 @@ async def func2(): assert await func2() == 4 assert result == [1, 2, 3] + + +@pytest.mark.parametrize( + "task_1_error, task_2_error", + [ + (None, None), + (ValueError("Nope from task_1"), None), + (None, ValueError("Nope from task_2")), + (ValueError("Nope from task_1"), ValueError("Nope from task_2")), + ], +) +async def test_cancel_and_capture_errors(task_1_error, task_2_error, caplog): + caplog.set_level(logging.ERROR) + + async def task_1(): + if task_1_error: + raise task_1_error + else: + await asyncio.sleep(0.5) + + async def task_2(): + if task_2_error: + raise task_2_error + else: + await asyncio.sleep(0.5) + + tasks = [asyncio.create_task(task_1()), asyncio.create_task(task_2())] + await asyncio.sleep(0.01) + await asyncio.wait_for(utils.cancel_and_capture_errors(tasks), timeout=100) + + expected_error_count = sum(1 for error in (task_1_error, task_2_error) if error) + + assert len(caplog.records) == expected_error_count + + 
+@pytest.mark.parametrize( + "queues, result", [(None, "all queues"), (["foo", "bar"], "queues foo, bar")] +) +def test_queues_display(queues, result): + assert utils.queues_display(queues) == result diff --git a/tests/unit/test_worker.py b/tests/unit/test_worker.py index 790b295bb..49c691937 100644 --- a/tests/unit/test_worker.py +++ b/tests/unit/test_worker.py @@ -1,708 +1,836 @@ from __future__ import annotations import asyncio +import datetime +import signal +from typing import cast import pytest +from pytest_mock import MockerFixture -from procrastinate import exceptions, job_context, jobs, tasks, worker -from procrastinate.retry import RetryDecision +from procrastinate import utils +from procrastinate.app import App +from procrastinate.exceptions import JobAborted +from procrastinate.job_context import JobContext +from procrastinate.jobs import DEFAULT_QUEUE, Job, Status +from procrastinate.testing import InMemoryConnector +from procrastinate.worker import Worker -from .. import conftest + +async def start_worker(worker: Worker): + task = asyncio.create_task(worker.run()) + await asyncio.sleep(0.01) + return task @pytest.fixture -def test_worker(app): - return worker.Worker(app, queues=None) +async def worker(app: App, request: pytest.FixtureRequest): + kwargs = request.param if hasattr(request, "param") else {} + worker = Worker(app, **kwargs) + yield worker + if worker.run_task and not worker.run_task.done(): + worker.stop() + try: + await asyncio.wait_for(worker.run_task, timeout=0.2) + except asyncio.CancelledError: + pass -@pytest.fixture -def context(app): - def _(job): - return job_context.JobContext(app=app, worker_name="worker", job=job) +@pytest.mark.parametrize( + "available_jobs, concurrency", + [ + (0, 1), + (1, 1), + (2, 1), + (1, 2), + (2, 2), + (4, 2), + ], +) +async def test_worker_run_no_wait(app: App, available_jobs, concurrency): + worker = Worker(app, wait=False, concurrency=concurrency) - return _ + @app.task + async def perform_job(): + 
pass + for i in range(available_jobs): + await perform_job.defer_async() -def test_worker_additional_context(app): - worker_obj = worker.Worker(app=app, additional_context={"foo": "bar"}) - assert worker_obj.base_context.additional_context == {"foo": "bar"} + await asyncio.wait_for(worker.run(), 0.1) -async def test_run(test_worker, mocker, caplog): - caplog.set_level("INFO") +async def test_worker_run_wait_until_cancelled(app: App): + worker = Worker(app, wait=True) + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(worker.run(), 0.05) - single_worker = mocker.Mock() - async def mock(worker_id): - single_worker(worker_id=worker_id) +async def test_worker_run_wait_stop(app: App, caplog): + caplog.set_level("INFO") + worker = Worker(app, wait=True) + run_task = asyncio.create_task(worker.run()) + # wait just enough to make sure the task is running + await asyncio.sleep(0.01) + worker.stop() + await asyncio.wait_for(run_task, 0.1) - test_worker.single_worker = mock + assert set(caplog.messages) == { + "Starting worker on all queues", + "Stop requested", + "Stopped worker on all queues", + "No periodic task found, periodic deferrer will not run.", + } - await test_worker.run() - single_worker.assert_called() +async def test_worker_run_once_log_messages(app: App, caplog): + caplog.set_level("INFO") + worker = Worker(app, wait=False) + await asyncio.wait_for(worker.run(), 0.1) assert set(caplog.messages) == { "Starting worker on all queues", + "No job found. 
Stopping worker because wait=False", "Stopped worker on all queues", "No periodic task found, periodic deferrer will not run.", } +async def test_worker_run_wait_listen(worker): + await start_worker(worker) + connector = cast(InMemoryConnector, worker.app.connector) + + assert connector.notify_channels == ["procrastinate_any_queue_v1"] + + @pytest.mark.parametrize( - "side_effect, status", + "available_jobs, worker", [ - (None, "succeeded"), - (exceptions.JobAborted(), "aborted"), - (exceptions.JobError(), "failed"), - (exceptions.TaskNotFound(), "failed"), + (2, {"concurrency": 1}), + (3, {"concurrency": 2}), ], + indirect=["worker"], ) -async def test_process_job( - mocker, test_worker, job_factory, connector, side_effect, status +async def test_worker_run_respects_concurrency( + worker: Worker, app: App, available_jobs ): - async def coro(*args, **kwargs): - pass + complete_tasks = asyncio.Event() + + @app.task + async def perform_job(): + await complete_tasks.wait() + + for _ in range(available_jobs): + await perform_job.defer_async() + + await start_worker(worker) + + connector = cast(InMemoryConnector, app.connector) + + doings_jobs = list(await connector.list_jobs_all(status=Status.DOING.value)) + todo_jobs = list(await connector.list_jobs_all(status=Status.TODO.value)) + + assert len(doings_jobs) == worker.concurrency + assert len(todo_jobs) == available_jobs - worker.concurrency + + complete_tasks.set() + + +async def test_worker_run_respects_concurrency_variant(worker: Worker, app: App): + worker.concurrency = 2 + + max_parallelism = 0 + parallel_jobs = 0 + + @app.task + async def perform_job(sleep: float): + nonlocal max_parallelism + nonlocal parallel_jobs + parallel_jobs += 1 + + max_parallelism = max(max_parallelism, parallel_jobs) + await asyncio.sleep(sleep) + parallel_jobs -= 1 + + await perform_job.defer_async(sleep=0.05) + await perform_job.defer_async(sleep=0.1) + + await start_worker(worker) + + # wait enough to run out of job and to have one 
pending job + await asyncio.sleep(0.05) + + assert max_parallelism == 2 + assert parallel_jobs == 1 + + # defer more jobs than the worker can process in parallel + await perform_job.defer_async(sleep=0.05) + await perform_job.defer_async(sleep=0.05) + await perform_job.defer_async(sleep=0.05) + + await asyncio.sleep(0.2) + assert max_parallelism == 2 + assert parallel_jobs == 0 + + +async def test_worker_run_fetches_job_on_notification(worker, app: App): + complete_tasks = asyncio.Event() + + @app.task + async def perform_job(): + await complete_tasks.wait() + + await start_worker(worker) + + connector = cast(InMemoryConnector, app.connector) + + assert len([query for query in connector.queries if query[0] == "fetch_job"]) == 1 + + await asyncio.sleep(0.01) - test_worker.run_job = mocker.Mock(side_effect=side_effect or coro) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + assert len([query for query in connector.queries if query[0] == "fetch_job"]) == 1 - await test_worker.process_job(job=job) + await perform_job.defer_async() + await asyncio.sleep(0.01) - test_worker.run_job.assert_called_with(job=job, worker_id=0) - assert connector.jobs[1]["status"] == status + assert len([query for query in connector.queries if query[0] == "fetch_job"]) == 2 + + complete_tasks.set() + + +@pytest.mark.parametrize( + "worker", + [({"fetch_job_polling_interval": 0.05})], + indirect=["worker"], +) +async def test_worker_run_respects_polling(worker, app): + await start_worker(worker) + + connector = cast(InMemoryConnector, app.connector) + await asyncio.sleep(0.01) + + assert len([query for query in connector.queries if query[0] == "fetch_job"]) == 1 + + await asyncio.sleep(0.07) + + assert len([query for query in connector.queries if query[0] == "fetch_job"]) == 2 @pytest.mark.parametrize( - "side_effect, delete_jobs", + "worker, fail_task", [ - (None, "successful"), - (None, "always"), - (exceptions.JobError(), "always"), + ({"delete_jobs": 
"never"}, False), + ({"delete_jobs": "never"}, True), + ({"delete_jobs": "successful"}, True), ], + indirect=["worker"], ) -async def test_process_job_with_deletion( - mocker, app, job_factory, connector, side_effect, delete_jobs -): - async def coro(*args, **kwargs): - pass +async def test_process_job_without_deletion(app: App, worker, fail_task): + @app.task() + async def task_func(): + if fail_task: + raise ValueError("Nope") - test_worker = worker.Worker(app, delete_jobs=delete_jobs) - test_worker.run_job = mocker.Mock(side_effect=side_effect or coro) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + job_id = await task_func.defer_async() - await test_worker.process_job(job=job) + await start_worker(worker) - assert 1 not in connector.jobs + connector = cast(InMemoryConnector, app.connector) + assert job_id in connector.jobs @pytest.mark.parametrize( - "side_effect, delete_jobs", + "worker, fail_task", [ - (None, "never"), - (exceptions.JobError(), "never"), - (exceptions.JobError(), "successful"), + ({"delete_jobs": "successful"}, False), + ({"delete_jobs": "always"}, False), + ({"delete_jobs": "always"}, True), ], + indirect=["worker"], ) -async def test_process_job_without_deletion( - mocker, app, job_factory, connector, side_effect, delete_jobs -): - async def coro(*args, **kwargs): - pass +async def test_process_job_with_deletion(app: App, worker, fail_task): + @app.task() + async def task_func(): + if fail_task: + raise ValueError("Nope") - test_worker = worker.Worker(app, delete_jobs=delete_jobs) - test_worker.run_job = mocker.Mock(side_effect=side_effect or coro) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + job_id = await task_func.defer_async() - await test_worker.process_job(job=job) + await start_worker(worker) - assert 1 in connector.jobs + connector = cast(InMemoryConnector, app.connector) + assert job_id not in connector.jobs -async def test_process_job_retry_failed_job( - mocker, 
test_worker, job_factory, connector -): - async def coro(*args, **kwargs): - pass +async def test_stopping_worker_waits_for_task(app: App, worker): + complete_task_event = asyncio.Event() - retry_at = conftest.aware_datetime(2000, 1, 1) - test_worker.run_job = mocker.Mock( - side_effect=exceptions.JobError( - retry_exception=exceptions.JobRetry( - retry_decision=RetryDecision(retry_at=retry_at) - ) - ) - ) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + @app.task() + async def task_func(): + await complete_task_event.wait() - await test_worker.process_job(job=job, worker_id=0) + run_task = await start_worker(worker) - test_worker.run_job.assert_called_with(job=job, worker_id=0) - assert connector.jobs[1]["status"] == "todo" - assert connector.jobs[1]["scheduled_at"] == retry_at - assert connector.jobs[1]["attempts"] == 1 + job_id = await task_func.defer_async() + await asyncio.sleep(0.05) -async def test_process_job_retry_failed_job_critical( - mocker, test_worker, job_factory, connector -): - class TestException(BaseException): - pass + # this should still be running waiting for the task to complete + assert run_task.done() is False - job_exception = exceptions.JobError(critical=True) - job_exception.__cause__ = TestException() + # tell the task to complete + complete_task_event.set() - test_worker.run_job = mocker.Mock(side_effect=job_exception) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + # this should successfully complete the job and re-raise the CancelledError + with pytest.raises(asyncio.CancelledError): + run_task.cancel() + await asyncio.wait_for(run_task, 0.1) - # Exceptions that extend BaseException should be re-raised after the failed job - # is scheduled for retry (if retry is applicable). 
- with pytest.raises(TestException): - await test_worker.process_job(job=job, worker_id=0) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - test_worker.run_job.assert_called_with(job=job, worker_id=0) - assert connector.jobs[1]["status"] == "failed" - assert connector.jobs[1]["scheduled_at"] is None - assert connector.jobs[1]["attempts"] == 1 +@pytest.mark.parametrize("mode", [("stop"), ("cancel")]) +async def test_stopping_worker_aborts_job_after_timeout(app: App, worker, mode): + complete_task_event = asyncio.Event() + worker.shutdown_graceful_timeout = 0.02 -async def test_process_job_retry_failed_job_retry_critical( - mocker, test_worker, job_factory, connector -): - class TestException(BaseException): - pass + task_cancelled = False - retry_at = conftest.aware_datetime(2000, 1, 1) - job_exception = exceptions.JobError( - critical=True, - retry_exception=exceptions.JobRetry( - retry_decision=RetryDecision(retry_at=retry_at) - ), - ) - job_exception.__cause__ = TestException() + @app.task() + async def task_func(): + nonlocal task_cancelled + try: + await complete_task_event.wait() + except asyncio.CancelledError: + task_cancelled = True + raise - test_worker.run_job = mocker.Mock(side_effect=job_exception) - job = job_factory(id=1) - await test_worker.job_manager.defer_job_async(job) + run_task = await start_worker(worker) - # Exceptions that extend BaseException should be re-raised after the failed job - # is scheduled for retry (if retry is applicable). 
- with pytest.raises(TestException): - await test_worker.process_job(job=job, worker_id=0) + job_id = await task_func.defer_async() - test_worker.run_job.assert_called_with(job=job, worker_id=0) - assert connector.jobs[1]["status"] == "todo" - assert connector.jobs[1]["scheduled_at"] == retry_at - assert connector.jobs[1]["attempts"] == 1 + await asyncio.sleep(0.05) + # this should still be running waiting for the task to complete + assert run_task.done() is False -async def test_run_job(app): - result = [] + # we don't tell task to complete, it will be cancelled after timeout - @app.task(queue="yay", name="task_func") - def task_func(a, b): - result.append(a + b) + if mode == "stop": + worker.stop() - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="task_func", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - await test_worker.run_job(job=job, worker_id=3) + await asyncio.sleep(0.1) + assert run_task.done() + await run_task + else: + with pytest.raises(asyncio.CancelledError): + run_task.cancel() - assert result == [12] + await asyncio.sleep(0.1) + assert run_task.done() + await run_task + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.ABORTED + assert task_cancelled -async def test_run_job_async(app): - result = [] - @app.task(queue="yay", name="task_func") - async def task_func(a, b): - result.append(a + b) +async def test_stopping_worker_job_suppresses_cancellation(app: App, worker): + complete_task_event = asyncio.Event() + worker.shutdown_graceful_timeout = 0.02 - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="task_func", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - await test_worker.run_job(job=job, worker_id=3) + @app.task() + async def task_func(): + try: + await complete_task_event.wait() + except asyncio.CancelledError: + # supress the 
cancellation + pass - assert result == [12] + run_task = await start_worker(worker) + job_id = await task_func.defer_async() -async def test_run_job_semi_async(app): - result = [] + await asyncio.sleep(0.05) - @app.task(queue="yay", name="task_func") - def task_func(a, b): - async def inner(): - result.append(a + b) + # this should still be running waiting for the task to complete + assert run_task.done() is False - return inner() + worker.stop() - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="task_func", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - await test_worker.run_job(job=job, worker_id=3) + await asyncio.sleep(0.1) + assert run_task.done() + await run_task - assert result == [12] + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED -async def test_run_job_log_result(caplog, app): - caplog.set_level("INFO") +@pytest.mark.parametrize( + "worker", + [({"additional_context": {"foo": "bar"}})], + indirect=["worker"], +) +async def test_worker_passes_additional_context(app: App, worker): + @app.task(pass_context=True) + async def task_func(jobContext: JobContext): + assert jobContext.additional_context["foo"] == "bar" - result = [] + job_id = await task_func.defer_async() - def task_func(a, b): # pylint: disable=unused-argument - s = a + b - result.append(s) - return s + await start_worker(worker) - task = tasks.Task(task_func, blueprint=app, queue="yay", name="job") + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - app.tasks = {"task_func": task} - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="task_func", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - await test_worker.run_job(job=job, worker_id=3) +async def test_run_job_async(app: App, worker): + result = [] - assert result == [12] 
+ @app.task(queue="yay", name="task_func") + async def task_func(a, b): + result.append(a + b) - records = [record for record in caplog.records if record.action == "job_success"] - assert len(records) == 1 - record = records[0] - assert record.result == 12 - assert "Result: 12" in record.message + job_id = await task_func.defer_async(a=9, b=3) + await start_worker(worker) + assert result == [12] -@pytest.mark.parametrize( - "worker_name, logger_name, record_worker_name", - [(None, "worker", "worker"), ("w1", "worker.w1", "w1")], -) -async def test_run_job_log_name( - caplog, app, worker_name, logger_name, record_worker_name -): - caplog.set_level("INFO") + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - test_worker = worker.Worker(app, name=worker_name, wait=False) - @app.task - def task(): - pass +async def test_run_job_sync(app: App, worker): + result = [] - await task.defer_async() + @app.task(queue="yay", name="task_func") + def task_func(a, b): + result.append(a + b) - await test_worker.run() + job_id = await task_func.defer_async(a=9, b=3) - # We're not interested in defer logs - records = [r for r in caplog.records if "worker" in r.name] + await start_worker(worker) + assert result == [12] - assert len(records) - record_names = [record.name for record in records] - assert all([name.endswith(logger_name) for name in record_names]) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - worker_names = [getattr(record, "worker", {}).get("name") for record in records] - assert all([name == record_worker_name for name in worker_names]) +async def test_run_job_semi_async(app: App, worker): + result = [] -async def test_run_job_aborted(app, caplog): - caplog.set_level("INFO") + @app.task(queue="yay", name="task_func") + def task_func(a, b): + async def inner(): + result.append(a + b) - def job_func(a, b): # pylint: disable=unused-argument - raise exceptions.JobAborted() + 
return inner() - task = tasks.Task(job_func, blueprint=app, queue="yay", name="job") - task.func = job_func + job_id = await task_func.defer_async(a=9, b=3) - app.tasks = {"job": task} + await start_worker(worker) - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="job", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - with pytest.raises(exceptions.JobAborted): - await test_worker.run_job(job=job, worker_id=3) + assert result == [12] - assert ( - len( - [ - r - for r in caplog.records - if r.levelname == "INFO" and "Aborted" in r.message - ] - ) - == 1 - ) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED -async def test_run_job_error(app, caplog, mocker): +async def test_run_job_log_result(caplog, app: App, worker): caplog.set_level("INFO") - def job_func(a, b): # pylint: disable=unused-argument - raise ValueError("nope") + @app.task(queue="yay", name="task_func") + async def task_func(a, b): + return a + b - task = tasks.Task(job_func, blueprint=app, queue="yay", name="job") - task.func = job_func + await task_func.defer_async(a=9, b=3) - app.tasks = {"job": task} + await start_worker(worker) - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="job", - queue="yay", - ) - app.job_manager.get_job_status_async = mocker.AsyncMock(return_value="doing") - test_worker = worker.Worker(app, queues=["yay"]) - with pytest.raises(exceptions.JobError): - await test_worker.run_job(job=job, worker_id=3) + records = [record for record in caplog.records if record.action == "job_success"] + assert len(records) == 1 + record = records[0] + assert record.result == 12 + assert "Result: 12" in record.message - assert ( - len( - [ - r - for r in caplog.records - if r.levelname == "ERROR" and "to retry" not in r.message - ] + +async def test_run_task_not_found_status(app: App, worker, 
caplog): + job = await app.job_manager.defer_job_async( + Job( + task_name="random_task_name", + queue=DEFAULT_QUEUE, + lock=None, + queueing_lock=None, ) - == 1 ) + assert job.id + await start_worker(worker) + await asyncio.sleep(0.01) + status = await app.job_manager.get_job_status_async(job.id) + assert status == Status.FAILED -async def test_run_job_critical_error(app, caplog, mocker): - caplog.set_level("INFO") - - def job_func(a, b): # pylint: disable=unused-argument - raise BaseException("nope") + records = [record for record in caplog.records if record.action == "task_not_found"] + assert len(records) == 1 + record = records[0] + assert record.levelname == "ERROR" - task = tasks.Task(job_func, blueprint=app, queue="yay", name="job") - task.func = job_func - app.tasks = {"job": task} +class CustomCriticalError(BaseException): + pass - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="job", - queue="yay", - ) - app.job_manager.get_job_status_async = mocker.AsyncMock(return_value="doing") - test_worker = worker.Worker(app, queues=["yay"]) - with pytest.raises(exceptions.JobError) as exc_info: - await test_worker.run_job(job=job, worker_id=3) - assert exc_info.value.critical is True +@pytest.mark.parametrize( + "critical_error", + [ + (False), + (True), + ], +) +async def test_run_job_error(app: App, worker, critical_error, caplog): + @app.task(queue="yay", name="task_func") + def task_func(a, b): + raise CustomCriticalError("Nope") if critical_error else ValueError("Nope") + job_id = await task_func.defer_async(a=9, b=3) -async def test_run_job_retry(app, caplog, mocker): - caplog.set_level("INFO") + await start_worker(worker) - def job_func(a, b): # pylint: disable=unused-argument - raise ValueError("nope") + await asyncio.sleep(0.05) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.FAILED - task = tasks.Task(job_func, blueprint=app, queue="yay", name="job", 
retry=True) - task.func = job_func + records = [ + record + for record in caplog.records + if hasattr(record, "action") and record.action == "job_error" + ] + assert len(records) == 1 + record = records[0] + assert record.levelname == "ERROR" + assert "to retry" not in record.message - app.tasks = {"job": task} - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - task_name="job", - queueing_lock="houba", - queue="yay", - ) - app.job_manager.get_job_status_async = mocker.AsyncMock(return_value="doing") - test_worker = worker.Worker(app, queues=["yay"]) - with pytest.raises(exceptions.JobError) as exc_info: - await test_worker.run_job(job=job, worker_id=3) +async def test_run_job_raising_job_aborted(app: App, worker, caplog): + caplog.set_level("INFO") - assert isinstance(exc_info.value.retry_exception, exceptions.JobRetry) + @app.task(queue="yay", name="task_func") + async def task_func(): + raise JobAborted() - assert ( - len( - [ - r - for r in caplog.records - if r.levelname == "INFO" and "to retry" in r.message - ] - ) - == 1 - ) - assert len([r for r in caplog.records if r.levelname == "ERROR"]) == 0 + job_id = await task_func.defer_async() + await start_worker(worker) -async def test_run_job_not_found(app): - job = jobs.Job( - id=16, - task_kwargs={"a": 9, "b": 3}, - lock="sherlock", - queueing_lock="houba", - task_name="job", - queue="yay", - ) - test_worker = worker.Worker(app, queues=["yay"]) - with pytest.raises(exceptions.TaskNotFound): - await test_worker.run_job(job=job, worker_id=3) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.ABORTED + records = [record for record in caplog.records if record.action == "job_aborted"] + assert len(records) == 1 + record = records[0] + assert record.levelname == "INFO" + assert "Aborted" in record.message -async def test_run_job_pass_context(app): - result = [] - @app.task(queue="yay", name="job", pass_context=True) - def task_func(test_context, a): - 
result.extend([test_context, a]) - - job = jobs.Job( - id=16, - task_kwargs={"a": 1}, - lock="sherlock", - queueing_lock="houba", - task_name="job", - queue="yay", - ) - test_worker = worker.Worker( - app, queues=["yay"], name="my_worker", additional_context={"foo": "bar"} - ) - context = test_worker.context_for_worker(worker_id=3) +async def test_abort_async_job(app: App, worker): + @app.task(queue="yay", name="task_func") + async def task_func(): + await asyncio.sleep(0.2) - await test_worker.run_job(job=job, worker_id=3) + job_id = await task_func.defer_async() - context = context.evolve(task=task_func) + await start_worker(worker) + await app.job_manager.cancel_job_by_id_async(job_id, abort=True) + await asyncio.sleep(0.01) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.ABORTED - assert result == [ - context, - 1, - ] +async def test_abort_async_job_while_finishing(app: App, worker, mocker: MockerFixture): + """ + Tests that aborting a job after that job completes but before the job status is updated + does not prevent the job status from being updated + """ + connector = cast(InMemoryConnector, app.connector) + original_finish_job_run = connector.finish_job_run -async def test_wait_for_job_with_job(app, mocker): - test_worker = worker.Worker(app) - # notify_event is set to None initially, and we skip run() - test_worker.notify_event = mocker.Mock() + complete_finish_job_event = asyncio.Event() - wait_for = mocker.Mock() + async def delayed_finish_job_run(**arguments): + await complete_finish_job_event.wait() + return await original_finish_job_run(**arguments) - async def mock(coro, timeout): - wait_for(coro, timeout=timeout) + connector.finish_job_run = mocker.AsyncMock(name="finish_job_run") + connector.finish_job_run.side_effect = delayed_finish_job_run - mocker.patch("asyncio.wait_for", mock) + @app.task(queue="yay", name="task_func") + async def task_func(): + pass - await test_worker.wait_for_job(timeout=42) + 
job_id = await task_func.defer_async() - wait_for.assert_called_with(test_worker.notify_event.wait.return_value, timeout=42) + await start_worker(worker) + await app.job_manager.cancel_job_by_id_async(job_id, abort=True) + await asyncio.sleep(0.01) + complete_finish_job_event.set() + await asyncio.sleep(0.01) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - assert test_worker.notify_event.mock_calls == [ - mocker.call.clear(), - mocker.call.wait(), - mocker.call.clear(), - ] +async def test_abort_async_job_preventing_cancellation(app: App, worker): + """ + Tests that an async job can prevent itself from being aborted + """ -async def test_wait_for_job_without_job(app, mocker): - test_worker = worker.Worker(app) - # notify_event is set to None initially, and we skip run() - test_worker.notify_event = mocker.Mock() + @app.task(queue="yay", name="task_func") + async def task_func(): + try: + await asyncio.sleep(0.2) + except asyncio.CancelledError: + pass - wait_for = mocker.Mock(side_effect=asyncio.TimeoutError) + job_id = await task_func.defer_async() - async def mock(coro, timeout): - wait_for(coro, timeout=timeout) + await start_worker(worker) + await app.job_manager.cancel_job_by_id_async(job_id, abort=True) + await asyncio.sleep(0.01) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.SUCCEEDED - mocker.patch("asyncio.wait_for", mock) - await test_worker.wait_for_job(timeout=42) +@pytest.mark.parametrize( + "worker", + [ + ({"listen_notify": False, "abort_job_polling_interval": 0.05}), + ({"listen_notify": True, "abort_job_polling_interval": 1}), + ], + indirect=["worker"], +) +async def test_run_job_abort(app: App, worker: Worker): + @app.task(queue="yay", name="task_func", pass_context=True) + async def task_func(job_context: JobContext): + while True: + await asyncio.sleep(0.01) + if job_context.should_abort(): + raise JobAborted() - 
wait_for.assert_called_with(test_worker.notify_event.wait.return_value, timeout=42) + job_id = await task_func.defer_async() - assert test_worker.notify_event.mock_calls == [ - mocker.call.clear(), - mocker.call.wait(), - ] + await start_worker(worker) + await app.job_manager.cancel_job_by_id_async(job_id, abort=True) -async def test_single_worker_no_wait(app, mocker): - process_job = mocker.Mock() - wait_for_job = mocker.Mock() + await asyncio.sleep(0.01 if worker.listen_notify else 0.05) - class TestWorker(worker.Worker): - async def process_job(self, job): - process_job(job=job) + status = await app.job_manager.get_job_status_async(job_id) + assert status == Status.ABORTED + assert worker._job_ids_to_abort == {}, ( + "Expected cancelled job id to be removed from set" + ) - async def wait_for_job(self, timeout): - wait_for_job(timeout) - await TestWorker(app=app, wait=False).single_worker(worker_id=0) +@pytest.mark.parametrize( + "critical_error, recover_on_attempt_number, expected_status, " + "expected_attempts, expected_info_logs, expected_error_logs", + [ + (False, 2, "succeeded", 2, 1, 0), + (True, 2, "succeeded", 2, 1, 0), + (False, 3, "failed", 2, 1, 1), + (True, 3, "failed", 2, 1, 1), + ], +) +async def test_run_job_retry_failed_job( + app: App, + worker, + critical_error, + recover_on_attempt_number, + expected_status, + expected_attempts, + expected_info_logs, + expected_error_logs, + caplog, +): + caplog.set_level("INFO") - assert process_job.called is False - assert wait_for_job.called is False + worker.wait = False + attempt = 0 -async def test_single_worker_stop_during_execution(app, mocker): - process_job = mocker.Mock() - wait_for_job = mocker.Mock() + @app.task(retry=1) + def task_func(): + nonlocal attempt + attempt += 1 + if attempt < recover_on_attempt_number: + raise CustomCriticalError("Nope") if critical_error else ValueError("Nope") - await app.configure_task("bla").defer_async() + job_id = await task_func.defer_async() - class 
TestWorker(worker.Worker): - async def process_job(self, job, worker_id): - process_job(job=job, worker_id=worker_id) - self.stop_requested = True + await start_worker(worker) - async def wait_for_job(self, timeout): - wait_for_job(timeout=timeout) + await asyncio.sleep(0.01) - await TestWorker(app=app).single_worker(worker_id=0) + connector = cast(InMemoryConnector, app.connector) + job_row = connector.jobs[job_id] + assert job_row["status"] == expected_status + assert job_row["attempts"] == expected_attempts - assert wait_for_job.called is False - process_job.assert_called_once() + info_records = [ + record + for record in caplog.records + if record.levelname == "INFO" and "to retry" in record.message + ] + error_records = [record for record in caplog.records if record.levelname == "ERROR"] + assert len(info_records) == expected_info_logs + assert len(error_records) == expected_error_logs -async def test_single_worker_stop_during_wait(app, mocker): - process_job = mocker.Mock() - wait_for_job = mocker.Mock() +async def test_run_log_actions(app: App, caplog, worker): + caplog.set_level("DEBUG") - await app.configure_task("bla").defer_async() + done = asyncio.Event() - class TestWorker(worker.Worker): - async def process_job(self, job, worker_id): - process_job(job=job, worker_id=worker_id) + @app.task(queue="some_queue") + def t(): + done.set() - async def wait_for_job(self, timeout): - wait_for_job() - self.stop_requested = True + await t.defer_async() - await TestWorker(app=app).single_worker(worker_id=0) + await start_worker(worker) - process_job.assert_called_once() - wait_for_job.assert_called_once() + await asyncio.wait_for(done.wait(), timeout=0.05) + connector = cast(InMemoryConnector, app.connector) + assert [q[0] for q in connector.queries] == [ + "defer_jobs", + "prune_stalled_workers", + "register_worker", + "fetch_job", + "finish_job", + "fetch_job", + ] -async def test_single_worker_spread_wait(app, mocker): - process_job = mocker.Mock() - 
wait_for_job = mocker.Mock() + logs = {(r.action, r.levelname) for r in caplog.records if hasattr(r, "action")} + # remove the periodic_deferrer_no_task log record because that makes the test flaky + assert { + ("about_to_defer_jobs", "DEBUG"), + ("jobs_deferred", "INFO"), + ("start_worker", "INFO"), + ("loaded_job_info", "DEBUG"), + ("start_job", "INFO"), + ("job_success", "INFO"), + ("finish_task", "DEBUG"), + } <= logs + + +async def test_run_log_current_job_when_stopping(app: App, worker, caplog): + caplog.set_level("DEBUG") + complete_job_event = asyncio.Event() + + @app.task(queue="some_queue") + async def t(): + await complete_job_event.wait() + + job_id = await t.defer_async() + run_task = await start_worker(worker) + worker.stop() + await asyncio.sleep(0.01) + complete_job_event.set() + + await asyncio.wait_for(run_task, timeout=0.05) + # We want to make sure that the log that names the current running task fired. + logs = " ".join(r.message for r in caplog.records) + assert "Stop requested" in logs + assert ( + f"Waiting for job to finish: worker: tests.unit.test_worker.t[{job_id}]()" + in logs + ) - await app.configure_task("bla").defer_async() - class TestWorker(worker.Worker): - stop = False +async def test_run_no_signal_handlers(worker, kill_own_pid): + worker.install_signal_handlers = False + await start_worker(worker) + with pytest.raises(KeyboardInterrupt): + await asyncio.sleep(0.01) + # Test that handlers are NOT installed + kill_own_pid(signal=signal.SIGINT) - async def process_job(self, job, worker_id): - process_job(job=job, worker_id=worker_id) - async def wait_for_job(self, timeout): - wait_for_job(timeout) - self.stop_requested = self.stop - self.stop = True +async def test_worker_id_and_heartbeat_lifecycle(app: App): + connector = cast(InMemoryConnector, app.connector) - await TestWorker(app=app, timeout=4, concurrency=7).single_worker(worker_id=3) + assert connector.workers == {} - process_job.assert_called_once() - assert 
wait_for_job.call_args_list == [mocker.call(4 * (3 + 1)), mocker.call(4 * 7)] + worker = Worker(app, update_heartbeat_interval=0.05) + assert worker.worker_id is None + run_task = await start_worker(worker) -def test_context_for_worker(app): - test_worker = worker.Worker(app=app, name="foo") - expected = job_context.JobContext(app=app, worker_id=3, worker_name="foo") + worker_id = worker.worker_id + assert worker_id is not None and worker_id > 0 - context = test_worker.context_for_worker(worker_id=3) + await asyncio.sleep(0.01) - assert context == expected + heartbeat1 = connector.workers[worker_id] + assert heartbeat1 is not None + await asyncio.sleep(0.05) -def test_context_for_worker_kwargs(app): - test_worker = worker.Worker(app=app, name="foo") - expected = job_context.JobContext(app=app, worker_id=3, worker_name="bar") + heartbeat2 = connector.workers[worker_id] + assert heartbeat2 > heartbeat1 - context = test_worker.context_for_worker(worker_id=3, worker_name="bar") + worker.stop() + await run_task - assert context == expected + assert worker.worker_id is None + assert connector.workers == {} -def test_context_for_worker_value_kept(app): - test_worker = worker.Worker(app=app, name="foo") - expected = job_context.JobContext(app=app, worker_id=3, worker_name="bar") +async def test_job_receives_worker_id(app: App): + @app.task(queue="some_queue") + async def t(): + await asyncio.sleep(0.08) - test_worker.context_for_worker(worker_id=3, worker_name="bar") - context = test_worker.context_for_worker(worker_id=3) + job_id = await t.defer_async() - assert context == expected + connector = cast(InMemoryConnector, app.connector) + job_row = connector.jobs[job_id] + assert job_row["worker_id"] is None -def test_context_for_worker_reset(app): - test_worker = worker.Worker(app=app, name="foo") - expected = job_context.JobContext(app=app, worker_id=3, worker_name="foo") + worker = Worker(app, wait=False) + run_task = await start_worker(worker) - 
test_worker.context_for_worker(worker_id=3, worker_name="bar") - context = test_worker.context_for_worker(worker_id=3, reset=True) + await asyncio.sleep(0.05) - assert context == expected + assert job_row["status"] == "doing" + assert job_row["worker_id"] == worker.worker_id + await asyncio.sleep(0.05) -def test_worker_copy_additional_context(app): - additional_context = {"foo": "bar"} - test_worker = worker.Worker( - app=app, - name="worker", - additional_context=additional_context, - ) + assert job_row["status"] == "succeeded" + assert job_row["worker_id"] is None - # mutate the additional_context object and test that we have the original - # value in the worker - additional_context["foo"] = "baz" - assert test_worker.base_context.additional_context == {"foo": "bar"} + await run_task -def test_context_for_worker_with_additional_context(app): - additional_context = {"foo": "bar"} - test_worker = worker.Worker( - app=app, - name="worker", - additional_context=additional_context, - ) +async def test_worker_prunes_stalled_workers(app: App): + worker = Worker(app, wait=False) - context1 = test_worker.context_for_worker(worker_id=3) + worker1_id = 1 + worker2_id = 2 - # mutate the additional_context object for one worker and test that it - # hasn't changed for other workers - context1.additional_context["foo"] = "baz" + connector = cast(InMemoryConnector, app.connector) + connector.workers = { + worker1_id: utils.utcnow() + - datetime.timedelta(seconds=worker.stalled_worker_timeout - 1), + worker2_id: utils.utcnow() + - datetime.timedelta(seconds=worker.stalled_worker_timeout + 1), + } - context2 = test_worker.context_for_worker(worker_id=4) + run_task = await start_worker(worker) + await run_task - assert context2.additional_context == {"foo": "bar"} + assert worker1_id in connector.workers + assert worker2_id not in connector.workers diff --git a/tests/unit/test_worker_sync.py b/tests/unit/test_worker_sync.py index 0ce9dadb1..48715162f 100644 --- 
a/tests/unit/test_worker_sync.py +++ b/tests/unit/test_worker_sync.py @@ -1,28 +1,22 @@ from __future__ import annotations -import asyncio - import pytest -from procrastinate import exceptions, job_context, worker +from procrastinate import exceptions, worker +from procrastinate.app import App @pytest.fixture -def test_worker(app): +async def test_worker(app: App) -> worker.Worker: return worker.Worker(app=app, queues=["yay"]) -@pytest.fixture -def context(): - return job_context.JobContext() - - -def test_worker_find_task_missing(test_worker): +async def test_worker_find_task_missing(test_worker): with pytest.raises(exceptions.TaskNotFound): test_worker.find_task("foobarbaz") -def test_worker_find_task(app): +async def test_worker_find_task(app: App): test_worker = worker.Worker(app=app, queues=["yay"]) @app.task(name="foo") @@ -32,29 +26,9 @@ def task_func(): assert test_worker.find_task("foo") == task_func -def test_stop(test_worker, caplog): +async def test_stop(test_worker, caplog): caplog.set_level("INFO") - test_worker.notify_event = asyncio.Event() test_worker.stop() - assert test_worker.stop_requested is True - assert test_worker.notify_event.is_set() assert caplog.messages == ["Stop requested"] - - -def test_stop_log_job(test_worker, caplog, context, job_factory): - caplog.set_level("INFO") - test_worker.notify_event = asyncio.Event() - job = job_factory(id=42, task_name="bla") - ctx = context.evolve(job=job, worker_id=0) - test_worker.current_contexts[0] = ctx - - test_worker.stop() - - assert test_worker.stop_requested is True - assert test_worker.notify_event.is_set() - assert caplog.messages == [ - "Stop requested", - "Waiting for job to finish: worker 0: bla[42]()", - ] diff --git a/uv.lock b/uv.lock new file mode 100644 index 000000000..81343701f --- /dev/null +++ b/uv.lock @@ -0,0 +1,1882 @@ +version = 1 +revision = 3 +requires-python = ">=3.9" +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform 
== 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] + +[[package]] +name = "accessible-pygments" +version = "0.0.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/c1/bbac6a50d02774f91572938964c582fff4270eee73ab822a4aeea4d8b11b/accessible_pygments-0.0.5.tar.gz", hash = "sha256:40918d3e6a2b619ad424cb91e556bd3bd8865443d9f22f1dcdf79e33c8046872", size = 1377899, upload-time = "2024-05-10T11:23:10.216Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/3f/95338030883d8c8b91223b4e21744b04d11b161a3ef117295d8241f50ab4/accessible_pygments-0.0.5-py3-none-any.whl", hash = "sha256:88ae3211e68a1d0b011504b2ffc1691feafce124b845bd072ab6f9f66f34d4b7", size = 1395903, upload-time = "2024-05-10T11:23:08.421Z" }, +] + +[[package]] +name = "aiopg" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "async-timeout" }, + { name = "psycopg2-binary" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b0/0a/aba75a9ffcb1704b98c39986344230eaa70c40ac28e5ca635df231db912f/aiopg-1.4.0.tar.gz", hash = "sha256:116253bef86b4d954116716d181e9a0294037f266718b2e1c9766af995639d71", size = 35593, upload-time = "2022-10-26T09:31:49.478Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/9b/2f/ab8690bf995171b9a8b60b98a2ca91d4108a42422abf10bf622397437d26/aiopg-1.4.0-py3-none-any.whl", hash = "sha256:aea46e8aff30b039cfa818e6db4752c97656e893fc75e5a5dc57355a9e9dedbd", size = 34770, upload-time = "2022-10-26T09:31:48.019Z" }, +] + +[[package]] +name = "alabaster" +version = "0.7.16" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/3e/13dd8e5ed9094e734ac430b5d0eb4f2bb001708a8b7856cbf8e084e001ba/alabaster-0.7.16.tar.gz", hash = "sha256:75a8b99c28a5dad50dd7f8ccdd447a121ddb3892da9e53d1ca5cca3106d58d65", size = 23776, upload-time = "2024-01-10T00:56:10.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/34/d4e1c02d3bee589efb5dfa17f88ea08bdb3e3eac12bc475462aec52ed223/alabaster-0.7.16-py3-none-any.whl", hash = "sha256:b46733c07dce03ae4e150330b975c75737fa60f0a7c591b6c8bf4928a28e2c92", size = 13511, upload-time = "2024-01-10T00:56:08.388Z" }, +] + +[[package]] +name = "alabaster" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +sdist = { url = "https://files.pythonhosted.org/packages/a6/f8/d9c74d0daf3f742840fd818d69cfae176fa332022fd44e3469487d5a9420/alabaster-1.0.0.tar.gz", hash = 
"sha256:c00dca57bca26fa62a6d7d0a9fcce65f3e026e9bfe33e9c538fd3fbb2144fd9e", size = 24210, upload-time = "2024-07-26T18:15:03.762Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7e/b3/6b4067be973ae96ba0d615946e314c5ae35f9f993eca561b356540bb0c2b/alabaster-1.0.0-py3-none-any.whl", hash = "sha256:fc6786402dc3fcb2de3cabd5fe455a2db534b371124f1f21de8731783dec828b", size = 13929, upload-time = "2024-07-26T18:15:02.05Z" }, +] + +[[package]] +name = "asgiref" +version = "3.9.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/90/61/0aa957eec22ff70b830b22ff91f825e70e1ef732c06666a805730f28b36b/asgiref-3.9.1.tar.gz", hash = "sha256:a5ab6582236218e5ef1648f242fd9f10626cfd4de8dc377db215d5d5098e3142", size = 36870, upload-time = "2025-07-08T09:07:43.344Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/3c/0464dcada90d5da0e71018c04a140ad6349558afb30b3051b4264cc5b965/asgiref-3.9.1-py3-none-any.whl", hash = "sha256:f3bba7092a48005b5f5bacd747d36ee4a5a61f4a269a6df590b43144355ebd2c", size = 23790, upload-time = "2025-07-08T09:07:41.548Z" }, +] + +[[package]] +name = "async-timeout" +version = "4.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/87/d6/21b30a550dafea84b1b8eee21b5e23fa16d010ae006011221f33dcd8d7f8/async-timeout-4.0.3.tar.gz", hash = "sha256:4640d96be84d82d02ed59ea2b7105a0f7b33abe8703703cd0ab0bf87c427522f", size = 8345, upload-time = "2023-08-10T16:35:56.907Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/fa/e01228c2938de91d47b307831c62ab9e4001e747789d0b05baf779a6488c/async_timeout-4.0.3-py3-none-any.whl", hash = "sha256:7405140ff1230c310e51dc27b3145b9092d659ce68ff733fb0cefe3ee42be028", size = 5721, upload-time = "2023-08-10T16:35:55.203Z" }, +] + +[[package]] +name = "attrs" +version = "25.3.0" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, +] + +[[package]] +name = "babel" +version = "2.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7d/6b/d52e42361e1aa00709585ecc30b3f9684b3ab62530771402248b1b1d6240/babel-2.17.0.tar.gz", hash = "sha256:0c54cffb19f690cdcc52a3b50bcbf71e07a808d1c80d549f2459b9d2cf0afb9d", size = 9951852, upload-time = "2025-02-01T15:17:41.026Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b8/3fe70c75fe32afc4bb507f75563d39bc5642255d1d94f1f23604725780bf/babel-2.17.0-py3-none-any.whl", hash = "sha256:4d0b53093fdfb4b21c92b5213dba5a1b23885afa8383709427046b21c366e5f2", size = 10182537, upload-time = "2025-02-01T15:17:37.39Z" }, +] + +[[package]] +name = "backports-asyncio-runner" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/8e/ff/70dca7d7cb1cbc0edb2c6cc0c38b65cba36cccc491eca64cabd5fe7f8670/backports_asyncio_runner-1.2.0.tar.gz", hash = "sha256:a5aa7b2b7d8f8bfcaa2b57313f70792df84e32a2a746f585213373f900b42162", size = 69893, upload-time = "2025-07-02T02:27:15.685Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = 
"sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.13.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/85/2e/3e5079847e653b1f6dc647aa24549d68c6addb4c595cc0d902d1b19308ad/beautifulsoup4-4.13.5.tar.gz", hash = "sha256:5e70131382930e7c3de33450a2f54a63d5e4b19386eab43a5b34d594268f3695", size = 622954, upload-time = "2025-08-24T14:06:13.168Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/eb/f4151e0c7377a6e08a38108609ba5cede57986802757848688aeedd1b9e8/beautifulsoup4-4.13.5-py3-none-any.whl", hash = "sha256:642085eaa22233aceadff9c69651bc51e8bf3f874fb6d7104ece2beb24b47c4a", size = 105113, upload-time = "2025-08-24T14:06:14.884Z" }, +] + +[[package]] +name = "certifi" +version = "2025.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/dc/67/960ebe6bf230a96cda2e0abcf73af550ec4f090005363542f0765df162e0/certifi-2025.8.3.tar.gz", hash = "sha256:e564105f78ded564e3ae7c923924435e1daa7463faeab5bb932bc53ffae63407", size = 162386, upload-time = "2025-08-03T03:07:47.08Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e5/48/1549795ba7742c948d2ad169c1c8cdbae65bc450d6cd753d124b17c8cd32/certifi-2025.8.3-py3-none-any.whl", hash = "sha256:f6c12493cfb1b06ba2ff328595af9350c65d6644968e5d3a2ffd78699af217a5", size = 161216, upload-time = "2025-08-03T03:07:45.777Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/83/2d/5fd176ceb9b2fc619e63405525573493ca23441330fcdaee6bef9460e924/charset_normalizer-3.4.3.tar.gz", hash = "sha256:6fce4b8500244f6fcb71465d4a4930d132ba9ab8e71a7859e6a5d59851068d14", 
size = 122371, upload-time = "2025-08-09T07:57:28.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d6/98/f3b8013223728a99b908c9344da3aa04ee6e3fa235f19409033eda92fb78/charset_normalizer-3.4.3-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:fb7f67a1bfa6e40b438170ebdc8158b78dc465a5a67b6dde178a46987b244a72", size = 207695, upload-time = "2025-08-09T07:55:36.452Z" }, + { url = "https://files.pythonhosted.org/packages/21/40/5188be1e3118c82dcb7c2a5ba101b783822cfb413a0268ed3be0468532de/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cc9370a2da1ac13f0153780040f465839e6cccb4a1e44810124b4e22483c93fe", size = 147153, upload-time = "2025-08-09T07:55:38.467Z" }, + { url = "https://files.pythonhosted.org/packages/37/60/5d0d74bc1e1380f0b72c327948d9c2aca14b46a9efd87604e724260f384c/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:07a0eae9e2787b586e129fdcbe1af6997f8d0e5abaa0bc98c0e20e124d67e601", size = 160428, upload-time = "2025-08-09T07:55:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/85/9a/d891f63722d9158688de58d050c59dc3da560ea7f04f4c53e769de5140f5/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:74d77e25adda8581ffc1c720f1c81ca082921329452eba58b16233ab1842141c", size = 157627, upload-time = "2025-08-09T07:55:41.706Z" }, + { url = "https://files.pythonhosted.org/packages/65/1a/7425c952944a6521a9cfa7e675343f83fd82085b8af2b1373a2409c683dc/charset_normalizer-3.4.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d0e909868420b7049dafd3a31d45125b31143eec59235311fc4c57ea26a4acd2", size = 152388, upload-time = "2025-08-09T07:55:43.262Z" }, + { url = 
"https://files.pythonhosted.org/packages/f0/c9/a2c9c2a355a8594ce2446085e2ec97fd44d323c684ff32042e2a6b718e1d/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c6f162aabe9a91a309510d74eeb6507fab5fff92337a15acbe77753d88d9dcf0", size = 150077, upload-time = "2025-08-09T07:55:44.903Z" }, + { url = "https://files.pythonhosted.org/packages/3b/38/20a1f44e4851aa1c9105d6e7110c9d020e093dfa5836d712a5f074a12bf7/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:4ca4c094de7771a98d7fbd67d9e5dbf1eb73efa4f744a730437d8a3a5cf994f0", size = 161631, upload-time = "2025-08-09T07:55:46.346Z" }, + { url = "https://files.pythonhosted.org/packages/a4/fa/384d2c0f57edad03d7bec3ebefb462090d8905b4ff5a2d2525f3bb711fac/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:02425242e96bcf29a49711b0ca9f37e451da7c70562bc10e8ed992a5a7a25cc0", size = 159210, upload-time = "2025-08-09T07:55:47.539Z" }, + { url = "https://files.pythonhosted.org/packages/33/9e/eca49d35867ca2db336b6ca27617deed4653b97ebf45dfc21311ce473c37/charset_normalizer-3.4.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:78deba4d8f9590fe4dae384aeff04082510a709957e968753ff3c48399f6f92a", size = 153739, upload-time = "2025-08-09T07:55:48.744Z" }, + { url = "https://files.pythonhosted.org/packages/2a/91/26c3036e62dfe8de8061182d33be5025e2424002125c9500faff74a6735e/charset_normalizer-3.4.3-cp310-cp310-win32.whl", hash = "sha256:d79c198e27580c8e958906f803e63cddb77653731be08851c7df0b1a14a8fc0f", size = 99825, upload-time = "2025-08-09T07:55:50.305Z" }, + { url = "https://files.pythonhosted.org/packages/e2/c6/f05db471f81af1fa01839d44ae2a8bfeec8d2a8b4590f16c4e7393afd323/charset_normalizer-3.4.3-cp310-cp310-win_amd64.whl", hash = "sha256:c6e490913a46fa054e03699c70019ab869e990270597018cef1d8562132c2669", size = 107452, upload-time = "2025-08-09T07:55:51.461Z" }, + { url = 
"https://files.pythonhosted.org/packages/7f/b5/991245018615474a60965a7c9cd2b4efbaabd16d582a5547c47ee1c7730b/charset_normalizer-3.4.3-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:b256ee2e749283ef3ddcff51a675ff43798d92d746d1a6e4631bf8c707d22d0b", size = 204483, upload-time = "2025-08-09T07:55:53.12Z" }, + { url = "https://files.pythonhosted.org/packages/c7/2a/ae245c41c06299ec18262825c1569c5d3298fc920e4ddf56ab011b417efd/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:13faeacfe61784e2559e690fc53fa4c5ae97c6fcedb8eb6fb8d0a15b475d2c64", size = 145520, upload-time = "2025-08-09T07:55:54.712Z" }, + { url = "https://files.pythonhosted.org/packages/3a/a4/b3b6c76e7a635748c4421d2b92c7b8f90a432f98bda5082049af37ffc8e3/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:00237675befef519d9af72169d8604a067d92755e84fe76492fef5441db05b91", size = 158876, upload-time = "2025-08-09T07:55:56.024Z" }, + { url = "https://files.pythonhosted.org/packages/e2/e6/63bb0e10f90a8243c5def74b5b105b3bbbfb3e7bb753915fe333fb0c11ea/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:585f3b2a80fbd26b048a0be90c5aae8f06605d3c92615911c3a2b03a8a3b796f", size = 156083, upload-time = "2025-08-09T07:55:57.582Z" }, + { url = "https://files.pythonhosted.org/packages/87/df/b7737ff046c974b183ea9aa111b74185ac8c3a326c6262d413bd5a1b8c69/charset_normalizer-3.4.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e78314bdc32fa80696f72fa16dc61168fda4d6a0c014e0380f9d02f0e5d8a07", size = 150295, upload-time = "2025-08-09T07:55:59.147Z" }, + { url = "https://files.pythonhosted.org/packages/61/f1/190d9977e0084d3f1dc169acd060d479bbbc71b90bf3e7bf7b9927dec3eb/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = 
"sha256:96b2b3d1a83ad55310de8c7b4a2d04d9277d5591f40761274856635acc5fcb30", size = 148379, upload-time = "2025-08-09T07:56:00.364Z" }, + { url = "https://files.pythonhosted.org/packages/4c/92/27dbe365d34c68cfe0ca76f1edd70e8705d82b378cb54ebbaeabc2e3029d/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:939578d9d8fd4299220161fdd76e86c6a251987476f5243e8864a7844476ba14", size = 160018, upload-time = "2025-08-09T07:56:01.678Z" }, + { url = "https://files.pythonhosted.org/packages/99/04/baae2a1ea1893a01635d475b9261c889a18fd48393634b6270827869fa34/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:fd10de089bcdcd1be95a2f73dbe6254798ec1bda9f450d5828c96f93e2536b9c", size = 157430, upload-time = "2025-08-09T07:56:02.87Z" }, + { url = "https://files.pythonhosted.org/packages/2f/36/77da9c6a328c54d17b960c89eccacfab8271fdaaa228305330915b88afa9/charset_normalizer-3.4.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1e8ac75d72fa3775e0b7cb7e4629cec13b7514d928d15ef8ea06bca03ef01cae", size = 151600, upload-time = "2025-08-09T07:56:04.089Z" }, + { url = "https://files.pythonhosted.org/packages/64/d4/9eb4ff2c167edbbf08cdd28e19078bf195762e9bd63371689cab5ecd3d0d/charset_normalizer-3.4.3-cp311-cp311-win32.whl", hash = "sha256:6cf8fd4c04756b6b60146d98cd8a77d0cdae0e1ca20329da2ac85eed779b6849", size = 99616, upload-time = "2025-08-09T07:56:05.658Z" }, + { url = "https://files.pythonhosted.org/packages/f4/9c/996a4a028222e7761a96634d1820de8a744ff4327a00ada9c8942033089b/charset_normalizer-3.4.3-cp311-cp311-win_amd64.whl", hash = "sha256:31a9a6f775f9bcd865d88ee350f0ffb0e25936a7f930ca98995c05abf1faf21c", size = 107108, upload-time = "2025-08-09T07:56:07.176Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5e/14c94999e418d9b87682734589404a25854d5f5d0408df68bc15b6ff54bb/charset_normalizer-3.4.3-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:e28e334d3ff134e88989d90ba04b47d84382a828c061d0d1027b1b12a62b39b1", size = 
205655, upload-time = "2025-08-09T07:56:08.475Z" }, + { url = "https://files.pythonhosted.org/packages/7d/a8/c6ec5d389672521f644505a257f50544c074cf5fc292d5390331cd6fc9c3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0cacf8f7297b0c4fcb74227692ca46b4a5852f8f4f24b3c766dd94a1075c4884", size = 146223, upload-time = "2025-08-09T07:56:09.708Z" }, + { url = "https://files.pythonhosted.org/packages/fc/eb/a2ffb08547f4e1e5415fb69eb7db25932c52a52bed371429648db4d84fb1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c6fd51128a41297f5409deab284fecbe5305ebd7e5a1f959bee1c054622b7018", size = 159366, upload-time = "2025-08-09T07:56:11.326Z" }, + { url = "https://files.pythonhosted.org/packages/82/10/0fd19f20c624b278dddaf83b8464dcddc2456cb4b02bb902a6da126b87a1/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:3cfb2aad70f2c6debfbcb717f23b7eb55febc0bb23dcffc0f076009da10c6392", size = 157104, upload-time = "2025-08-09T07:56:13.014Z" }, + { url = "https://files.pythonhosted.org/packages/16/ab/0233c3231af734f5dfcf0844aa9582d5a1466c985bbed6cedab85af9bfe3/charset_normalizer-3.4.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1606f4a55c0fd363d754049cdf400175ee96c992b1f8018b993941f221221c5f", size = 151830, upload-time = "2025-08-09T07:56:14.428Z" }, + { url = "https://files.pythonhosted.org/packages/ae/02/e29e22b4e02839a0e4a06557b1999d0a47db3567e82989b5bb21f3fbbd9f/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:027b776c26d38b7f15b26a5da1044f376455fb3766df8fc38563b4efbc515154", size = 148854, upload-time = "2025-08-09T07:56:16.051Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/6b/e2539a0a4be302b481e8cafb5af8792da8093b486885a1ae4d15d452bcec/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:42e5088973e56e31e4fa58eb6bd709e42fc03799c11c42929592889a2e54c491", size = 160670, upload-time = "2025-08-09T07:56:17.314Z" }, + { url = "https://files.pythonhosted.org/packages/31/e7/883ee5676a2ef217a40ce0bffcc3d0dfbf9e64cbcfbdf822c52981c3304b/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:cc34f233c9e71701040d772aa7490318673aa7164a0efe3172b2981218c26d93", size = 158501, upload-time = "2025-08-09T07:56:18.641Z" }, + { url = "https://files.pythonhosted.org/packages/c1/35/6525b21aa0db614cf8b5792d232021dca3df7f90a1944db934efa5d20bb1/charset_normalizer-3.4.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:320e8e66157cc4e247d9ddca8e21f427efc7a04bbd0ac8a9faf56583fa543f9f", size = 153173, upload-time = "2025-08-09T07:56:20.289Z" }, + { url = "https://files.pythonhosted.org/packages/50/ee/f4704bad8201de513fdc8aac1cabc87e38c5818c93857140e06e772b5892/charset_normalizer-3.4.3-cp312-cp312-win32.whl", hash = "sha256:fb6fecfd65564f208cbf0fba07f107fb661bcd1a7c389edbced3f7a493f70e37", size = 99822, upload-time = "2025-08-09T07:56:21.551Z" }, + { url = "https://files.pythonhosted.org/packages/39/f5/3b3836ca6064d0992c58c7561c6b6eee1b3892e9665d650c803bd5614522/charset_normalizer-3.4.3-cp312-cp312-win_amd64.whl", hash = "sha256:86df271bf921c2ee3818f0522e9a5b8092ca2ad8b065ece5d7d9d0e9f4849bcc", size = 107543, upload-time = "2025-08-09T07:56:23.115Z" }, + { url = "https://files.pythonhosted.org/packages/65/ca/2135ac97709b400c7654b4b764daf5c5567c2da45a30cdd20f9eefe2d658/charset_normalizer-3.4.3-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:14c2a87c65b351109f6abfc424cab3927b3bdece6f706e4d12faaf3d52ee5efe", size = 205326, upload-time = "2025-08-09T07:56:24.721Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/11/98a04c3c97dd34e49c7d247083af03645ca3730809a5509443f3c37f7c99/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:41d1fc408ff5fdfb910200ec0e74abc40387bccb3252f3f27c0676731df2b2c8", size = 146008, upload-time = "2025-08-09T07:56:26.004Z" }, + { url = "https://files.pythonhosted.org/packages/60/f5/4659a4cb3c4ec146bec80c32d8bb16033752574c20b1252ee842a95d1a1e/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:1bb60174149316da1c35fa5233681f7c0f9f514509b8e399ab70fea5f17e45c9", size = 159196, upload-time = "2025-08-09T07:56:27.25Z" }, + { url = "https://files.pythonhosted.org/packages/86/9e/f552f7a00611f168b9a5865a1414179b2c6de8235a4fa40189f6f79a1753/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:30d006f98569de3459c2fc1f2acde170b7b2bd265dc1943e87e1a4efe1b67c31", size = 156819, upload-time = "2025-08-09T07:56:28.515Z" }, + { url = "https://files.pythonhosted.org/packages/7e/95/42aa2156235cbc8fa61208aded06ef46111c4d3f0de233107b3f38631803/charset_normalizer-3.4.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:416175faf02e4b0810f1f38bcb54682878a4af94059a1cd63b8747244420801f", size = 151350, upload-time = "2025-08-09T07:56:29.716Z" }, + { url = "https://files.pythonhosted.org/packages/c2/a9/3865b02c56f300a6f94fc631ef54f0a8a29da74fb45a773dfd3dcd380af7/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6aab0f181c486f973bc7262a97f5aca3ee7e1437011ef0c2ec04b5a11d16c927", size = 148644, upload-time = "2025-08-09T07:56:30.984Z" }, + { url = "https://files.pythonhosted.org/packages/77/d9/cbcf1a2a5c7d7856f11e7ac2d782aec12bdfea60d104e60e0aa1c97849dc/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = 
"sha256:fdabf8315679312cfa71302f9bd509ded4f2f263fb5b765cf1433b39106c3cc9", size = 160468, upload-time = "2025-08-09T07:56:32.252Z" }, + { url = "https://files.pythonhosted.org/packages/f6/42/6f45efee8697b89fda4d50580f292b8f7f9306cb2971d4b53f8914e4d890/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:bd28b817ea8c70215401f657edef3a8aa83c29d447fb0b622c35403780ba11d5", size = 158187, upload-time = "2025-08-09T07:56:33.481Z" }, + { url = "https://files.pythonhosted.org/packages/70/99/f1c3bdcfaa9c45b3ce96f70b14f070411366fa19549c1d4832c935d8e2c3/charset_normalizer-3.4.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:18343b2d246dc6761a249ba1fb13f9ee9a2bcd95decc767319506056ea4ad4dc", size = 152699, upload-time = "2025-08-09T07:56:34.739Z" }, + { url = "https://files.pythonhosted.org/packages/a3/ad/b0081f2f99a4b194bcbb1934ef3b12aa4d9702ced80a37026b7607c72e58/charset_normalizer-3.4.3-cp313-cp313-win32.whl", hash = "sha256:6fb70de56f1859a3f71261cbe41005f56a7842cc348d3aeb26237560bfa5e0ce", size = 99580, upload-time = "2025-08-09T07:56:35.981Z" }, + { url = "https://files.pythonhosted.org/packages/9a/8f/ae790790c7b64f925e5c953b924aaa42a243fb778fed9e41f147b2a5715a/charset_normalizer-3.4.3-cp313-cp313-win_amd64.whl", hash = "sha256:cf1ebb7d78e1ad8ec2a8c4732c7be2e736f6e5123a4146c5b89c9d1f585f8cef", size = 107366, upload-time = "2025-08-09T07:56:37.339Z" }, + { url = "https://files.pythonhosted.org/packages/8e/91/b5a06ad970ddc7a0e513112d40113e834638f4ca1120eb727a249fb2715e/charset_normalizer-3.4.3-cp314-cp314-macosx_10_13_universal2.whl", hash = "sha256:3cd35b7e8aedeb9e34c41385fda4f73ba609e561faedfae0a9e75e44ac558a15", size = 204342, upload-time = "2025-08-09T07:56:38.687Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ec/1edc30a377f0a02689342f214455c3f6c2fbedd896a1d2f856c002fc3062/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:b89bc04de1d83006373429975f8ef9e7932534b8cc9ca582e4db7d20d91816db", size = 145995, upload-time = "2025-08-09T07:56:40.048Z" }, + { url = "https://files.pythonhosted.org/packages/17/e5/5e67ab85e6d22b04641acb5399c8684f4d37caf7558a53859f0283a650e9/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:2001a39612b241dae17b4687898843f254f8748b796a2e16f1051a17078d991d", size = 158640, upload-time = "2025-08-09T07:56:41.311Z" }, + { url = "https://files.pythonhosted.org/packages/f1/e5/38421987f6c697ee3722981289d554957c4be652f963d71c5e46a262e135/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:8dcfc373f888e4fb39a7bc57e93e3b845e7f462dacc008d9749568b1c4ece096", size = 156636, upload-time = "2025-08-09T07:56:43.195Z" }, + { url = "https://files.pythonhosted.org/packages/a0/e4/5a075de8daa3ec0745a9a3b54467e0c2967daaaf2cec04c845f73493e9a1/charset_normalizer-3.4.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:18b97b8404387b96cdbd30ad660f6407799126d26a39ca65729162fd810a99aa", size = 150939, upload-time = "2025-08-09T07:56:44.819Z" }, + { url = "https://files.pythonhosted.org/packages/02/f7/3611b32318b30974131db62b4043f335861d4d9b49adc6d57c1149cc49d4/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ccf600859c183d70eb47e05a44cd80a4ce77394d1ac0f79dbd2dd90a69a3a049", size = 148580, upload-time = "2025-08-09T07:56:46.684Z" }, + { url = "https://files.pythonhosted.org/packages/7e/61/19b36f4bd67f2793ab6a99b979b4e4f3d8fc754cbdffb805335df4337126/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:53cd68b185d98dde4ad8990e56a58dea83a4162161b1ea9272e5c9182ce415e0", size = 159870, upload-time = "2025-08-09T07:56:47.941Z" }, + { url = 
"https://files.pythonhosted.org/packages/06/57/84722eefdd338c04cf3030ada66889298eaedf3e7a30a624201e0cbe424a/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:30a96e1e1f865f78b030d65241c1ee850cdf422d869e9028e2fc1d5e4db73b92", size = 157797, upload-time = "2025-08-09T07:56:49.756Z" }, + { url = "https://files.pythonhosted.org/packages/72/2a/aff5dd112b2f14bcc3462c312dce5445806bfc8ab3a7328555da95330e4b/charset_normalizer-3.4.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:d716a916938e03231e86e43782ca7878fb602a125a91e7acb8b5112e2e96ac16", size = 152224, upload-time = "2025-08-09T07:56:51.369Z" }, + { url = "https://files.pythonhosted.org/packages/b7/8c/9839225320046ed279c6e839d51f028342eb77c91c89b8ef2549f951f3ec/charset_normalizer-3.4.3-cp314-cp314-win32.whl", hash = "sha256:c6dbd0ccdda3a2ba7c2ecd9d77b37f3b5831687d8dc1b6ca5f56a4880cc7b7ce", size = 100086, upload-time = "2025-08-09T07:56:52.722Z" }, + { url = "https://files.pythonhosted.org/packages/ee/7a/36fbcf646e41f710ce0a563c1c9a343c6edf9be80786edeb15b6f62e17db/charset_normalizer-3.4.3-cp314-cp314-win_amd64.whl", hash = "sha256:73dc19b562516fc9bcf6e5d6e596df0b4eb98d87e4f79f3ae71840e6ed21361c", size = 107400, upload-time = "2025-08-09T07:56:55.172Z" }, + { url = "https://files.pythonhosted.org/packages/c2/ca/9a0983dd5c8e9733565cf3db4df2b0a2e9a82659fd8aa2a868ac6e4a991f/charset_normalizer-3.4.3-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:70bfc5f2c318afece2f5838ea5e4c3febada0be750fcf4775641052bbba14d05", size = 207520, upload-time = "2025-08-09T07:57:11.026Z" }, + { url = "https://files.pythonhosted.org/packages/39/c6/99271dc37243a4f925b09090493fb96c9333d7992c6187f5cfe5312008d2/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:23b6b24d74478dc833444cbd927c338349d6ae852ba53a0d02a2de1fce45b96e", size = 147307, upload-time = "2025-08-09T07:57:12.4Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/69/132eab043356bba06eb333cc2cc60c6340857d0a2e4ca6dc2b51312886b3/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:34a7f768e3f985abdb42841e20e17b330ad3aaf4bb7e7aeeb73db2e70f077b99", size = 160448, upload-time = "2025-08-09T07:57:13.712Z" }, + { url = "https://files.pythonhosted.org/packages/04/9a/914d294daa4809c57667b77470533e65def9c0be1ef8b4c1183a99170e9d/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:fb731e5deb0c7ef82d698b0f4c5bb724633ee2a489401594c5c88b02e6cb15f7", size = 157758, upload-time = "2025-08-09T07:57:14.979Z" }, + { url = "https://files.pythonhosted.org/packages/b0/a8/6f5bcf1bcf63cb45625f7c5cadca026121ff8a6c8a3256d8d8cd59302663/charset_normalizer-3.4.3-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:257f26fed7d7ff59921b78244f3cd93ed2af1800ff048c33f624c87475819dd7", size = 152487, upload-time = "2025-08-09T07:57:16.332Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/d3d0e9592f4e504f9dea08b8db270821c909558c353dc3b457ed2509f2fb/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1ef99f0456d3d46a50945c98de1774da86f8e992ab5c77865ea8b8195341fc19", size = 150054, upload-time = "2025-08-09T07:57:17.576Z" }, + { url = "https://files.pythonhosted.org/packages/20/30/5f64fe3981677fe63fa987b80e6c01042eb5ff653ff7cec1b7bd9268e54e/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:2c322db9c8c89009a990ef07c3bcc9f011a3269bc06782f916cd3d9eed7c9312", size = 161703, upload-time = "2025-08-09T07:57:20.012Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ef/dd08b2cac9284fd59e70f7d97382c33a3d0a926e45b15fc21b3308324ffd/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_s390x.whl", hash = "sha256:511729f456829ef86ac41ca78c63a5cb55240ed23b4b737faca0eb1abb1c41bc", size = 
159096, upload-time = "2025-08-09T07:57:21.329Z" }, + { url = "https://files.pythonhosted.org/packages/45/8c/dcef87cfc2b3f002a6478f38906f9040302c68aebe21468090e39cde1445/charset_normalizer-3.4.3-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:88ab34806dea0671532d3f82d82b85e8fc23d7b2dd12fa837978dad9bb392a34", size = 153852, upload-time = "2025-08-09T07:57:22.608Z" }, + { url = "https://files.pythonhosted.org/packages/63/86/9cbd533bd37883d467fcd1bd491b3547a3532d0fbb46de2b99feeebf185e/charset_normalizer-3.4.3-cp39-cp39-win32.whl", hash = "sha256:16a8770207946ac75703458e2c743631c79c59c5890c80011d536248f8eaa432", size = 99840, upload-time = "2025-08-09T07:57:23.883Z" }, + { url = "https://files.pythonhosted.org/packages/ce/d6/7e805c8e5c46ff9729c49950acc4ee0aeb55efb8b3a56687658ad10c3216/charset_normalizer-3.4.3-cp39-cp39-win_amd64.whl", hash = "sha256:d22dbedd33326a4a5190dd4fe9e9e693ef12160c77382d9e87919bce54f3d4ca", size = 107438, upload-time = "2025-08-09T07:57:25.287Z" }, + { url = "https://files.pythonhosted.org/packages/8a/1f/f041989e93b001bc4e44bb1669ccdcf54d3f00e628229a85b08d330615c5/charset_normalizer-3.4.3-py3-none-any.whl", hash = "sha256:ce571ab16d890d23b5c278547ba694193a45011ff86a9162a71307ed9f86759a", size = 53175, upload-time = "2025-08-09T07:57:26.864Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + 
+[[package]] +name = "contextlib2" +version = "21.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/13/37ea7805ae3057992e96ecb1cffa2fa35c2ef4498543b846f90dd2348d8f/contextlib2-21.6.0.tar.gz", hash = "sha256:ab1e2bfe1d01d968e1b7e8d9023bc51ef3509bba217bb730cee3827e1ee82869", size = 43795, upload-time = "2021-06-27T06:54:40.841Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/56/6d6872f79d14c0cb02f1646cbb4592eef935857c0951a105874b7b62a0c3/contextlib2-21.6.0-py2.py3-none-any.whl", hash = "sha256:3fbdb64466afd23abaf6c977627b75b6139a5a3e8ce38405c5b413aed7a0471f", size = 13277, upload-time = "2021-06-27T06:54:20.972Z" }, +] + +[[package]] +name = "coverage" +version = "7.10.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/14/70/025b179c993f019105b79575ac6edb5e084fb0f0e63f15cdebef4e454fb5/coverage-7.10.6.tar.gz", hash = "sha256:f644a3ae5933a552a29dbb9aa2f90c677a875f80ebea028e5a52a4f429044b90", size = 823736, upload-time = "2025-08-29T15:35:16.668Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/1d/2e64b43d978b5bd184e0756a41415597dfef30fcbd90b747474bd749d45f/coverage-7.10.6-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70e7bfbd57126b5554aa482691145f798d7df77489a177a6bef80de78860a356", size = 217025, upload-time = "2025-08-29T15:32:57.169Z" }, + { url = "https://files.pythonhosted.org/packages/23/62/b1e0f513417c02cc10ef735c3ee5186df55f190f70498b3702d516aad06f/coverage-7.10.6-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e41be6f0f19da64af13403e52f2dec38bbc2937af54df8ecef10850ff8d35301", size = 217419, upload-time = "2025-08-29T15:32:59.908Z" }, + { url = "https://files.pythonhosted.org/packages/e7/16/b800640b7a43e7c538429e4d7223e0a94fd72453a1a048f70bf766f12e96/coverage-7.10.6-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:c61fc91ab80b23f5fddbee342d19662f3d3328173229caded831aa0bd7595460", size = 244180, upload-time = "2025-08-29T15:33:01.608Z" }, + { url = "https://files.pythonhosted.org/packages/fb/6f/5e03631c3305cad187eaf76af0b559fff88af9a0b0c180d006fb02413d7a/coverage-7.10.6-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:10356fdd33a7cc06e8051413140bbdc6f972137508a3572e3f59f805cd2832fd", size = 245992, upload-time = "2025-08-29T15:33:03.239Z" }, + { url = "https://files.pythonhosted.org/packages/eb/a1/f30ea0fb400b080730125b490771ec62b3375789f90af0bb68bfb8a921d7/coverage-7.10.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:80b1695cf7c5ebe7b44bf2521221b9bb8cdf69b1f24231149a7e3eb1ae5fa2fb", size = 247851, upload-time = "2025-08-29T15:33:04.603Z" }, + { url = "https://files.pythonhosted.org/packages/02/8e/cfa8fee8e8ef9a6bb76c7bef039f3302f44e615d2194161a21d3d83ac2e9/coverage-7.10.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:2e4c33e6378b9d52d3454bd08847a8651f4ed23ddbb4a0520227bd346382bbc6", size = 245891, upload-time = "2025-08-29T15:33:06.176Z" }, + { url = "https://files.pythonhosted.org/packages/93/a9/51be09b75c55c4f6c16d8d73a6a1d46ad764acca0eab48fa2ffaef5958fe/coverage-7.10.6-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:c8a3ec16e34ef980a46f60dc6ad86ec60f763c3f2fa0db6d261e6e754f72e945", size = 243909, upload-time = "2025-08-29T15:33:07.74Z" }, + { url = "https://files.pythonhosted.org/packages/e9/a6/ba188b376529ce36483b2d585ca7bdac64aacbe5aa10da5978029a9c94db/coverage-7.10.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7d79dabc0a56f5af990cc6da9ad1e40766e82773c075f09cc571e2076fef882e", size = 244786, upload-time = "2025-08-29T15:33:08.965Z" }, + { url = "https://files.pythonhosted.org/packages/d0/4c/37ed872374a21813e0d3215256180c9a382c3f5ced6f2e5da0102fc2fd3e/coverage-7.10.6-cp310-cp310-win32.whl", hash = 
"sha256:86b9b59f2b16e981906e9d6383eb6446d5b46c278460ae2c36487667717eccf1", size = 219521, upload-time = "2025-08-29T15:33:10.599Z" }, + { url = "https://files.pythonhosted.org/packages/8e/36/9311352fdc551dec5b973b61f4e453227ce482985a9368305880af4f85dd/coverage-7.10.6-cp310-cp310-win_amd64.whl", hash = "sha256:e132b9152749bd33534e5bd8565c7576f135f157b4029b975e15ee184325f528", size = 220417, upload-time = "2025-08-29T15:33:11.907Z" }, + { url = "https://files.pythonhosted.org/packages/d4/16/2bea27e212c4980753d6d563a0803c150edeaaddb0771a50d2afc410a261/coverage-7.10.6-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c706db3cabb7ceef779de68270150665e710b46d56372455cd741184f3868d8f", size = 217129, upload-time = "2025-08-29T15:33:13.575Z" }, + { url = "https://files.pythonhosted.org/packages/2a/51/e7159e068831ab37e31aac0969d47b8c5ee25b7d307b51e310ec34869315/coverage-7.10.6-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:8e0c38dc289e0508ef68ec95834cb5d2e96fdbe792eaccaa1bccac3966bbadcc", size = 217532, upload-time = "2025-08-29T15:33:14.872Z" }, + { url = "https://files.pythonhosted.org/packages/e7/c0/246ccbea53d6099325d25cd208df94ea435cd55f0db38099dd721efc7a1f/coverage-7.10.6-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:752a3005a1ded28f2f3a6e8787e24f28d6abe176ca64677bcd8d53d6fe2ec08a", size = 247931, upload-time = "2025-08-29T15:33:16.142Z" }, + { url = "https://files.pythonhosted.org/packages/7d/fb/7435ef8ab9b2594a6e3f58505cc30e98ae8b33265d844007737946c59389/coverage-7.10.6-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:689920ecfd60f992cafca4f5477d55720466ad2c7fa29bb56ac8d44a1ac2b47a", size = 249864, upload-time = "2025-08-29T15:33:17.434Z" }, + { url = "https://files.pythonhosted.org/packages/51/f8/d9d64e8da7bcddb094d511154824038833c81e3a039020a9d6539bf303e9/coverage-7.10.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = 
"sha256:ec98435796d2624d6905820a42f82149ee9fc4f2d45c2c5bc5a44481cc50db62", size = 251969, upload-time = "2025-08-29T15:33:18.822Z" }, + { url = "https://files.pythonhosted.org/packages/43/28/c43ba0ef19f446d6463c751315140d8f2a521e04c3e79e5c5fe211bfa430/coverage-7.10.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:b37201ce4a458c7a758ecc4efa92fa8ed783c66e0fa3c42ae19fc454a0792153", size = 249659, upload-time = "2025-08-29T15:33:20.407Z" }, + { url = "https://files.pythonhosted.org/packages/79/3e/53635bd0b72beaacf265784508a0b386defc9ab7fad99ff95f79ce9db555/coverage-7.10.6-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:2904271c80898663c810a6b067920a61dd8d38341244a3605bd31ab55250dad5", size = 247714, upload-time = "2025-08-29T15:33:21.751Z" }, + { url = "https://files.pythonhosted.org/packages/4c/55/0964aa87126624e8c159e32b0bc4e84edef78c89a1a4b924d28dd8265625/coverage-7.10.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:5aea98383463d6e1fa4e95416d8de66f2d0cb588774ee20ae1b28df826bcb619", size = 248351, upload-time = "2025-08-29T15:33:23.105Z" }, + { url = "https://files.pythonhosted.org/packages/eb/ab/6cfa9dc518c6c8e14a691c54e53a9433ba67336c760607e299bfcf520cb1/coverage-7.10.6-cp311-cp311-win32.whl", hash = "sha256:e3fb1fa01d3598002777dd259c0c2e6d9d5e10e7222976fc8e03992f972a2cba", size = 219562, upload-time = "2025-08-29T15:33:24.717Z" }, + { url = "https://files.pythonhosted.org/packages/5b/18/99b25346690cbc55922e7cfef06d755d4abee803ef335baff0014268eff4/coverage-7.10.6-cp311-cp311-win_amd64.whl", hash = "sha256:f35ed9d945bece26553d5b4c8630453169672bea0050a564456eb88bdffd927e", size = 220453, upload-time = "2025-08-29T15:33:26.482Z" }, + { url = "https://files.pythonhosted.org/packages/d8/ed/81d86648a07ccb124a5cf1f1a7788712b8d7216b593562683cd5c9b0d2c1/coverage-7.10.6-cp311-cp311-win_arm64.whl", hash = "sha256:99e1a305c7765631d74b98bf7dbf54eeea931f975e80f115437d23848ee8c27c", size = 219127, upload-time = "2025-08-29T15:33:27.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/26/06/263f3305c97ad78aab066d116b52250dd316e74fcc20c197b61e07eb391a/coverage-7.10.6-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:5b2dd6059938063a2c9fee1af729d4f2af28fd1a545e9b7652861f0d752ebcea", size = 217324, upload-time = "2025-08-29T15:33:29.06Z" }, + { url = "https://files.pythonhosted.org/packages/e9/60/1e1ded9a4fe80d843d7d53b3e395c1db3ff32d6c301e501f393b2e6c1c1f/coverage-7.10.6-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:388d80e56191bf846c485c14ae2bc8898aa3124d9d35903fef7d907780477634", size = 217560, upload-time = "2025-08-29T15:33:30.748Z" }, + { url = "https://files.pythonhosted.org/packages/b8/25/52136173c14e26dfed8b106ed725811bb53c30b896d04d28d74cb64318b3/coverage-7.10.6-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:90cb5b1a4670662719591aa92d0095bb41714970c0b065b02a2610172dbf0af6", size = 249053, upload-time = "2025-08-29T15:33:32.041Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1d/ae25a7dc58fcce8b172d42ffe5313fc267afe61c97fa872b80ee72d9515a/coverage-7.10.6-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:961834e2f2b863a0e14260a9a273aff07ff7818ab6e66d2addf5628590c628f9", size = 251802, upload-time = "2025-08-29T15:33:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/f5/7a/1f561d47743710fe996957ed7c124b421320f150f1d38523d8d9102d3e2a/coverage-7.10.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:bf9a19f5012dab774628491659646335b1928cfc931bf8d97b0d5918dd58033c", size = 252935, upload-time = "2025-08-29T15:33:34.909Z" }, + { url = "https://files.pythonhosted.org/packages/6c/ad/8b97cd5d28aecdfde792dcbf646bac141167a5cacae2cd775998b45fabb5/coverage-7.10.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:99c4283e2a0e147b9c9cc6bc9c96124de9419d6044837e9799763a0e29a7321a", size = 250855, upload-time = "2025-08-29T15:33:36.922Z" }, + { url 
= "https://files.pythonhosted.org/packages/33/6a/95c32b558d9a61858ff9d79580d3877df3eb5bc9eed0941b1f187c89e143/coverage-7.10.6-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:282b1b20f45df57cc508c1e033403f02283adfb67d4c9c35a90281d81e5c52c5", size = 248974, upload-time = "2025-08-29T15:33:38.175Z" }, + { url = "https://files.pythonhosted.org/packages/0d/9c/8ce95dee640a38e760d5b747c10913e7a06554704d60b41e73fdea6a1ffd/coverage-7.10.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:8cdbe264f11afd69841bd8c0d83ca10b5b32853263ee62e6ac6a0ab63895f972", size = 250409, upload-time = "2025-08-29T15:33:39.447Z" }, + { url = "https://files.pythonhosted.org/packages/04/12/7a55b0bdde78a98e2eb2356771fd2dcddb96579e8342bb52aa5bc52e96f0/coverage-7.10.6-cp312-cp312-win32.whl", hash = "sha256:a517feaf3a0a3eca1ee985d8373135cfdedfbba3882a5eab4362bda7c7cf518d", size = 219724, upload-time = "2025-08-29T15:33:41.172Z" }, + { url = "https://files.pythonhosted.org/packages/36/4a/32b185b8b8e327802c9efce3d3108d2fe2d9d31f153a0f7ecfd59c773705/coverage-7.10.6-cp312-cp312-win_amd64.whl", hash = "sha256:856986eadf41f52b214176d894a7de05331117f6035a28ac0016c0f63d887629", size = 220536, upload-time = "2025-08-29T15:33:42.524Z" }, + { url = "https://files.pythonhosted.org/packages/08/3a/d5d8dc703e4998038c3099eaf77adddb00536a3cec08c8dcd556a36a3eb4/coverage-7.10.6-cp312-cp312-win_arm64.whl", hash = "sha256:acf36b8268785aad739443fa2780c16260ee3fa09d12b3a70f772ef100939d80", size = 219171, upload-time = "2025-08-29T15:33:43.974Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e7/917e5953ea29a28c1057729c1d5af9084ab6d9c66217523fd0e10f14d8f6/coverage-7.10.6-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:ffea0575345e9ee0144dfe5701aa17f3ba546f8c3bb48db62ae101afb740e7d6", size = 217351, upload-time = "2025-08-29T15:33:45.438Z" }, + { url = 
"https://files.pythonhosted.org/packages/eb/86/2e161b93a4f11d0ea93f9bebb6a53f113d5d6e416d7561ca41bb0a29996b/coverage-7.10.6-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d91d7317cde40a1c249d6b7382750b7e6d86fad9d8eaf4fa3f8f44cf171e80", size = 217600, upload-time = "2025-08-29T15:33:47.269Z" }, + { url = "https://files.pythonhosted.org/packages/0e/66/d03348fdd8df262b3a7fb4ee5727e6e4936e39e2f3a842e803196946f200/coverage-7.10.6-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e23dd5408fe71a356b41baa82892772a4cefcf758f2ca3383d2aa39e1b7a003", size = 248600, upload-time = "2025-08-29T15:33:48.953Z" }, + { url = "https://files.pythonhosted.org/packages/73/dd/508420fb47d09d904d962f123221bc249f64b5e56aa93d5f5f7603be475f/coverage-7.10.6-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:0f3f56e4cb573755e96a16501a98bf211f100463d70275759e73f3cbc00d4f27", size = 251206, upload-time = "2025-08-29T15:33:50.697Z" }, + { url = "https://files.pythonhosted.org/packages/e9/1f/9020135734184f439da85c70ea78194c2730e56c2d18aee6e8ff1719d50d/coverage-7.10.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:db4a1d897bbbe7339946ffa2fe60c10cc81c43fab8b062d3fcb84188688174a4", size = 252478, upload-time = "2025-08-29T15:33:52.303Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a4/3d228f3942bb5a2051fde28c136eea23a761177dc4ff4ef54533164ce255/coverage-7.10.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d8fd7879082953c156d5b13c74aa6cca37f6a6f4747b39538504c3f9c63d043d", size = 250637, upload-time = "2025-08-29T15:33:53.67Z" }, + { url = "https://files.pythonhosted.org/packages/36/e3/293dce8cdb9a83de971637afc59b7190faad60603b40e32635cbd15fbf61/coverage-7.10.6-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:28395ca3f71cd103b8c116333fa9db867f3a3e1ad6a084aa3725ae002b6583bc", size = 248529, upload-time = "2025-08-29T15:33:55.022Z" }, + { url = 
"https://files.pythonhosted.org/packages/90/26/64eecfa214e80dd1d101e420cab2901827de0e49631d666543d0e53cf597/coverage-7.10.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:61c950fc33d29c91b9e18540e1aed7d9f6787cc870a3e4032493bbbe641d12fc", size = 250143, upload-time = "2025-08-29T15:33:56.386Z" }, + { url = "https://files.pythonhosted.org/packages/3e/70/bd80588338f65ea5b0d97e424b820fb4068b9cfb9597fbd91963086e004b/coverage-7.10.6-cp313-cp313-win32.whl", hash = "sha256:160c00a5e6b6bdf4e5984b0ef21fc860bc94416c41b7df4d63f536d17c38902e", size = 219770, upload-time = "2025-08-29T15:33:58.063Z" }, + { url = "https://files.pythonhosted.org/packages/a7/14/0b831122305abcc1060c008f6c97bbdc0a913ab47d65070a01dc50293c2b/coverage-7.10.6-cp313-cp313-win_amd64.whl", hash = "sha256:628055297f3e2aa181464c3808402887643405573eb3d9de060d81531fa79d32", size = 220566, upload-time = "2025-08-29T15:33:59.766Z" }, + { url = "https://files.pythonhosted.org/packages/83/c6/81a83778c1f83f1a4a168ed6673eeedc205afb562d8500175292ca64b94e/coverage-7.10.6-cp313-cp313-win_arm64.whl", hash = "sha256:df4ec1f8540b0bcbe26ca7dd0f541847cc8a108b35596f9f91f59f0c060bfdd2", size = 219195, upload-time = "2025-08-29T15:34:01.191Z" }, + { url = "https://files.pythonhosted.org/packages/d7/1c/ccccf4bf116f9517275fa85047495515add43e41dfe8e0bef6e333c6b344/coverage-7.10.6-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:c9a8b7a34a4de3ed987f636f71881cd3b8339f61118b1aa311fbda12741bff0b", size = 218059, upload-time = "2025-08-29T15:34:02.91Z" }, + { url = "https://files.pythonhosted.org/packages/92/97/8a3ceff833d27c7492af4f39d5da6761e9ff624831db9e9f25b3886ddbca/coverage-7.10.6-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:8dd5af36092430c2b075cee966719898f2ae87b636cefb85a653f1d0ba5d5393", size = 218287, upload-time = "2025-08-29T15:34:05.106Z" }, + { url = 
"https://files.pythonhosted.org/packages/92/d8/50b4a32580cf41ff0423777a2791aaf3269ab60c840b62009aec12d3970d/coverage-7.10.6-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:b0353b0f0850d49ada66fdd7d0c7cdb0f86b900bb9e367024fd14a60cecc1e27", size = 259625, upload-time = "2025-08-29T15:34:06.575Z" }, + { url = "https://files.pythonhosted.org/packages/7e/7e/6a7df5a6fb440a0179d94a348eb6616ed4745e7df26bf2a02bc4db72c421/coverage-7.10.6-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d6b9ae13d5d3e8aeca9ca94198aa7b3ebbc5acfada557d724f2a1f03d2c0b0df", size = 261801, upload-time = "2025-08-29T15:34:08.006Z" }, + { url = "https://files.pythonhosted.org/packages/3a/4c/a270a414f4ed5d196b9d3d67922968e768cd971d1b251e1b4f75e9362f75/coverage-7.10.6-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:675824a363cc05781b1527b39dc2587b8984965834a748177ee3c37b64ffeafb", size = 264027, upload-time = "2025-08-29T15:34:09.806Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8b/3210d663d594926c12f373c5370bf1e7c5c3a427519a8afa65b561b9a55c/coverage-7.10.6-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:692d70ea725f471a547c305f0d0fc6a73480c62fb0da726370c088ab21aed282", size = 261576, upload-time = "2025-08-29T15:34:11.585Z" }, + { url = "https://files.pythonhosted.org/packages/72/d0/e1961eff67e9e1dba3fc5eb7a4caf726b35a5b03776892da8d79ec895775/coverage-7.10.6-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:851430a9a361c7a8484a36126d1d0ff8d529d97385eacc8dfdc9bfc8c2d2cbe4", size = 259341, upload-time = "2025-08-29T15:34:13.159Z" }, + { url = "https://files.pythonhosted.org/packages/3a/06/d6478d152cd189b33eac691cba27a40704990ba95de49771285f34a5861e/coverage-7.10.6-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d9369a23186d189b2fc95cc08b8160ba242057e887d766864f7adf3c46b2df21", size = 260468, upload-time = "2025-08-29T15:34:14.571Z" }, 
+ { url = "https://files.pythonhosted.org/packages/ed/73/737440247c914a332f0b47f7598535b29965bf305e19bbc22d4c39615d2b/coverage-7.10.6-cp313-cp313t-win32.whl", hash = "sha256:92be86fcb125e9bda0da7806afd29a3fd33fdf58fba5d60318399adf40bf37d0", size = 220429, upload-time = "2025-08-29T15:34:16.394Z" }, + { url = "https://files.pythonhosted.org/packages/bd/76/b92d3214740f2357ef4a27c75a526eb6c28f79c402e9f20a922c295c05e2/coverage-7.10.6-cp313-cp313t-win_amd64.whl", hash = "sha256:6b3039e2ca459a70c79523d39347d83b73f2f06af5624905eba7ec34d64d80b5", size = 221493, upload-time = "2025-08-29T15:34:17.835Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8e/6dcb29c599c8a1f654ec6cb68d76644fe635513af16e932d2d4ad1e5ac6e/coverage-7.10.6-cp313-cp313t-win_arm64.whl", hash = "sha256:3fb99d0786fe17b228eab663d16bee2288e8724d26a199c29325aac4b0319b9b", size = 219757, upload-time = "2025-08-29T15:34:19.248Z" }, + { url = "https://files.pythonhosted.org/packages/d3/aa/76cf0b5ec00619ef208da4689281d48b57f2c7fde883d14bf9441b74d59f/coverage-7.10.6-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6008a021907be8c4c02f37cdc3ffb258493bdebfeaf9a839f9e71dfdc47b018e", size = 217331, upload-time = "2025-08-29T15:34:20.846Z" }, + { url = "https://files.pythonhosted.org/packages/65/91/8e41b8c7c505d398d7730206f3cbb4a875a35ca1041efc518051bfce0f6b/coverage-7.10.6-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:5e75e37f23eb144e78940b40395b42f2321951206a4f50e23cfd6e8a198d3ceb", size = 217607, upload-time = "2025-08-29T15:34:22.433Z" }, + { url = "https://files.pythonhosted.org/packages/87/7f/f718e732a423d442e6616580a951b8d1ec3575ea48bcd0e2228386805e79/coverage-7.10.6-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0f7cb359a448e043c576f0da00aa8bfd796a01b06aa610ca453d4dde09cc1034", size = 248663, upload-time = "2025-08-29T15:34:24.425Z" }, + { url = 
"https://files.pythonhosted.org/packages/e6/52/c1106120e6d801ac03e12b5285e971e758e925b6f82ee9b86db3aa10045d/coverage-7.10.6-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c68018e4fc4e14b5668f1353b41ccf4bc83ba355f0e1b3836861c6f042d89ac1", size = 251197, upload-time = "2025-08-29T15:34:25.906Z" }, + { url = "https://files.pythonhosted.org/packages/3d/ec/3a8645b1bb40e36acde9c0609f08942852a4af91a937fe2c129a38f2d3f5/coverage-7.10.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cd4b2b0707fc55afa160cd5fc33b27ccbf75ca11d81f4ec9863d5793fc6df56a", size = 252551, upload-time = "2025-08-29T15:34:27.337Z" }, + { url = "https://files.pythonhosted.org/packages/a1/70/09ecb68eeb1155b28a1d16525fd3a9b65fbe75337311a99830df935d62b6/coverage-7.10.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cec13817a651f8804a86e4f79d815b3b28472c910e099e4d5a0e8a3b6a1d4cb", size = 250553, upload-time = "2025-08-29T15:34:29.065Z" }, + { url = "https://files.pythonhosted.org/packages/c6/80/47df374b893fa812e953b5bc93dcb1427a7b3d7a1a7d2db33043d17f74b9/coverage-7.10.6-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:f2a6a8e06bbda06f78739f40bfb56c45d14eb8249d0f0ea6d4b3d48e1f7c695d", size = 248486, upload-time = "2025-08-29T15:34:30.897Z" }, + { url = "https://files.pythonhosted.org/packages/4a/65/9f98640979ecee1b0d1a7164b589de720ddf8100d1747d9bbdb84be0c0fb/coverage-7.10.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:081b98395ced0d9bcf60ada7661a0b75f36b78b9d7e39ea0790bb4ed8da14747", size = 249981, upload-time = "2025-08-29T15:34:32.365Z" }, + { url = "https://files.pythonhosted.org/packages/1f/55/eeb6603371e6629037f47bd25bef300387257ed53a3c5fdb159b7ac8c651/coverage-7.10.6-cp314-cp314-win32.whl", hash = "sha256:6937347c5d7d069ee776b2bf4e1212f912a9f1f141a429c475e6089462fcecc5", size = 220054, upload-time = "2025-08-29T15:34:34.124Z" }, + { url = 
"https://files.pythonhosted.org/packages/15/d1/a0912b7611bc35412e919a2cd59ae98e7ea3b475e562668040a43fb27897/coverage-7.10.6-cp314-cp314-win_amd64.whl", hash = "sha256:adec1d980fa07e60b6ef865f9e5410ba760e4e1d26f60f7e5772c73b9a5b0713", size = 220851, upload-time = "2025-08-29T15:34:35.651Z" }, + { url = "https://files.pythonhosted.org/packages/ef/2d/11880bb8ef80a45338e0b3e0725e4c2d73ffbb4822c29d987078224fd6a5/coverage-7.10.6-cp314-cp314-win_arm64.whl", hash = "sha256:a80f7aef9535442bdcf562e5a0d5a5538ce8abe6bb209cfbf170c462ac2c2a32", size = 219429, upload-time = "2025-08-29T15:34:37.16Z" }, + { url = "https://files.pythonhosted.org/packages/83/c0/1f00caad775c03a700146f55536ecd097a881ff08d310a58b353a1421be0/coverage-7.10.6-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:0de434f4fbbe5af4fa7989521c655c8c779afb61c53ab561b64dcee6149e4c65", size = 218080, upload-time = "2025-08-29T15:34:38.919Z" }, + { url = "https://files.pythonhosted.org/packages/a9/c4/b1c5d2bd7cc412cbeb035e257fd06ed4e3e139ac871d16a07434e145d18d/coverage-7.10.6-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6e31b8155150c57e5ac43ccd289d079eb3f825187d7c66e755a055d2c85794c6", size = 218293, upload-time = "2025-08-29T15:34:40.425Z" }, + { url = "https://files.pythonhosted.org/packages/3f/07/4468d37c94724bf6ec354e4ec2f205fda194343e3e85fd2e59cec57e6a54/coverage-7.10.6-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:98cede73eb83c31e2118ae8d379c12e3e42736903a8afcca92a7218e1f2903b0", size = 259800, upload-time = "2025-08-29T15:34:41.996Z" }, + { url = "https://files.pythonhosted.org/packages/82/d8/f8fb351be5fee31690cd8da768fd62f1cfab33c31d9f7baba6cd8960f6b8/coverage-7.10.6-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:f863c08f4ff6b64fa8045b1e3da480f5374779ef187f07b82e0538c68cb4ff8e", size = 261965, upload-time = "2025-08-29T15:34:43.61Z" }, + { url = 
"https://files.pythonhosted.org/packages/e8/70/65d4d7cfc75c5c6eb2fed3ee5cdf420fd8ae09c4808723a89a81d5b1b9c3/coverage-7.10.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b38261034fda87be356f2c3f42221fdb4171c3ce7658066ae449241485390d5", size = 264220, upload-time = "2025-08-29T15:34:45.387Z" }, + { url = "https://files.pythonhosted.org/packages/98/3c/069df106d19024324cde10e4ec379fe2fb978017d25e97ebee23002fbadf/coverage-7.10.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:0e93b1476b79eae849dc3872faeb0bf7948fd9ea34869590bc16a2a00b9c82a7", size = 261660, upload-time = "2025-08-29T15:34:47.288Z" }, + { url = "https://files.pythonhosted.org/packages/fc/8a/2974d53904080c5dc91af798b3a54a4ccb99a45595cc0dcec6eb9616a57d/coverage-7.10.6-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:ff8a991f70f4c0cf53088abf1e3886edcc87d53004c7bb94e78650b4d3dac3b5", size = 259417, upload-time = "2025-08-29T15:34:48.779Z" }, + { url = "https://files.pythonhosted.org/packages/30/38/9616a6b49c686394b318974d7f6e08f38b8af2270ce7488e879888d1e5db/coverage-7.10.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:ac765b026c9f33044419cbba1da913cfb82cca1b60598ac1c7a5ed6aac4621a0", size = 260567, upload-time = "2025-08-29T15:34:50.718Z" }, + { url = "https://files.pythonhosted.org/packages/76/16/3ed2d6312b371a8cf804abf4e14895b70e4c3491c6e53536d63fd0958a8d/coverage-7.10.6-cp314-cp314t-win32.whl", hash = "sha256:441c357d55f4936875636ef2cfb3bee36e466dcf50df9afbd398ce79dba1ebb7", size = 220831, upload-time = "2025-08-29T15:34:52.653Z" }, + { url = "https://files.pythonhosted.org/packages/d5/e5/d38d0cb830abede2adb8b147770d2a3d0e7fecc7228245b9b1ae6c24930a/coverage-7.10.6-cp314-cp314t-win_amd64.whl", hash = "sha256:073711de3181b2e204e4870ac83a7c4853115b42e9cd4d145f2231e12d670930", size = 221950, upload-time = "2025-08-29T15:34:54.212Z" }, + { url = 
"https://files.pythonhosted.org/packages/f4/51/e48e550f6279349895b0ffcd6d2a690e3131ba3a7f4eafccc141966d4dea/coverage-7.10.6-cp314-cp314t-win_arm64.whl", hash = "sha256:137921f2bac5559334ba66122b753db6dc5d1cf01eb7b64eb412bb0d064ef35b", size = 219969, upload-time = "2025-08-29T15:34:55.83Z" }, + { url = "https://files.pythonhosted.org/packages/91/70/f73ad83b1d2fd2d5825ac58c8f551193433a7deaf9b0d00a8b69ef61cd9a/coverage-7.10.6-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:90558c35af64971d65fbd935c32010f9a2f52776103a259f1dee865fe8259352", size = 217009, upload-time = "2025-08-29T15:34:57.381Z" }, + { url = "https://files.pythonhosted.org/packages/01/e8/099b55cd48922abbd4b01ddd9ffa352408614413ebfc965501e981aced6b/coverage-7.10.6-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:8953746d371e5695405806c46d705a3cd170b9cc2b9f93953ad838f6c1e58612", size = 217400, upload-time = "2025-08-29T15:34:58.985Z" }, + { url = "https://files.pythonhosted.org/packages/ee/d1/c6bac7c9e1003110a318636fef3b5c039df57ab44abcc41d43262a163c28/coverage-7.10.6-cp39-cp39-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:c83f6afb480eae0313114297d29d7c295670a41c11b274e6bca0c64540c1ce7b", size = 243835, upload-time = "2025-08-29T15:35:00.541Z" }, + { url = "https://files.pythonhosted.org/packages/01/f9/82c6c061838afbd2172e773156c0aa84a901d59211b4975a4e93accf5c89/coverage-7.10.6-cp39-cp39-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:7eb68d356ba0cc158ca535ce1381dbf2037fa8cb5b1ae5ddfc302e7317d04144", size = 245658, upload-time = "2025-08-29T15:35:02.135Z" }, + { url = "https://files.pythonhosted.org/packages/81/6a/35674445b1d38161148558a3ff51b0aa7f0b54b1def3abe3fbd34efe05bc/coverage-7.10.6-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5b15a87265e96307482746d86995f4bff282f14b027db75469c446da6127433b", size = 247433, upload-time = "2025-08-29T15:35:03.777Z" }, + { url = 
"https://files.pythonhosted.org/packages/18/27/98c99e7cafb288730a93535092eb433b5503d529869791681c4f2e2012a8/coverage-7.10.6-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:fc53ba868875bfbb66ee447d64d6413c2db91fddcfca57025a0e7ab5b07d5862", size = 245315, upload-time = "2025-08-29T15:35:05.629Z" }, + { url = "https://files.pythonhosted.org/packages/09/05/123e0dba812408c719c319dea05782433246f7aa7b67e60402d90e847545/coverage-7.10.6-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:efeda443000aa23f276f4df973cb82beca682fd800bb119d19e80504ffe53ec2", size = 243385, upload-time = "2025-08-29T15:35:07.494Z" }, + { url = "https://files.pythonhosted.org/packages/67/52/d57a42502aef05c6325f28e2e81216c2d9b489040132c18725b7a04d1448/coverage-7.10.6-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:9702b59d582ff1e184945d8b501ffdd08d2cee38d93a2206aa5f1365ce0b8d78", size = 244343, upload-time = "2025-08-29T15:35:09.55Z" }, + { url = "https://files.pythonhosted.org/packages/6b/22/7f6fad7dbb37cf99b542c5e157d463bd96b797078b1ec506691bc836f476/coverage-7.10.6-cp39-cp39-win32.whl", hash = "sha256:2195f8e16ba1a44651ca684db2ea2b2d4b5345da12f07d9c22a395202a05b23c", size = 219530, upload-time = "2025-08-29T15:35:11.167Z" }, + { url = "https://files.pythonhosted.org/packages/62/30/e2fda29bfe335026027e11e6a5e57a764c9df13127b5cf42af4c3e99b937/coverage-7.10.6-cp39-cp39-win_amd64.whl", hash = "sha256:f32ff80e7ef6a5b5b606ea69a36e97b219cd9dc799bcf2963018a4d8f788cfbf", size = 220432, upload-time = "2025-08-29T15:35:12.902Z" }, + { url = "https://files.pythonhosted.org/packages/44/0c/50db5379b615854b5cf89146f8f5bd1d5a9693d7f3a987e269693521c404/coverage-7.10.6-py3-none-any.whl", hash = "sha256:92c4ecf6bf11b2e85fd4d8204814dc26e6a19f0c9d938c207c5cb0eadfcabbe3", size = 208986, upload-time = "2025-08-29T15:35:14.506Z" }, +] + +[package.optional-dependencies] +toml = [ + { name = "tomli", marker = "python_full_version <= '3.11'" }, +] + +[[package]] +name = "croniter" +version = "6.0.0" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "python-dateutil" }, + { name = "pytz" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ad/2f/44d1ae153a0e27be56be43465e5cb39b9650c781e001e7864389deb25090/croniter-6.0.0.tar.gz", hash = "sha256:37c504b313956114a983ece2c2b07790b1f1094fe9d81cc94739214748255577", size = 64481, upload-time = "2024-12-17T17:17:47.32Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/07/4b/290b4c3efd6417a8b0c284896de19b1d5855e6dbdb97d2a35e68fa42de85/croniter-6.0.0-py2.py3-none-any.whl", hash = "sha256:2f878c3856f17896979b2a4379ba1f09c83e374931ea15cc835c5dd2eee9b368", size = 25468, upload-time = "2024-12-17T17:17:45.359Z" }, +] + +[[package]] +name = "django" +version = "4.2.24" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "asgiref", marker = "python_full_version < '3.10'" }, + { name = "sqlparse", marker = "python_full_version < '3.10'" }, + { name = "tzdata", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ca/1a/3b7d8722a4c605078808b9839a6cf207f7aa763655bcf165fa219bedb39e/django-4.2.24.tar.gz", hash = "sha256:40cd7d3f53bc6cd1902eadce23c337e97200888df41e4a73b42d682f23e71d80", size = 10452798, upload-time = "2025-09-03T13:14:05.643Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/71/1cfb7cdd51270cf7df26f4ee4df4294b06a110e04bec553301d2b2617f24/django-4.2.24-py3-none-any.whl", hash = "sha256:a6527112c58821a0dfc5ab73013f0bdd906539790a17196658e36e66af43c350", size = 7993901, upload-time = "2025-09-03T13:13:51.365Z" }, +] + +[[package]] +name = "django" +version = "5.2.6" +source = { registry = 
"https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "asgiref", marker = "python_full_version >= '3.10'" }, + { name = "sqlparse", marker = "python_full_version >= '3.10'" }, + { name = "tzdata", marker = "python_full_version >= '3.10' and sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/4c/8c/2a21594337250a171d45dda926caa96309d5136becd1f48017247f9cdea0/django-5.2.6.tar.gz", hash = "sha256:da5e00372763193d73cecbf71084a3848458cecf4cee36b9a1e8d318d114a87b", size = 10858861, upload-time = "2025-09-03T13:04:03.23Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f5/af/6593f6d21404e842007b40fdeb81e73c20b6649b82d020bb0801b270174c/django-5.2.6-py3-none-any.whl", hash = "sha256:60549579b1174a304b77e24a93d8d9fafe6b6c03ac16311f3e25918ea5a20058", size = 8303111, upload-time = "2025-09-03T13:03:47.808Z" }, +] + +[[package]] +name = "django-stubs" +version = "5.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "asgiref", marker = "python_full_version < '3.10'" }, + { name = "django", version = "4.2.24", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "django-stubs-ext", version = "5.1.3", source = 
{ registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, + { name = "types-pyyaml", marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/dd/48/e733ceff94ed3c4ccba4c2f0708739974bbcdbcfb69efefb87b10780937f/django_stubs-5.1.3.tar.gz", hash = "sha256:8c230bc5bebee6da282ba8a27ad1503c84a0c4cd2f46e63d149e76d2a63e639a", size = 267390, upload-time = "2025-02-07T09:56:59.773Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/74/94/3551a181faf44a63a4ef1ab8e0eb7f27f6af168c2f719ea482e54b39d237/django_stubs-5.1.3-py3-none-any.whl", hash = "sha256:716758ced158b439213062e52de6df3cff7c586f9f9ad7ab59210efbea5dfe78", size = 472753, upload-time = "2025-02-07T09:56:57.291Z" }, +] + +[[package]] +name = "django-stubs" +version = "5.2.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "django", version = "5.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "django-stubs-ext", version = "5.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, + { name = "types-pyyaml", marker = "python_full_version >= '3.10'" }, + { name = "typing-extensions", marker = "python_full_version >= 
'3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/bc/27/ab9813da817a29ae69ec92af31ad8fc58ce3c904f23ea604bd3bdd9adc37/django_stubs-5.2.2.tar.gz", hash = "sha256:2a04b510c7a812f88223fd7e6d87fb4ea98717f19c8e5c8b59691d83ad40a8a6", size = 243049, upload-time = "2025-07-17T08:35:02.747Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/55/cb/bb387a1d40691ad54fec2be9e5093becebd63cca0ccb9348cbb27602e1d1/django_stubs-5.2.2-py3-none-any.whl", hash = "sha256:79bd0fdbc78958a8f63e0b062bd9d03f1de539664476c0be62ade5f063c9e41e", size = 485188, upload-time = "2025-07-17T08:35:00.356Z" }, +] + +[[package]] +name = "django-stubs-ext" +version = "5.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "django", version = "4.2.24", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9f/06/7b210e0073c6cb8824bde82afc25f268e8c410a99d3621297f44fa3f6a6c/django_stubs_ext-5.1.3.tar.gz", hash = "sha256:3e60f82337f0d40a362f349bf15539144b96e4ceb4dbd0239be1cd71f6a74ad0", size = 9613, upload-time = "2025-02-07T09:56:22.543Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/52/50125afcf29382b7f9d88a992e44835108dd2f1694d6d17d6d3d6fe06c81/django_stubs_ext-5.1.3-py3-none-any.whl", hash = "sha256:64561fbc53e963cc1eed2c8eb27e18b8e48dcb90771205180fe29fc8a59e55fd", size = 9034, upload-time = "2025-02-07T09:56:19.51Z" }, +] + +[[package]] +name = "django-stubs-ext" +version = "5.2.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and 
platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "django", version = "5.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "typing-extensions", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/06/5e94715d103e6cc72380cb0d0b6682a7d5ad2c366cee478c94d77aad777d/django_stubs_ext-5.2.2.tar.gz", hash = "sha256:d9d151b919fe2438760f5bd938f03e1cb08c84d0651f9e5917f1313907e42683", size = 6244, upload-time = "2025-07-17T08:34:35.054Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/38/2903676f97f7902ee31984a06756b0e8836e897f4b617e1a03be4a43eb4f/django_stubs_ext-5.2.2-py3-none-any.whl", hash = "sha256:8833bbe32405a2a0ce168d3f75a87168f61bd16939caf0e8bf173bccbd8a44c5", size = 8816, upload-time = "2025-07-17T08:34:33.715Z" }, +] + +[[package]] +name = "django-upgrade" +version = "1.27.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "tokenize-rt" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ef/a9/7b0bfa99ed86082d90ffe686c845d9cc94571b3f9fabf1f222dd776492ed/django_upgrade-1.27.0.tar.gz", hash = "sha256:1a865f772a35fb909acbeae02ed96d009ceb2f7349ebac0772f148e606cd36c3", size = 37144, upload-time = "2025-08-27T14:54:24.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/41/ede4848396b5ec3568482375d229dc05f6cbc1b9c8d61bfafac39f8c4137/django_upgrade-1.27.0-py3-none-any.whl", hash = "sha256:54a9009905b30dc6943109ad42cdadedd9fdff5d98ac9e0488074b08158d758d", size = 63756, 
upload-time = "2025-08-27T14:54:22.97Z" }, +] + +[[package]] +name = "doc8" +version = "1.1.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "restructuredtext-lint", marker = "python_full_version < '3.10'" }, + { name = "stevedore", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/11/28/b0a576233730b756ca1ebb422bc6199a761b826b86e93e5196dfa85331ea/doc8-1.1.2.tar.gz", hash = "sha256:1225f30144e1cc97e388dbaf7fe3e996d2897473a53a6dae268ddde21c354b98", size = 27030, upload-time = "2024-09-02T13:11:12.146Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/f1/6ffd5d76578e98a8f21ae7216b88a7212c778f665f1a8f4f8ce6f9605da4/doc8-1.1.2-py3-none-any.whl", hash = "sha256:e787b3076b391b8b49400da5d018bacafe592dfc0a04f35a9be22d0122b82b59", size = 25794, upload-time = "2024-09-02T13:11:10.83Z" }, +] + +[[package]] +name = "doc8" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "docutils", marker = 
"python_full_version >= '3.10'" }, + { name = "pygments", marker = "python_full_version >= '3.10'" }, + { name = "restructuredtext-lint", marker = "python_full_version >= '3.10'" }, + { name = "stevedore", marker = "python_full_version >= '3.10'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/92/91/88bb55225046a2ee9c2243d47346c78d2ed861c769168f451568625ad670/doc8-2.0.0.tar.gz", hash = "sha256:1267ad32758971fbcf991442417a3935c7bc9e52550e73622e0e56ba55ea1d40", size = 28436, upload-time = "2025-06-13T13:08:53.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a2/e9/90b7d243364d3dce38c8c2a1b8c103d7a8d1383c2b24c735fae0eee038dd/doc8-2.0.0-py3-none-any.whl", hash = "sha256:9862710027f793c25f9b1899150660e4bf1d4c9a6738742e71f32011e2e3f590", size = 25861, upload-time = "2025-06-13T13:08:51.839Z" }, +] + +[[package]] +name = "docutils" +version = "0.21.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ae/ed/aefcc8cd0ba62a0560c3c18c33925362d46c6075480bfa4df87b28e169a9/docutils-0.21.2.tar.gz", hash = "sha256:3a6b18732edf182daa3cd12775bbb338cf5691468f91eeeb109deff6ebfa986f", size = 2204444, upload-time = "2024-04-23T18:57:18.24Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/d7/9322c609343d929e75e7e5e6255e614fcc67572cfd083959cdef3b7aad79/docutils-0.21.2-py3-none-any.whl", hash = "sha256:dafca5b9e384f0e419294eb4d2ff9fa826435bf15f15b7bd45723e8ad76811b2", size = 587408, upload-time = "2024-04-23T18:57:14.835Z" }, +] + +[[package]] +name = "dunamai" +version = "1.25.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f1/2f/194d9a34c4d831c6563d2d990720850f0baef9ab60cb4ad8ae0eff6acd34/dunamai-1.25.0.tar.gz", hash = "sha256:a7f8360ea286d3dbaf0b6a1473f9253280ac93d619836ad4514facb70c0719d1", size = 
46155, upload-time = "2025-07-04T19:25:56.082Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/41/04e2a649058b0713b00d6c9bd22da35618bb157289e05d068e51fddf8d7e/dunamai-1.25.0-py3-none-any.whl", hash = "sha256:7f9dc687dd3256e613b6cc978d9daabfd2bb5deb8adc541fc135ee423ffa98ab", size = 27022, upload-time = "2025-07-04T19:25:54.863Z" }, +] + +[[package]] +name = "exceptiongroup" +version = "1.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, +] + +[[package]] +name = "furo" +version = "2025.7.19" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "accessible-pygments" }, + { name = "beautifulsoup4" }, + { name = "pygments" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-basic-ng" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d0/69/312cd100fa45ddaea5a588334d2defa331ff427bcb61f5fe2ae61bdc3762/furo-2025.7.19.tar.gz", hash = 
"sha256:4164b2cafcf4023a59bb3c594e935e2516f6b9d35e9a5ea83d8f6b43808fe91f", size = 1662054, upload-time = "2025-07-19T10:52:09.754Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3a/34/2b07b72bee02a63241d654f5d8af87a2de977c59638eec41ca356ab915cd/furo-2025.7.19-py3-none-any.whl", hash = "sha256:bdea869822dfd2b494ea84c0973937e35d1575af088b6721a29c7f7878adc9e3", size = 342175, upload-time = "2025-07-19T10:52:02.399Z" }, +] + +[[package]] +name = "greenlet" +version = "3.2.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/03/b8/704d753a5a45507a7aab61f18db9509302ed3d0a27ac7e0359ec2905b1a6/greenlet-3.2.4.tar.gz", hash = "sha256:0dca0d95ff849f9a364385f36ab49f50065d76964944638be9691e1832e9f86d", size = 188260, upload-time = "2025-08-07T13:24:33.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7d/ed/6bfa4109fcb23a58819600392564fea69cdc6551ffd5e69ccf1d52a40cbc/greenlet-3.2.4-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:8c68325b0d0acf8d91dde4e6f930967dd52a5302cd4062932a6b2e7c2969f47c", size = 271061, upload-time = "2025-08-07T13:17:15.373Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fc/102ec1a2fc015b3a7652abab7acf3541d58c04d3d17a8d3d6a44adae1eb1/greenlet-3.2.4-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:94385f101946790ae13da500603491f04a76b6e4c059dab271b3ce2e283b2590", size = 629475, upload-time = "2025-08-07T13:42:54.009Z" }, + { url = "https://files.pythonhosted.org/packages/c5/26/80383131d55a4ac0fb08d71660fd77e7660b9db6bdb4e8884f46d9f2cc04/greenlet-3.2.4-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f10fd42b5ee276335863712fa3da6608e93f70629c631bf77145021600abc23c", size = 640802, upload-time = "2025-08-07T13:45:25.52Z" }, + { url = 
"https://files.pythonhosted.org/packages/9f/7c/e7833dbcd8f376f3326bd728c845d31dcde4c84268d3921afcae77d90d08/greenlet-3.2.4-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c8c9e331e58180d0d83c5b7999255721b725913ff6bc6cf39fa2a45841a4fd4b", size = 636703, upload-time = "2025-08-07T13:53:12.622Z" }, + { url = "https://files.pythonhosted.org/packages/e9/49/547b93b7c0428ede7b3f309bc965986874759f7d89e4e04aeddbc9699acb/greenlet-3.2.4-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:58b97143c9cc7b86fc458f215bd0932f1757ce649e05b640fea2e79b54cedb31", size = 635417, upload-time = "2025-08-07T13:18:25.189Z" }, + { url = "https://files.pythonhosted.org/packages/7f/91/ae2eb6b7979e2f9b035a9f612cf70f1bf54aad4e1d125129bef1eae96f19/greenlet-3.2.4-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c2ca18a03a8cfb5b25bc1cbe20f3d9a4c80d8c3b13ba3df49ac3961af0b1018d", size = 584358, upload-time = "2025-08-07T13:18:23.708Z" }, + { url = "https://files.pythonhosted.org/packages/f7/85/433de0c9c0252b22b16d413c9407e6cb3b41df7389afc366ca204dbc1393/greenlet-3.2.4-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:9fe0a28a7b952a21e2c062cd5756d34354117796c6d9215a87f55e38d15402c5", size = 1113550, upload-time = "2025-08-07T13:42:37.467Z" }, + { url = "https://files.pythonhosted.org/packages/a1/8d/88f3ebd2bc96bf7747093696f4335a0a8a4c5acfcf1b757717c0d2474ba3/greenlet-3.2.4-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8854167e06950ca75b898b104b63cc646573aa5fef1353d4508ecdd1ee76254f", size = 1137126, upload-time = "2025-08-07T13:18:20.239Z" }, + { url = "https://files.pythonhosted.org/packages/d6/6f/b60b0291d9623c496638c582297ead61f43c4b72eef5e9c926ef4565ec13/greenlet-3.2.4-cp310-cp310-win_amd64.whl", hash = "sha256:73f49b5368b5359d04e18d15828eecc1806033db5233397748f4ca813ff1056c", size = 298654, upload-time = "2025-08-07T13:50:00.469Z" }, + { url = 
"https://files.pythonhosted.org/packages/a4/de/f28ced0a67749cac23fecb02b694f6473f47686dff6afaa211d186e2ef9c/greenlet-3.2.4-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:96378df1de302bc38e99c3a9aa311967b7dc80ced1dcc6f171e99842987882a2", size = 272305, upload-time = "2025-08-07T13:15:41.288Z" }, + { url = "https://files.pythonhosted.org/packages/09/16/2c3792cba130000bf2a31c5272999113f4764fd9d874fb257ff588ac779a/greenlet-3.2.4-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ee8fae0519a337f2329cb78bd7a8e128ec0f881073d43f023c7b8d4831d5246", size = 632472, upload-time = "2025-08-07T13:42:55.044Z" }, + { url = "https://files.pythonhosted.org/packages/ae/8f/95d48d7e3d433e6dae5b1682e4292242a53f22df82e6d3dda81b1701a960/greenlet-3.2.4-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:94abf90142c2a18151632371140b3dba4dee031633fe614cb592dbb6c9e17bc3", size = 644646, upload-time = "2025-08-07T13:45:26.523Z" }, + { url = "https://files.pythonhosted.org/packages/d5/5e/405965351aef8c76b8ef7ad370e5da58d57ef6068df197548b015464001a/greenlet-3.2.4-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:4d1378601b85e2e5171b99be8d2dc85f594c79967599328f95c1dc1a40f1c633", size = 640519, upload-time = "2025-08-07T13:53:13.928Z" }, + { url = "https://files.pythonhosted.org/packages/25/5d/382753b52006ce0218297ec1b628e048c4e64b155379331f25a7316eb749/greenlet-3.2.4-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0db5594dce18db94f7d1650d7489909b57afde4c580806b8d9203b6e79cdc079", size = 639707, upload-time = "2025-08-07T13:18:27.146Z" }, + { url = "https://files.pythonhosted.org/packages/1f/8e/abdd3f14d735b2929290a018ecf133c901be4874b858dd1c604b9319f064/greenlet-3.2.4-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2523e5246274f54fdadbce8494458a2ebdcdbc7b802318466ac5606d3cded1f8", size = 587684, upload-time = "2025-08-07T13:18:25.164Z" }, + { url = 
"https://files.pythonhosted.org/packages/5d/65/deb2a69c3e5996439b0176f6651e0052542bb6c8f8ec2e3fba97c9768805/greenlet-3.2.4-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:1987de92fec508535687fb807a5cea1560f6196285a4cde35c100b8cd632cc52", size = 1116647, upload-time = "2025-08-07T13:42:38.655Z" }, + { url = "https://files.pythonhosted.org/packages/3f/cc/b07000438a29ac5cfb2194bfc128151d52f333cee74dd7dfe3fb733fc16c/greenlet-3.2.4-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:55e9c5affaa6775e2c6b67659f3a71684de4c549b3dd9afca3bc773533d284fa", size = 1142073, upload-time = "2025-08-07T13:18:21.737Z" }, + { url = "https://files.pythonhosted.org/packages/d8/0f/30aef242fcab550b0b3520b8e3561156857c94288f0332a79928c31a52cf/greenlet-3.2.4-cp311-cp311-win_amd64.whl", hash = "sha256:9c40adce87eaa9ddb593ccb0fa6a07caf34015a29bf8d344811665b573138db9", size = 299100, upload-time = "2025-08-07T13:44:12.287Z" }, + { url = "https://files.pythonhosted.org/packages/44/69/9b804adb5fd0671f367781560eb5eb586c4d495277c93bde4307b9e28068/greenlet-3.2.4-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3b67ca49f54cede0186854a008109d6ee71f66bd57bb36abd6d0a0267b540cdd", size = 274079, upload-time = "2025-08-07T13:15:45.033Z" }, + { url = "https://files.pythonhosted.org/packages/46/e9/d2a80c99f19a153eff70bc451ab78615583b8dac0754cfb942223d2c1a0d/greenlet-3.2.4-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ddf9164e7a5b08e9d22511526865780a576f19ddd00d62f8a665949327fde8bb", size = 640997, upload-time = "2025-08-07T13:42:56.234Z" }, + { url = "https://files.pythonhosted.org/packages/3b/16/035dcfcc48715ccd345f3a93183267167cdd162ad123cd93067d86f27ce4/greenlet-3.2.4-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f28588772bb5fb869a8eb331374ec06f24a83a9c25bfa1f38b6993afe9c1e968", size = 655185, upload-time = "2025-08-07T13:45:27.624Z" }, + { url = 
"https://files.pythonhosted.org/packages/31/da/0386695eef69ffae1ad726881571dfe28b41970173947e7c558d9998de0f/greenlet-3.2.4-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5c9320971821a7cb77cfab8d956fa8e39cd07ca44b6070db358ceb7f8797c8c9", size = 649926, upload-time = "2025-08-07T13:53:15.251Z" }, + { url = "https://files.pythonhosted.org/packages/68/88/69bf19fd4dc19981928ceacbc5fd4bb6bc2215d53199e367832e98d1d8fe/greenlet-3.2.4-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c60a6d84229b271d44b70fb6e5fa23781abb5d742af7b808ae3f6efd7c9c60f6", size = 651839, upload-time = "2025-08-07T13:18:30.281Z" }, + { url = "https://files.pythonhosted.org/packages/19/0d/6660d55f7373b2ff8152401a83e02084956da23ae58cddbfb0b330978fe9/greenlet-3.2.4-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:3b3812d8d0c9579967815af437d96623f45c0f2ae5f04e366de62a12d83a8fb0", size = 607586, upload-time = "2025-08-07T13:18:28.544Z" }, + { url = "https://files.pythonhosted.org/packages/8e/1a/c953fdedd22d81ee4629afbb38d2f9d71e37d23caace44775a3a969147d4/greenlet-3.2.4-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:abbf57b5a870d30c4675928c37278493044d7c14378350b3aa5d484fa65575f0", size = 1123281, upload-time = "2025-08-07T13:42:39.858Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c7/12381b18e21aef2c6bd3a636da1088b888b97b7a0362fac2e4de92405f97/greenlet-3.2.4-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:20fb936b4652b6e307b8f347665e2c615540d4b42b3b4c8a321d8286da7e520f", size = 1151142, upload-time = "2025-08-07T13:18:22.981Z" }, + { url = "https://files.pythonhosted.org/packages/e9/08/b0814846b79399e585f974bbeebf5580fbe59e258ea7be64d9dfb253c84f/greenlet-3.2.4-cp312-cp312-win_amd64.whl", hash = "sha256:a7d4e128405eea3814a12cc2605e0e6aedb4035bf32697f72deca74de4105e02", size = 299899, upload-time = "2025-08-07T13:38:53.448Z" }, + { url = 
"https://files.pythonhosted.org/packages/49/e8/58c7f85958bda41dafea50497cbd59738c5c43dbbea5ee83d651234398f4/greenlet-3.2.4-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:1a921e542453fe531144e91e1feedf12e07351b1cf6c9e8a3325ea600a715a31", size = 272814, upload-time = "2025-08-07T13:15:50.011Z" }, + { url = "https://files.pythonhosted.org/packages/62/dd/b9f59862e9e257a16e4e610480cfffd29e3fae018a68c2332090b53aac3d/greenlet-3.2.4-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:cd3c8e693bff0fff6ba55f140bf390fa92c994083f838fece0f63be121334945", size = 641073, upload-time = "2025-08-07T13:42:57.23Z" }, + { url = "https://files.pythonhosted.org/packages/f7/0b/bc13f787394920b23073ca3b6c4a7a21396301ed75a655bcb47196b50e6e/greenlet-3.2.4-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:710638eb93b1fa52823aa91bf75326f9ecdfd5e0466f00789246a5280f4ba0fc", size = 655191, upload-time = "2025-08-07T13:45:29.752Z" }, + { url = "https://files.pythonhosted.org/packages/f2/d6/6adde57d1345a8d0f14d31e4ab9c23cfe8e2cd39c3baf7674b4b0338d266/greenlet-3.2.4-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c5111ccdc9c88f423426df3fd1811bfc40ed66264d35aa373420a34377efc98a", size = 649516, upload-time = "2025-08-07T13:53:16.314Z" }, + { url = "https://files.pythonhosted.org/packages/7f/3b/3a3328a788d4a473889a2d403199932be55b1b0060f4ddd96ee7cdfcad10/greenlet-3.2.4-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:d76383238584e9711e20ebe14db6c88ddcedc1829a9ad31a584389463b5aa504", size = 652169, upload-time = "2025-08-07T13:18:32.861Z" }, + { url = "https://files.pythonhosted.org/packages/ee/43/3cecdc0349359e1a527cbf2e3e28e5f8f06d3343aaf82ca13437a9aa290f/greenlet-3.2.4-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:23768528f2911bcd7e475210822ffb5254ed10d71f4028387e5a99b4c6699671", size = 610497, upload-time = "2025-08-07T13:18:31.636Z" }, + { url = 
"https://files.pythonhosted.org/packages/b8/19/06b6cf5d604e2c382a6f31cafafd6f33d5dea706f4db7bdab184bad2b21d/greenlet-3.2.4-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:00fadb3fedccc447f517ee0d3fd8fe49eae949e1cd0f6a611818f4f6fb7dc83b", size = 1121662, upload-time = "2025-08-07T13:42:41.117Z" }, + { url = "https://files.pythonhosted.org/packages/a2/15/0d5e4e1a66fab130d98168fe984c509249c833c1a3c16806b90f253ce7b9/greenlet-3.2.4-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:d25c5091190f2dc0eaa3f950252122edbbadbb682aa7b1ef2f8af0f8c0afefae", size = 1149210, upload-time = "2025-08-07T13:18:24.072Z" }, + { url = "https://files.pythonhosted.org/packages/0b/55/2321e43595e6801e105fcfdee02b34c0f996eb71e6ddffca6b10b7e1d771/greenlet-3.2.4-cp313-cp313-win_amd64.whl", hash = "sha256:554b03b6e73aaabec3745364d6239e9e012d64c68ccd0b8430c64ccc14939a8b", size = 299685, upload-time = "2025-08-07T13:24:38.824Z" }, + { url = "https://files.pythonhosted.org/packages/22/5c/85273fd7cc388285632b0498dbbab97596e04b154933dfe0f3e68156c68c/greenlet-3.2.4-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:49a30d5fda2507ae77be16479bdb62a660fa51b1eb4928b524975b3bde77b3c0", size = 273586, upload-time = "2025-08-07T13:16:08.004Z" }, + { url = "https://files.pythonhosted.org/packages/d1/75/10aeeaa3da9332c2e761e4c50d4c3556c21113ee3f0afa2cf5769946f7a3/greenlet-3.2.4-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:299fd615cd8fc86267b47597123e3f43ad79c9d8a22bebdce535e53550763e2f", size = 686346, upload-time = "2025-08-07T13:42:59.944Z" }, + { url = "https://files.pythonhosted.org/packages/c0/aa/687d6b12ffb505a4447567d1f3abea23bd20e73a5bed63871178e0831b7a/greenlet-3.2.4-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:c17b6b34111ea72fc5a4e4beec9711d2226285f0386ea83477cbb97c30a3f3a5", size = 699218, upload-time = "2025-08-07T13:45:30.969Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/8b/29aae55436521f1d6f8ff4e12fb676f3400de7fcf27fccd1d4d17fd8fecd/greenlet-3.2.4-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:b4a1870c51720687af7fa3e7cda6d08d801dae660f75a76f3845b642b4da6ee1", size = 694659, upload-time = "2025-08-07T13:53:17.759Z" }, + { url = "https://files.pythonhosted.org/packages/92/2e/ea25914b1ebfde93b6fc4ff46d6864564fba59024e928bdc7de475affc25/greenlet-3.2.4-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:061dc4cf2c34852b052a8620d40f36324554bc192be474b9e9770e8c042fd735", size = 695355, upload-time = "2025-08-07T13:18:34.517Z" }, + { url = "https://files.pythonhosted.org/packages/72/60/fc56c62046ec17f6b0d3060564562c64c862948c9d4bc8aa807cf5bd74f4/greenlet-3.2.4-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:44358b9bf66c8576a9f57a590d5f5d6e72fa4228b763d0e43fee6d3b06d3a337", size = 657512, upload-time = "2025-08-07T13:18:33.969Z" }, + { url = "https://files.pythonhosted.org/packages/e3/a5/6ddab2b4c112be95601c13428db1d8b6608a8b6039816f2ba09c346c08fc/greenlet-3.2.4-cp314-cp314-win_amd64.whl", hash = "sha256:e37ab26028f12dbb0ff65f29a8d3d44a765c61e729647bf2ddfbbed621726f01", size = 303425, upload-time = "2025-08-07T13:32:27.59Z" }, + { url = "https://files.pythonhosted.org/packages/f7/c0/93885c4106d2626bf51fdec377d6aef740dfa5c4877461889a7cf8e565cc/greenlet-3.2.4-cp39-cp39-macosx_11_0_universal2.whl", hash = "sha256:b6a7c19cf0d2742d0809a4c05975db036fdff50cd294a93632d6a310bf9ac02c", size = 269859, upload-time = "2025-08-07T13:16:16.003Z" }, + { url = "https://files.pythonhosted.org/packages/4d/f5/33f05dc3ba10a02dedb1485870cf81c109227d3d3aa280f0e48486cac248/greenlet-3.2.4-cp39-cp39-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:27890167f55d2387576d1f41d9487ef171849ea0359ce1510ca6e06c8bece11d", size = 627610, upload-time = "2025-08-07T13:43:01.345Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/a7/9476decef51a0844195f99ed5dc611d212e9b3515512ecdf7321543a7225/greenlet-3.2.4-cp39-cp39-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:18d9260df2b5fbf41ae5139e1be4e796d99655f023a636cd0e11e6406cca7d58", size = 639417, upload-time = "2025-08-07T13:45:32.094Z" }, + { url = "https://files.pythonhosted.org/packages/bd/e0/849b9159cbb176f8c0af5caaff1faffdece7a8417fcc6fe1869770e33e21/greenlet-3.2.4-cp39-cp39-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:671df96c1f23c4a0d4077a325483c1503c96a1b7d9db26592ae770daa41233d4", size = 634751, upload-time = "2025-08-07T13:53:18.848Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d3/844e714a9bbd39034144dca8b658dcd01839b72bb0ec7d8014e33e3705f0/greenlet-3.2.4-cp39-cp39-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:16458c245a38991aa19676900d48bd1a6f2ce3e16595051a4db9d012154e8433", size = 634020, upload-time = "2025-08-07T13:18:36.841Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4c/f3de2a8de0e840ecb0253ad0dc7e2bb3747348e798ec7e397d783a3cb380/greenlet-3.2.4-cp39-cp39-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9913f1a30e4526f432991f89ae263459b1c64d1608c0d22a5c79c287b3c70df", size = 582817, upload-time = "2025-08-07T13:18:35.48Z" }, + { url = "https://files.pythonhosted.org/packages/89/80/7332915adc766035c8980b161c2e5d50b2f941f453af232c164cff5e0aeb/greenlet-3.2.4-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:b90654e092f928f110e0007f572007c9727b5265f7632c2fa7415b4689351594", size = 1111985, upload-time = "2025-08-07T13:42:42.425Z" }, + { url = "https://files.pythonhosted.org/packages/66/71/1928e2c80197353bcb9b50aa19c4d8e26ee6d7a900c564907665cf4b9a41/greenlet-3.2.4-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:81701fd84f26330f0d5f4944d4e92e61afe6319dcd9775e39396e39d7c3e5f98", size = 1136137, upload-time = "2025-08-07T13:18:26.168Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/48/a5dc74dde38aeb2b15d418cec76ed50e1dd3d620ccda84d8199703248968/greenlet-3.2.4-cp39-cp39-win32.whl", hash = "sha256:65458b409c1ed459ea899e939f0e1cdb14f58dbc803f2f93c5eab5694d32671b", size = 281400, upload-time = "2025-08-07T14:02:20.263Z" }, + { url = "https://files.pythonhosted.org/packages/e5/44/342c4591db50db1076b8bda86ed0ad59240e3e1da17806a4cf10a6d0e447/greenlet-3.2.4-cp39-cp39-win_amd64.whl", hash = "sha256:d2e685ade4dafd447ede19c31277a224a239a0a1a4eca4e6390efedf20260cfb", size = 298533, upload-time = "2025-08-07T13:56:34.168Z" }, +] + +[[package]] +name = "idna" +version = "3.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, +] + +[[package]] +name = "imagesize" +version = "1.4.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a7/84/62473fb57d61e31fef6e36d64a179c8781605429fd927b5dd608c997be31/imagesize-1.4.1.tar.gz", hash = "sha256:69150444affb9cb0d5cc5a92b3676f0b2fb7cd9ae39e947a5e11a36b4497cd4a", size = 1280026, upload-time = "2022-07-01T12:21:05.687Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ff/62/85c4c919272577931d407be5ba5d71c20f0b616d31a0befe0ae45bb79abd/imagesize-1.4.1-py2.py3-none-any.whl", hash = "sha256:0d8d18d08f840c19d0ee7ca1fd82490fdc3729b7ac93f49870406ddde8ef8d8b", size = 8769, upload-time = "2022-07-01T12:21:02.467Z" }, +] + +[[package]] 
+name = "importlib-metadata" +version = "8.7.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "zipp", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, +] + +[[package]] +name = "jinja2" +version = "3.1.6" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/df/bf/f7da0350254c0ed7c72f3e33cef02e048281fec7ecec5f032d4aac52226b/jinja2-3.1.6.tar.gz", hash = "sha256:0137fb05990d35f1275a587e9aee6d56da821fc83491a0fb838183be43f66d6d", size = 245115, upload-time = "2025-03-05T20:05:02.478Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "3.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/71/3b932df36c1a044d397a1f92d1cf91ee0a503d91e470cbd670aa66b07ed0/markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb", size = 74596, upload-time = "2023-06-03T06:41:14.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/42/d7/1ec15b46af6af88f19b8e5ffea08fa375d433c998b8a7639e76935c14f1f/markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1", size = 87528, upload-time = "2023-06-03T06:41:11.019Z" }, +] + +[[package]] +name = "markupsafe" +version = "3.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, + { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = 
"sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, + { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, + { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, + { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, + { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, + { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, + { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = 
"sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, + { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, + { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, + { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, + { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, + { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, + { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, 
upload-time = "2024-10-18T15:21:06.495Z" }, + { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, + { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, + { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, + { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, + { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, + { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, + { url = 
"https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, + { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, + { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, + { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, + { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, + { url = 
"https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, + { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, + { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, + { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, + { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, + { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, + { url = 
"https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, + { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, + { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, + { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, + { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, + { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, + { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, + { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, + { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, + { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, + { url = 
"https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, + { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, + { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, + { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, + { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, + { url = 
"https://files.pythonhosted.org/packages/a7/ea/9b1530c3fdeeca613faeb0fb5cbcf2389d816072fab72a71b45749ef6062/MarkupSafe-3.0.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:eaa0a10b7f72326f1372a713e73c3f739b524b3af41feb43e4921cb529f5929a", size = 14344, upload-time = "2024-10-18T15:21:43.721Z" }, + { url = "https://files.pythonhosted.org/packages/4b/c2/fbdbfe48848e7112ab05e627e718e854d20192b674952d9042ebd8c9e5de/MarkupSafe-3.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:48032821bbdf20f5799ff537c7ac3d1fba0ba032cfc06194faffa8cda8b560ff", size = 12389, upload-time = "2024-10-18T15:21:44.666Z" }, + { url = "https://files.pythonhosted.org/packages/f0/25/7a7c6e4dbd4f867d95d94ca15449e91e52856f6ed1905d58ef1de5e211d0/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1a9d3f5f0901fdec14d8d2f66ef7d035f2157240a433441719ac9a3fba440b13", size = 21607, upload-time = "2024-10-18T15:21:45.452Z" }, + { url = "https://files.pythonhosted.org/packages/53/8f/f339c98a178f3c1e545622206b40986a4c3307fe39f70ccd3d9df9a9e425/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:88b49a3b9ff31e19998750c38e030fc7bb937398b1f78cfa599aaef92d693144", size = 20728, upload-time = "2024-10-18T15:21:46.295Z" }, + { url = "https://files.pythonhosted.org/packages/1a/03/8496a1a78308456dbd50b23a385c69b41f2e9661c67ea1329849a598a8f9/MarkupSafe-3.0.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cfad01eed2c2e0c01fd0ecd2ef42c492f7f93902e39a42fc9ee1692961443a29", size = 20826, upload-time = "2024-10-18T15:21:47.134Z" }, + { url = "https://files.pythonhosted.org/packages/e6/cf/0a490a4bd363048c3022f2f475c8c05582179bb179defcee4766fb3dcc18/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:1225beacc926f536dc82e45f8a4d68502949dc67eea90eab715dea3a21c1b5f0", size = 21843, upload-time = "2024-10-18T15:21:48.334Z" }, + { url = 
"https://files.pythonhosted.org/packages/19/a3/34187a78613920dfd3cdf68ef6ce5e99c4f3417f035694074beb8848cd77/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:3169b1eefae027567d1ce6ee7cae382c57fe26e82775f460f0b2778beaad66c0", size = 21219, upload-time = "2024-10-18T15:21:49.587Z" }, + { url = "https://files.pythonhosted.org/packages/17/d8/5811082f85bb88410ad7e452263af048d685669bbbfb7b595e8689152498/MarkupSafe-3.0.2-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:eb7972a85c54febfb25b5c4b4f3af4dcc731994c7da0d8a0b4a6eb0640e1d178", size = 20946, upload-time = "2024-10-18T15:21:50.441Z" }, + { url = "https://files.pythonhosted.org/packages/7c/31/bd635fb5989440d9365c5e3c47556cfea121c7803f5034ac843e8f37c2f2/MarkupSafe-3.0.2-cp39-cp39-win32.whl", hash = "sha256:8c4e8c3ce11e1f92f6536ff07154f9d49677ebaaafc32db9db4620bc11ed480f", size = 15063, upload-time = "2024-10-18T15:21:51.385Z" }, + { url = "https://files.pythonhosted.org/packages/b3/73/085399401383ce949f727afec55ec3abd76648d04b9f22e1c0e99cb4bec3/MarkupSafe-3.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:6e296a513ca3d94054c2c881cc913116e90fd030ad1c656b3869762b754f5f8a", size = 15506, upload-time = "2024-10-18T15:21:52.974Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.4.2" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "markdown-it-py", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/19/03/a2ecab526543b152300717cf232bb4bb8605b6edb946c845016fa9c9c9fd/mdit_py_plugins-0.4.2.tar.gz", hash = "sha256:5f2cd1fdb606ddf152d37ec30e46101a60512bc0e5fa1a7002c36647b09e26b5", size = 43542, upload-time = "2024-09-09T20:27:49.564Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/a7/f7/7782a043553ee469c1ff49cfa1cdace2d6bf99a1f333cf38676b3ddf30da/mdit_py_plugins-0.4.2-py3-none-any.whl", hash = "sha256:0c673c3f889399a33b95e88d2f0d111b4447bdfea7f237dab2d488f459835636", size = 55316, upload-time = "2024-09-09T20:27:48.397Z" }, +] + +[[package]] +name = "mdit-py-plugins" +version = "0.5.0" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "markdown-it-py", marker = "python_full_version >= '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b2/fd/a756d36c0bfba5f6e39a1cdbdbfdd448dc02692467d83816dff4592a1ebc/mdit_py_plugins-0.5.0.tar.gz", hash = "sha256:f4918cb50119f50446560513a8e311d574ff6aaed72606ddae6d35716fe809c6", size = 44655, upload-time = "2025-08-11T07:25:49.083Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/fb/86/dd6e5db36df29e76c7a7699123569a4a18c1623ce68d826ed96c62643cae/mdit_py_plugins-0.5.0-py3-none-any.whl", hash = "sha256:07a08422fc1936a5d26d146759e9155ea466e842f5ab2f7d2266dd084c8dab1f", size = 57205, upload-time = "2025-08-11T07:25:47.597Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "migra" +version = "3.0.1663481299" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "schemainspect" }, + { name = "six" }, + { name = "sqlbag" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/0e/fb/4761e69d6028909f4b68f175f53ac69c521b75b11e977087b6ce6ec3b006/migra-3.0.1663481299.tar.gz", hash = "sha256:0cf0c125d553008d9ff5402663a51703ccc474bb65b5a4f4727906dbf58e217f", size = 10083, upload-time = "2022-09-18T06:08:24.5Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/66/79bf13b29c2c3a3e72d8dead21fde7ae15f84e78038c92a35e62b3e9c229/migra-3.0.1663481299-py3-none-any.whl", hash = "sha256:061643e9af63488e085d729f267ed4af4249789979732b703ddeb2c478ec9a93", size = 10537, upload-time = "2022-09-18T06:08:22.982Z" }, +] + +[[package]] +name = "myst-parser" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "markdown-it-py", marker = "python_full_version < '3.10'" }, + { name = "mdit-py-plugins", version = "0.4.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "pyyaml", marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = 
"python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/49/64/e2f13dac02f599980798c01156393b781aec983b52a6e4057ee58f07c43a/myst_parser-3.0.1.tar.gz", hash = "sha256:88f0cb406cb363b077d176b51c476f62d60604d68a8dcdf4832e080441301a87", size = 92392, upload-time = "2024-04-28T20:22:42.116Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e2/de/21aa8394f16add8f7427f0a1326ccd2b3a2a8a3245c9252bc5ac034c6155/myst_parser-3.0.1-py3-none-any.whl", hash = "sha256:6457aaa33a5d474aca678b8ead9b3dc298e89c68e67012e73146ea6fd54babf1", size = 83163, upload-time = "2024-04-28T20:22:39.985Z" }, +] + +[[package]] +name = "myst-parser" +version = "4.0.1" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "docutils", marker = "python_full_version >= '3.10'" }, + { name = "jinja2", marker = "python_full_version >= '3.10'" }, + { name = "markdown-it-py", marker = "python_full_version >= '3.10'" }, + { name = "mdit-py-plugins", version = "0.5.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pyyaml", marker = "python_full_version >= '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/66/a5/9626ba4f73555b3735ad86247a8077d4603aa8628537687c839ab08bfe44/myst_parser-4.0.1.tar.gz", hash = "sha256:5cfea715e4f3574138aecbf7d54132296bfd72bb614d31168f48c477a830a7c4", size = 93985, upload-time = "2025-02-12T10:53:03.833Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5f/df/76d0321c3797b54b60fef9ec3bd6f4cfd124b9e422182156a1dd418722cf/myst_parser-4.0.1-py3-none-any.whl", hash = "sha256:9134e88959ec3b5780aedf8a99680ea242869d012e8821db3126d427edc9c95d", size = 84579, upload-time = "2025-02-12T10:53:02.078Z" }, +] + +[[package]] +name = "nodeenv" +version = "1.9.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, +] + +[[package]] +name = "packaging" +version = "25.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, +] + +[[package]] 
+name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "procrastinate" +source = { editable = "." } +dependencies = [ + { name = "asgiref" }, + { name = "attrs" }, + { name = "contextlib2", marker = "python_full_version < '3.10'" }, + { name = "croniter" }, + { name = "psycopg", extra = ["pool"] }, + { name = "python-dateutil" }, + { name = "typing-extensions" }, +] + +[package.optional-dependencies] +aiopg = [ + { name = "aiopg" }, + { name = "psycopg2-binary" }, +] +django = [ + { name = "django", version = "4.2.24", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "django", version = "5.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] +psycopg2 = [ + { name = "psycopg2-binary" }, +] +sphinx = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sqlalchemy = [ + { name = "sqlalchemy" }, +] + +[package.dev-dependencies] +dev = [ + { name = "doc8", version = "1.1.2", 
source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "doc8", version = "2.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "pyright" }, + { name = "ruff" }, +] +docs = [ + { name = "django", version = "4.2.24", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "django", version = "5.2.6", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "furo" }, + { name = "myst-parser", version = "3.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "myst-parser", version = "4.0.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "sphinx-copybutton" }, + { name = "sphinx-github-changelog" }, + { name = "sphinxcontrib-mermaid" }, + { name = "sphinxcontrib-programoutput" }, +] +lint-format = [ + { name = "django-upgrade" }, + { name = "ruff" }, +] +pg-implem = [ + { name = "aiopg" }, + { name = "psycopg", extra = ["binary"], marker = "python_full_version >= '3.10' or platform_machine != 'arm64' or sys_platform != 'darwin'" }, + { name = "psycopg", extra = ["pool"] }, + { name = "psycopg2-binary" }, + { name = "sqlalchemy" }, +] +release = [ + { name = "dunamai" }, +] +test = [ + { name = "migra" }, + { name = "pytest-asyncio" }, + { name = "pytest-benchmark" }, + { name = "pytest-cov" }, + { name = "pytest-django" }, + { name = "pytest-mock" }, 
+ { name = "setuptools" }, +] +types = [ + { name = "django-stubs", version = "5.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "django-stubs", version = "5.2.2", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.10'" }, +] + +[package.metadata] +requires-dist = [ + { name = "aiopg", marker = "extra == 'aiopg'" }, + { name = "asgiref" }, + { name = "attrs" }, + { name = "contextlib2", marker = "python_full_version < '3.10'" }, + { name = "croniter" }, + { name = "django", marker = "extra == 'django'", specifier = ">=2.2" }, + { name = "psycopg", extras = ["pool"] }, + { name = "psycopg2-binary", marker = "extra == 'aiopg'" }, + { name = "psycopg2-binary", marker = "extra == 'psycopg2'" }, + { name = "python-dateutil" }, + { name = "sphinx", marker = "extra == 'sphinx'" }, + { name = "sqlalchemy", marker = "extra == 'sqlalchemy'", specifier = "~=2.0" }, + { name = "typing-extensions" }, +] +provides-extras = ["aiopg", "django", "psycopg2", "sphinx", "sqlalchemy"] + +[package.metadata.requires-dev] +dev = [ + { name = "doc8" }, + { name = "pyright" }, + { name = "ruff" }, +] +docs = [ + { name = "django", specifier = ">=2.2" }, + { name = "furo" }, + { name = "myst-parser" }, + { name = "sphinx" }, + { name = "sphinx-copybutton" }, + { name = "sphinx-github-changelog" }, + { name = "sphinxcontrib-mermaid" }, + { name = "sphinxcontrib-programoutput" }, +] +lint-format = [ + { name = "django-upgrade" }, + { name = "ruff" }, +] +pg-implem = [ + { name = "aiopg" }, + { name = "psycopg", extras = ["binary", "pool"], marker = "platform_machine != 'arm64' or sys_platform != 'darwin'" }, + { name = "psycopg", extras = ["binary", "pool"], marker = "python_full_version >= '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'" }, + { name = "psycopg", extras = ["pool"], marker = "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 
'darwin'" }, + { name = "psycopg2-binary" }, + { name = "sqlalchemy" }, +] +release = [{ name = "dunamai" }] +test = [ + { name = "migra" }, + { name = "pytest-asyncio" }, + { name = "pytest-benchmark" }, + { name = "pytest-cov" }, + { name = "pytest-django" }, + { name = "pytest-mock" }, + { name = "setuptools" }, +] +types = [{ name = "django-stubs" }] + +[[package]] +name = "psycopg" +version = "3.2.9" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "tzdata", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/27/4a/93a6ab570a8d1a4ad171a1f4256e205ce48d828781312c0bbaff36380ecb/psycopg-3.2.9.tar.gz", hash = "sha256:2fbb46fcd17bc81f993f28c47f1ebea38d66ae97cc2dbc3cad73b37cefbff700", size = 158122, upload-time = "2025-05-13T16:11:15.533Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/44/b0/a73c195a56eb6b92e937a5ca58521a5c3346fb233345adc80fd3e2f542e2/psycopg-3.2.9-py3-none-any.whl", hash = "sha256:01a8dadccdaac2123c916208c96e06631641c0566b22005493f09663c7a8d3b6", size = 202705, upload-time = "2025-05-13T16:06:26.584Z" }, +] + +[package.optional-dependencies] +binary = [ + { name = "psycopg-binary", marker = "(python_full_version >= '3.10' and implementation_name != 'pypy') or (implementation_name != 'pypy' and platform_machine != 'arm64') or (implementation_name != 'pypy' and sys_platform != 'darwin')" }, +] +pool = [ + { name = "psycopg-pool" }, +] + +[[package]] +name = "psycopg-binary" +version = "3.2.9" +source = { registry = "https://pypi.org/simple" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b6/ce/d677bc51f9b180986e5515268603519cee682eb6b5e765ae46cdb8526579/psycopg_binary-3.2.9-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:528239bbf55728ba0eacbd20632342867590273a9bacedac7538ebff890f1093", size = 4033081, upload-time = "2025-05-13T16:06:29.666Z" }, + { url = 
"https://files.pythonhosted.org/packages/de/f4/b56263eb20dc36d71d7188622872098400536928edf86895736e28546b3c/psycopg_binary-3.2.9-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e4978c01ca4c208c9d6376bd585e2c0771986b76ff7ea518f6d2b51faece75e8", size = 4082141, upload-time = "2025-05-13T16:06:33.81Z" }, + { url = "https://files.pythonhosted.org/packages/68/47/5316c3b0a2b1ff5f1d440a27638250569994534874a2ce88bf24f5c51c0f/psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1ed2bab85b505d13e66a914d0f8cdfa9475c16d3491cf81394e0748b77729af2", size = 4678993, upload-time = "2025-05-13T16:06:36.309Z" }, + { url = "https://files.pythonhosted.org/packages/53/24/b2c667b59f07fd7d7805c0c2074351bf2b98a336c5030d961db316512ffb/psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:799fa1179ab8a58d1557a95df28b492874c8f4135101b55133ec9c55fc9ae9d7", size = 4500117, upload-time = "2025-05-13T16:06:38.847Z" }, + { url = "https://files.pythonhosted.org/packages/ae/91/a08f8878b0fe0b34b083c149df950bce168bc1b18b2fe849fa42bf4378d4/psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bb37ac3955d19e4996c3534abfa4f23181333974963826db9e0f00731274b695", size = 4766985, upload-time = "2025-05-13T16:06:42.502Z" }, + { url = "https://files.pythonhosted.org/packages/10/be/3a45d5b7d8f4c4332fd42465f2170b5aef4d28a7c79e79ac7e5e1dac74d7/psycopg_binary-3.2.9-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:001e986656f7e06c273dd4104e27f4b4e0614092e544d950c7c938d822b1a894", size = 4461990, upload-time = "2025-05-13T16:06:45.971Z" }, + { url = "https://files.pythonhosted.org/packages/03/ce/20682b9a4fc270d8dc644a0b16c1978732146c6ff0abbc48fbab2f4a70aa/psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fa5c80d8b4cbf23f338db88a7251cef8bb4b68e0f91cf8b6ddfa93884fdbb0c1", size = 3777947, upload-time = "2025-05-13T16:06:49.134Z" }, + { 
url = "https://files.pythonhosted.org/packages/07/5c/f6d486e00bcd8709908ccdd436b2a190d390dfd61e318de4060bc6ee2a1e/psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:39a127e0cf9b55bd4734a8008adf3e01d1fd1cb36339c6a9e2b2cbb6007c50ee", size = 3337502, upload-time = "2025-05-13T16:06:51.378Z" }, + { url = "https://files.pythonhosted.org/packages/0b/a1/086508e929c0123a7f532840bb0a0c8a1ebd7e06aef3ee7fa44a3589bcdf/psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:fb7599e436b586e265bea956751453ad32eb98be6a6e694252f4691c31b16edb", size = 3440809, upload-time = "2025-05-13T16:06:54.552Z" }, + { url = "https://files.pythonhosted.org/packages/40/f2/3a347a0f894355a6b173fca2202eca279b6197727b24e4896cf83f4263ee/psycopg_binary-3.2.9-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5d2c9fe14fe42b3575a0b4e09b081713e83b762c8dc38a3771dd3265f8f110e7", size = 3497231, upload-time = "2025-05-13T16:06:58.858Z" }, + { url = "https://files.pythonhosted.org/packages/18/31/0845a385eb6f4521b398793293b5f746a101e80d5c43792990442d26bc2e/psycopg_binary-3.2.9-cp310-cp310-win_amd64.whl", hash = "sha256:7e4660fad2807612bb200de7262c88773c3483e85d981324b3c647176e41fdc8", size = 2936845, upload-time = "2025-05-13T16:07:02.712Z" }, + { url = "https://files.pythonhosted.org/packages/b6/84/259ea58aca48e03c3c793b4ccfe39ed63db7b8081ef784d039330d9eed96/psycopg_binary-3.2.9-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:2504e9fd94eabe545d20cddcc2ff0da86ee55d76329e1ab92ecfcc6c0a8156c4", size = 4040785, upload-time = "2025-05-13T16:07:07.569Z" }, + { url = "https://files.pythonhosted.org/packages/25/22/ce58ffda2b7e36e45042b4d67f1bbd4dd2ccf4cfd2649696685c61046475/psycopg_binary-3.2.9-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:093a0c079dd6228a7f3c3d82b906b41964eaa062a9a8c19f45ab4984bf4e872b", size = 4087601, upload-time = "2025-05-13T16:07:11.75Z" }, + { url = 
"https://files.pythonhosted.org/packages/c6/4f/b043e85268650c245025e80039b79663d8986f857bc3d3a72b1de67f3550/psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:387c87b51d72442708e7a853e7e7642717e704d59571da2f3b29e748be58c78a", size = 4676524, upload-time = "2025-05-13T16:07:17.038Z" }, + { url = "https://files.pythonhosted.org/packages/da/29/7afbfbd3740ea52fda488db190ef2ef2a9ff7379b85501a2142fb9f7dd56/psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d9ac10a2ebe93a102a326415b330fff7512f01a9401406896e78a81d75d6eddc", size = 4495671, upload-time = "2025-05-13T16:07:21.709Z" }, + { url = "https://files.pythonhosted.org/packages/ea/eb/df69112d18a938cbb74efa1573082248437fa663ba66baf2cdba8a95a2d0/psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:72fdbda5b4c2a6a72320857ef503a6589f56d46821592d4377c8c8604810342b", size = 4768132, upload-time = "2025-05-13T16:07:25.818Z" }, + { url = "https://files.pythonhosted.org/packages/76/fe/4803b20220c04f508f50afee9169268553f46d6eed99640a08c8c1e76409/psycopg_binary-3.2.9-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f34e88940833d46108f949fdc1fcfb74d6b5ae076550cd67ab59ef47555dba95", size = 4458394, upload-time = "2025-05-13T16:07:29.148Z" }, + { url = "https://files.pythonhosted.org/packages/0f/0f/5ecc64607ef6f62b04e610b7837b1a802ca6f7cb7211339f5d166d55f1dd/psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a3e0f89fe35cb03ff1646ab663dabf496477bab2a072315192dbaa6928862891", size = 3776879, upload-time = "2025-05-13T16:07:32.503Z" }, + { url = "https://files.pythonhosted.org/packages/c8/d8/1c3d6e99b7db67946d0eac2cd15d10a79aa7b1e3222ce4aa8e7df72027f5/psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6afb3e62f2a3456f2180a4eef6b03177788df7ce938036ff7f09b696d418d186", size = 3333329, upload-time = "2025-05-13T16:07:35.555Z" }, + 
{ url = "https://files.pythonhosted.org/packages/d7/02/a4e82099816559f558ccaf2b6945097973624dc58d5d1c91eb1e54e5a8e9/psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:cc19ed5c7afca3f6b298bfc35a6baa27adb2019670d15c32d0bb8f780f7d560d", size = 3435683, upload-time = "2025-05-13T16:07:37.863Z" }, + { url = "https://files.pythonhosted.org/packages/91/e4/f27055290d58e8818bed8a297162a096ef7f8ecdf01d98772d4b02af46c4/psycopg_binary-3.2.9-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc75f63653ce4ec764c8f8c8b0ad9423e23021e1c34a84eb5f4ecac8538a4a4a", size = 3497124, upload-time = "2025-05-13T16:07:40.567Z" }, + { url = "https://files.pythonhosted.org/packages/67/3d/17ed07579625529534605eeaeba34f0536754a5667dbf20ea2624fc80614/psycopg_binary-3.2.9-cp311-cp311-win_amd64.whl", hash = "sha256:3db3ba3c470801e94836ad78bf11fd5fab22e71b0c77343a1ee95d693879937a", size = 2939520, upload-time = "2025-05-13T16:07:45.467Z" }, + { url = "https://files.pythonhosted.org/packages/29/6f/ec9957e37a606cd7564412e03f41f1b3c3637a5be018d0849914cb06e674/psycopg_binary-3.2.9-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:be7d650a434921a6b1ebe3fff324dbc2364393eb29d7672e638ce3e21076974e", size = 4022205, upload-time = "2025-05-13T16:07:48.195Z" }, + { url = "https://files.pythonhosted.org/packages/6b/ba/497b8bea72b20a862ac95a94386967b745a472d9ddc88bc3f32d5d5f0d43/psycopg_binary-3.2.9-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6a76b4722a529390683c0304501f238b365a46b1e5fb6b7249dbc0ad6fea51a0", size = 4083795, upload-time = "2025-05-13T16:07:50.917Z" }, + { url = "https://files.pythonhosted.org/packages/42/07/af9503e8e8bdad3911fd88e10e6a29240f9feaa99f57d6fac4a18b16f5a0/psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:96a551e4683f1c307cfc3d9a05fec62c00a7264f320c9962a67a543e3ce0d8ff", size = 4655043, upload-time = "2025-05-13T16:07:54.857Z" }, + { url = 
"https://files.pythonhosted.org/packages/28/ed/aff8c9850df1648cc6a5cc7a381f11ee78d98a6b807edd4a5ae276ad60ad/psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:61d0a6ceed8f08c75a395bc28cb648a81cf8dee75ba4650093ad1a24a51c8724", size = 4477972, upload-time = "2025-05-13T16:07:57.925Z" }, + { url = "https://files.pythonhosted.org/packages/5c/bd/8e9d1b77ec1a632818fe2f457c3a65af83c68710c4c162d6866947d08cc5/psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ad280bbd409bf598683dda82232f5215cfc5f2b1bf0854e409b4d0c44a113b1d", size = 4737516, upload-time = "2025-05-13T16:08:01.616Z" }, + { url = "https://files.pythonhosted.org/packages/46/ec/222238f774cd5a0881f3f3b18fb86daceae89cc410f91ef6a9fb4556f236/psycopg_binary-3.2.9-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:76eddaf7fef1d0994e3d536ad48aa75034663d3a07f6f7e3e601105ae73aeff6", size = 4436160, upload-time = "2025-05-13T16:08:04.278Z" }, + { url = "https://files.pythonhosted.org/packages/37/78/af5af2a1b296eeca54ea7592cd19284739a844974c9747e516707e7b3b39/psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:52e239cd66c4158e412318fbe028cd94b0ef21b0707f56dcb4bdc250ee58fd40", size = 3753518, upload-time = "2025-05-13T16:08:07.567Z" }, + { url = "https://files.pythonhosted.org/packages/ec/ac/8a3ed39ea069402e9e6e6a2f79d81a71879708b31cc3454283314994b1ae/psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:08bf9d5eabba160dd4f6ad247cf12f229cc19d2458511cab2eb9647f42fa6795", size = 3313598, upload-time = "2025-05-13T16:08:09.999Z" }, + { url = "https://files.pythonhosted.org/packages/da/43/26549af068347c808fbfe5f07d2fa8cef747cfff7c695136172991d2378b/psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:1b2cf018168cad87580e67bdde38ff5e51511112f1ce6ce9a8336871f465c19a", size = 3407289, upload-time = "2025-05-13T16:08:12.66Z" }, + { url = 
"https://files.pythonhosted.org/packages/67/55/ea8d227c77df8e8aec880ded398316735add8fda5eb4ff5cc96fac11e964/psycopg_binary-3.2.9-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:14f64d1ac6942ff089fc7e926440f7a5ced062e2ed0949d7d2d680dc5c00e2d4", size = 3472493, upload-time = "2025-05-13T16:08:15.672Z" }, + { url = "https://files.pythonhosted.org/packages/3c/02/6ff2a5bc53c3cd653d281666728e29121149179c73fddefb1e437024c192/psycopg_binary-3.2.9-cp312-cp312-win_amd64.whl", hash = "sha256:7a838852e5afb6b4126f93eb409516a8c02a49b788f4df8b6469a40c2157fa21", size = 2927400, upload-time = "2025-05-13T16:08:18.652Z" }, + { url = "https://files.pythonhosted.org/packages/28/0b/f61ff4e9f23396aca674ed4d5c9a5b7323738021d5d72d36d8b865b3deaf/psycopg_binary-3.2.9-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:98bbe35b5ad24a782c7bf267596638d78aa0e87abc7837bdac5b2a2ab954179e", size = 4017127, upload-time = "2025-05-13T16:08:21.391Z" }, + { url = "https://files.pythonhosted.org/packages/bc/00/7e181fb1179fbfc24493738b61efd0453d4b70a0c4b12728e2b82db355fd/psycopg_binary-3.2.9-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:72691a1615ebb42da8b636c5ca9f2b71f266be9e172f66209a361c175b7842c5", size = 4080322, upload-time = "2025-05-13T16:08:24.049Z" }, + { url = "https://files.pythonhosted.org/packages/58/fd/94fc267c1d1392c4211e54ccb943be96ea4032e761573cf1047951887494/psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:25ab464bfba8c401f5536d5aa95f0ca1dd8257b5202eede04019b4415f491351", size = 4655097, upload-time = "2025-05-13T16:08:27.376Z" }, + { url = "https://files.pythonhosted.org/packages/41/17/31b3acf43de0b2ba83eac5878ff0dea5a608ca2a5c5dd48067999503a9de/psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0e8aeefebe752f46e3c4b769e53f1d4ad71208fe1150975ef7662c22cca80fab", size = 4482114, upload-time = "2025-05-13T16:08:30.781Z" }, + { url = 
"https://files.pythonhosted.org/packages/85/78/b4d75e5fd5a85e17f2beb977abbba3389d11a4536b116205846b0e1cf744/psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b7e4e4dd177a8665c9ce86bc9caae2ab3aa9360b7ce7ec01827ea1baea9ff748", size = 4737693, upload-time = "2025-05-13T16:08:34.625Z" }, + { url = "https://files.pythonhosted.org/packages/3b/95/7325a8550e3388b00b5e54f4ced5e7346b531eb4573bf054c3dbbfdc14fe/psycopg_binary-3.2.9-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7fc2915949e5c1ea27a851f7a472a7da7d0a40d679f0a31e42f1022f3c562e87", size = 4437423, upload-time = "2025-05-13T16:08:37.444Z" }, + { url = "https://files.pythonhosted.org/packages/1a/db/cef77d08e59910d483df4ee6da8af51c03bb597f500f1fe818f0f3b925d3/psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a1fa38a4687b14f517f049477178093c39c2a10fdcced21116f47c017516498f", size = 3758667, upload-time = "2025-05-13T16:08:40.116Z" }, + { url = "https://files.pythonhosted.org/packages/95/3e/252fcbffb47189aa84d723b54682e1bb6d05c8875fa50ce1ada914ae6e28/psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:5be8292d07a3ab828dc95b5ee6b69ca0a5b2e579a577b39671f4f5b47116dfd2", size = 3320576, upload-time = "2025-05-13T16:08:43.243Z" }, + { url = "https://files.pythonhosted.org/packages/1c/cd/9b5583936515d085a1bec32b45289ceb53b80d9ce1cea0fef4c782dc41a7/psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:778588ca9897b6c6bab39b0d3034efff4c5438f5e3bd52fda3914175498202f9", size = 3411439, upload-time = "2025-05-13T16:08:47.321Z" }, + { url = "https://files.pythonhosted.org/packages/45/6b/6f1164ea1634c87956cdb6db759e0b8c5827f989ee3cdff0f5c70e8331f2/psycopg_binary-3.2.9-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f0d5b3af045a187aedbd7ed5fc513bd933a97aaff78e61c3745b330792c4345b", size = 3477477, upload-time = "2025-05-13T16:08:51.166Z" }, + { url = 
"https://files.pythonhosted.org/packages/7b/1d/bf54cfec79377929da600c16114f0da77a5f1670f45e0c3af9fcd36879bc/psycopg_binary-3.2.9-cp313-cp313-win_amd64.whl", hash = "sha256:2290bc146a1b6a9730350f695e8b670e1d1feb8446597bed0bbe7c3c30e0abcb", size = 2928009, upload-time = "2025-05-13T16:08:53.67Z" }, + { url = "https://files.pythonhosted.org/packages/0b/4a/e095884dd016b2bde2796043c61cd383b79e5d2a820c33e2c47293707ca8/psycopg_binary-3.2.9-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:587a3f19954d687a14e0c8202628844db692dbf00bba0e6d006659bf1ca91cbe", size = 4034274, upload-time = "2025-05-13T16:09:43.738Z" }, + { url = "https://files.pythonhosted.org/packages/11/e9/ab3fad6033de260a620f6481e66092417ce31fa194dbf9ac292ab8cb9fd0/psycopg_binary-3.2.9-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:791759138380df21d356ff991265fde7fe5997b0c924a502847a9f9141e68786", size = 4083015, upload-time = "2025-05-13T16:09:54.896Z" }, + { url = "https://files.pythonhosted.org/packages/ba/c8/6cd54a349d0b62b080761eb7bda43190003ecbbf17920d57254d5c780e11/psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:95315b8c8ddfa2fdcb7fe3ddea8a595c1364524f512160c604e3be368be9dd07", size = 4679369, upload-time = "2025-05-13T16:10:00.545Z" }, + { url = "https://files.pythonhosted.org/packages/51/34/35c65ac413c485e9340d62f14adcb34420acae44425f77aee591d49e6647/psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:18ac08475c9b971237fcc395b0a6ee4e8580bb5cf6247bc9b8461644bef5d9f4", size = 4500889, upload-time = "2025-05-13T16:10:07.593Z" }, + { url = "https://files.pythonhosted.org/packages/77/a9/f691b8037b0bcef481b09ae4283beedbf048f79b6fe9bda1445dbb14ed18/psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ac2c04b6345e215e65ca6aef5c05cc689a960b16674eaa1f90a8f86dfaee8c04", size = 4769218, upload-time = "2025-05-13T16:10:23.076Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/38/25afc811c1dfb664b31d66d6f5c070326a1f89f768f1b673273a3abe6912/psycopg_binary-3.2.9-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4c1ab25e3134774f1e476d4bb9050cdec25f10802e63e92153906ae934578734", size = 4462834, upload-time = "2025-05-13T16:10:30.442Z" }, + { url = "https://files.pythonhosted.org/packages/df/e2/eb4a8230e13f691d6e386e22b16d4b90f454839b78ac547be3f399562ee4/psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:4bfec4a73e8447d8fe8854886ffa78df2b1c279a7592241c2eb393d4499a17e2", size = 3779527, upload-time = "2025-05-13T16:10:42.705Z" }, + { url = "https://files.pythonhosted.org/packages/26/39/0f79c7d42f0c5711861ce9db55c65e14e7f1e52bd40304b4d6e7cd505e61/psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:166acc57af5d2ff0c0c342aed02e69a0cd5ff216cae8820c1059a6f3b7cf5f78", size = 3337958, upload-time = "2025-05-13T16:10:47.874Z" }, + { url = "https://files.pythonhosted.org/packages/11/ce/28b1d98aed9337a721b271778d07c5ac7f85730d96f0185cc6d22684536d/psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:413f9e46259fe26d99461af8e1a2b4795a4e27cc8ac6f7919ec19bcee8945074", size = 3440567, upload-time = "2025-05-13T16:10:57.821Z" }, + { url = "https://files.pythonhosted.org/packages/24/54/40a3a8175566f8c1268af0bacf5d7b26371697b6cefa87352c1df4b435e1/psycopg_binary-3.2.9-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:354dea21137a316b6868ee41c2ae7cce001e104760cf4eab3ec85627aed9b6cd", size = 3498637, upload-time = "2025-05-13T16:11:02.854Z" }, + { url = "https://files.pythonhosted.org/packages/63/ee/51748bc8af0ba08e7415fcbbd00b7d069c068f8c08509e8dd0dd0a066394/psycopg_binary-3.2.9-cp39-cp39-win_amd64.whl", hash = "sha256:24ddb03c1ccfe12d000d950c9aba93a7297993c4e3905d9f2c9795bb0764d523", size = 2938614, upload-time = "2025-05-13T16:11:13.299Z" }, +] + +[[package]] +name = "psycopg-pool" +version = "3.2.6" +source = { registry = 
"https://pypi.org/simple" } +dependencies = [ + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/cf/13/1e7850bb2c69a63267c3dbf37387d3f71a00fd0e2fa55c5db14d64ba1af4/psycopg_pool-3.2.6.tar.gz", hash = "sha256:0f92a7817719517212fbfe2fd58b8c35c1850cdd2a80d36b581ba2085d9148e5", size = 29770, upload-time = "2025-02-26T12:03:47.129Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/47/fd/4feb52a55c1a4bd748f2acaed1903ab54a723c47f6d0242780f4d97104d4/psycopg_pool-3.2.6-py3-none-any.whl", hash = "sha256:5887318a9f6af906d041a0b1dc1c60f8f0dda8340c2572b74e10907b51ed5da7", size = 38252, upload-time = "2025-02-26T12:03:45.073Z" }, +] + +[[package]] +name = "psycopg2-binary" +version = "2.9.10" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/cb/0e/bdc8274dc0585090b4e3432267d7be4dfbfd8971c0fa59167c711105a6bf/psycopg2-binary-2.9.10.tar.gz", hash = "sha256:4b3df0e6990aa98acda57d983942eff13d824135fe2250e6522edaa782a06de2", size = 385764, upload-time = "2024-10-16T11:24:58.126Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7a/81/331257dbf2801cdb82105306042f7a1637cc752f65f2bb688188e0de5f0b/psycopg2_binary-2.9.10-cp310-cp310-macosx_12_0_x86_64.whl", hash = "sha256:0ea8e3d0ae83564f2fc554955d327fa081d065c8ca5cc6d2abb643e2c9c1200f", size = 3043397, upload-time = "2024-10-16T11:18:58.647Z" }, + { url = "https://files.pythonhosted.org/packages/e7/9a/7f4f2f031010bbfe6a02b4a15c01e12eb6b9b7b358ab33229f28baadbfc1/psycopg2_binary-2.9.10-cp310-cp310-macosx_14_0_arm64.whl", hash = "sha256:3e9c76f0ac6f92ecfc79516a8034a544926430f7b080ec5a0537bca389ee0906", size = 3274806, upload-time = "2024-10-16T11:19:03.935Z" }, + { url = "https://files.pythonhosted.org/packages/e5/57/8ddd4b374fa811a0b0a0f49b6abad1cde9cb34df73ea3348cc283fcd70b4/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:2ad26b467a405c798aaa1458ba09d7e2b6e5f96b1ce0ac15d82fd9f95dc38a92", size = 2851361, upload-time = "2024-10-16T11:19:07.277Z" }, + { url = "https://files.pythonhosted.org/packages/f9/66/d1e52c20d283f1f3a8e7e5c1e06851d432f123ef57b13043b4f9b21ffa1f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:270934a475a0e4b6925b5f804e3809dd5f90f8613621d062848dd82f9cd62007", size = 3080836, upload-time = "2024-10-16T11:19:11.033Z" }, + { url = "https://files.pythonhosted.org/packages/a0/cb/592d44a9546aba78f8a1249021fe7c59d3afb8a0ba51434d6610cc3462b6/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:48b338f08d93e7be4ab2b5f1dbe69dc5e9ef07170fe1f86514422076d9c010d0", size = 3264552, upload-time = "2024-10-16T11:19:14.606Z" }, + { url = "https://files.pythonhosted.org/packages/64/33/c8548560b94b7617f203d7236d6cdf36fe1a5a3645600ada6efd79da946f/psycopg2_binary-2.9.10-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f4152f8f76d2023aac16285576a9ecd2b11a9895373a1f10fd9db54b3ff06b4", size = 3019789, upload-time = "2024-10-16T11:19:18.889Z" }, + { url = "https://files.pythonhosted.org/packages/b0/0e/c2da0db5bea88a3be52307f88b75eec72c4de62814cbe9ee600c29c06334/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:32581b3020c72d7a421009ee1c6bf4a131ef5f0a968fab2e2de0c9d2bb4577f1", size = 2871776, upload-time = "2024-10-16T11:19:23.023Z" }, + { url = "https://files.pythonhosted.org/packages/15/d7/774afa1eadb787ddf41aab52d4c62785563e29949613c958955031408ae6/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:2ce3e21dc3437b1d960521eca599d57408a695a0d3c26797ea0f72e834c7ffe5", size = 2820959, upload-time = "2024-10-16T11:19:26.906Z" }, + { url = "https://files.pythonhosted.org/packages/5e/ed/440dc3f5991a8c6172a1cde44850ead0e483a375277a1aef7cfcec00af07/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = 
"sha256:e984839e75e0b60cfe75e351db53d6db750b00de45644c5d1f7ee5d1f34a1ce5", size = 2919329, upload-time = "2024-10-16T11:19:30.027Z" }, + { url = "https://files.pythonhosted.org/packages/03/be/2cc8f4282898306732d2ae7b7378ae14e8df3c1231b53579efa056aae887/psycopg2_binary-2.9.10-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3c4745a90b78e51d9ba06e2088a2fe0c693ae19cc8cb051ccda44e8df8a6eb53", size = 2957659, upload-time = "2024-10-16T11:19:32.864Z" }, + { url = "https://files.pythonhosted.org/packages/d0/12/fb8e4f485d98c570e00dad5800e9a2349cfe0f71a767c856857160d343a5/psycopg2_binary-2.9.10-cp310-cp310-win32.whl", hash = "sha256:e5720a5d25e3b99cd0dc5c8a440570469ff82659bb09431c1439b92caf184d3b", size = 1024605, upload-time = "2024-10-16T11:19:35.462Z" }, + { url = "https://files.pythonhosted.org/packages/22/4f/217cd2471ecf45d82905dd09085e049af8de6cfdc008b6663c3226dc1c98/psycopg2_binary-2.9.10-cp310-cp310-win_amd64.whl", hash = "sha256:3c18f74eb4386bf35e92ab2354a12c17e5eb4d9798e4c0ad3a00783eae7cd9f1", size = 1163817, upload-time = "2024-10-16T11:19:37.384Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8f/9feb01291d0d7a0a4c6a6bab24094135c2b59c6a81943752f632c75896d6/psycopg2_binary-2.9.10-cp311-cp311-macosx_12_0_x86_64.whl", hash = "sha256:04392983d0bb89a8717772a193cfaac58871321e3ec69514e1c4e0d4957b5aff", size = 3043397, upload-time = "2024-10-16T11:19:40.033Z" }, + { url = "https://files.pythonhosted.org/packages/15/30/346e4683532011561cd9c8dfeac6a8153dd96452fee0b12666058ab7893c/psycopg2_binary-2.9.10-cp311-cp311-macosx_14_0_arm64.whl", hash = "sha256:1a6784f0ce3fec4edc64e985865c17778514325074adf5ad8f80636cd029ef7c", size = 3274806, upload-time = "2024-10-16T11:19:43.5Z" }, + { url = "https://files.pythonhosted.org/packages/66/6e/4efebe76f76aee7ec99166b6c023ff8abdc4e183f7b70913d7c047701b79/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b5f86c56eeb91dc3135b3fd8a95dc7ae14c538a2f3ad77a19645cf55bab1799c", 
size = 2851370, upload-time = "2024-10-16T11:19:46.986Z" }, + { url = "https://files.pythonhosted.org/packages/7f/fd/ff83313f86b50f7ca089b161b8e0a22bb3c319974096093cd50680433fdb/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:2b3d2491d4d78b6b14f76881905c7a8a8abcf974aad4a8a0b065273a0ed7a2cb", size = 3080780, upload-time = "2024-10-16T11:19:50.242Z" }, + { url = "https://files.pythonhosted.org/packages/e6/c4/bfadd202dcda8333a7ccafdc51c541dbdfce7c2c7cda89fa2374455d795f/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:2286791ececda3a723d1910441c793be44625d86d1a4e79942751197f4d30341", size = 3264583, upload-time = "2024-10-16T11:19:54.424Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f1/09f45ac25e704ac954862581f9f9ae21303cc5ded3d0b775532b407f0e90/psycopg2_binary-2.9.10-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:512d29bb12608891e349af6a0cccedce51677725a921c07dba6342beaf576f9a", size = 3019831, upload-time = "2024-10-16T11:19:57.762Z" }, + { url = "https://files.pythonhosted.org/packages/9e/2e/9beaea078095cc558f215e38f647c7114987d9febfc25cb2beed7c3582a5/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:5a507320c58903967ef7384355a4da7ff3f28132d679aeb23572753cbf2ec10b", size = 2871822, upload-time = "2024-10-16T11:20:04.693Z" }, + { url = "https://files.pythonhosted.org/packages/01/9e/ef93c5d93f3dc9fc92786ffab39e323b9aed066ba59fdc34cf85e2722271/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:6d4fa1079cab9018f4d0bd2db307beaa612b0d13ba73b5c6304b9fe2fb441ff7", size = 2820975, upload-time = "2024-10-16T11:20:11.401Z" }, + { url = "https://files.pythonhosted.org/packages/a5/f0/049e9631e3268fe4c5a387f6fc27e267ebe199acf1bc1bc9cbde4bd6916c/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:851485a42dbb0bdc1edcdabdb8557c09c9655dfa2ca0460ff210522e073e319e", 
size = 2919320, upload-time = "2024-10-16T11:20:17.959Z" }, + { url = "https://files.pythonhosted.org/packages/dc/9a/bcb8773b88e45fb5a5ea8339e2104d82c863a3b8558fbb2aadfe66df86b3/psycopg2_binary-2.9.10-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:35958ec9e46432d9076286dda67942ed6d968b9c3a6a2fd62b48939d1d78bf68", size = 2957617, upload-time = "2024-10-16T11:20:24.711Z" }, + { url = "https://files.pythonhosted.org/packages/e2/6b/144336a9bf08a67d217b3af3246abb1d027095dab726f0687f01f43e8c03/psycopg2_binary-2.9.10-cp311-cp311-win32.whl", hash = "sha256:ecced182e935529727401b24d76634a357c71c9275b356efafd8a2a91ec07392", size = 1024618, upload-time = "2024-10-16T11:20:27.718Z" }, + { url = "https://files.pythonhosted.org/packages/61/69/3b3d7bd583c6d3cbe5100802efa5beacaacc86e37b653fc708bf3d6853b8/psycopg2_binary-2.9.10-cp311-cp311-win_amd64.whl", hash = "sha256:ee0e8c683a7ff25d23b55b11161c2663d4b099770f6085ff0a20d4505778d6b4", size = 1163816, upload-time = "2024-10-16T11:20:30.777Z" }, + { url = "https://files.pythonhosted.org/packages/49/7d/465cc9795cf76f6d329efdafca74693714556ea3891813701ac1fee87545/psycopg2_binary-2.9.10-cp312-cp312-macosx_12_0_x86_64.whl", hash = "sha256:880845dfe1f85d9d5f7c412efea7a08946a46894537e4e5d091732eb1d34d9a0", size = 3044771, upload-time = "2024-10-16T11:20:35.234Z" }, + { url = "https://files.pythonhosted.org/packages/8b/31/6d225b7b641a1a2148e3ed65e1aa74fc86ba3fee850545e27be9e1de893d/psycopg2_binary-2.9.10-cp312-cp312-macosx_14_0_arm64.whl", hash = "sha256:9440fa522a79356aaa482aa4ba500b65f28e5d0e63b801abf6aa152a29bd842a", size = 3275336, upload-time = "2024-10-16T11:20:38.742Z" }, + { url = "https://files.pythonhosted.org/packages/30/b7/a68c2b4bff1cbb1728e3ec864b2d92327c77ad52edcd27922535a8366f68/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e3923c1d9870c49a2d44f795df0c889a22380d36ef92440ff618ec315757e539", size = 2851637, upload-time = "2024-10-16T11:20:42.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/0b/b1/cfedc0e0e6f9ad61f8657fd173b2f831ce261c02a08c0b09c652b127d813/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7b2c956c028ea5de47ff3a8d6b3cc3330ab45cf0b7c3da35a2d6ff8420896526", size = 3082097, upload-time = "2024-10-16T11:20:46.185Z" }, + { url = "https://files.pythonhosted.org/packages/18/ed/0a8e4153c9b769f59c02fb5e7914f20f0b2483a19dae7bf2db54b743d0d0/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:f758ed67cab30b9a8d2833609513ce4d3bd027641673d4ebc9c067e4d208eec1", size = 3264776, upload-time = "2024-10-16T11:20:50.879Z" }, + { url = "https://files.pythonhosted.org/packages/10/db/d09da68c6a0cdab41566b74e0a6068a425f077169bed0946559b7348ebe9/psycopg2_binary-2.9.10-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8cd9b4f2cfab88ed4a9106192de509464b75a906462fb846b936eabe45c2063e", size = 3020968, upload-time = "2024-10-16T11:20:56.819Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/4d6f8c255f0dfffb410db2b3f9ac5218d959a66c715c34cac31081e19b95/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:6dc08420625b5a20b53551c50deae6e231e6371194fa0651dbe0fb206452ae1f", size = 2872334, upload-time = "2024-10-16T11:21:02.411Z" }, + { url = "https://files.pythonhosted.org/packages/05/f7/20d7bf796593c4fea95e12119d6cc384ff1f6141a24fbb7df5a668d29d29/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7cd730dfa7c36dbe8724426bf5612798734bff2d3c3857f36f2733f5bfc7c00", size = 2822722, upload-time = "2024-10-16T11:21:09.01Z" }, + { url = "https://files.pythonhosted.org/packages/4d/e4/0c407ae919ef626dbdb32835a03b6737013c3cc7240169843965cada2bdf/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:155e69561d54d02b3c3209545fb08938e27889ff5a10c19de8d23eb5a41be8a5", size = 2920132, upload-time = "2024-10-16T11:21:16.339Z" }, + { url = 
"https://files.pythonhosted.org/packages/2d/70/aa69c9f69cf09a01da224909ff6ce8b68faeef476f00f7ec377e8f03be70/psycopg2_binary-2.9.10-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:c3cc28a6fd5a4a26224007712e79b81dbaee2ffb90ff406256158ec4d7b52b47", size = 2959312, upload-time = "2024-10-16T11:21:25.584Z" }, + { url = "https://files.pythonhosted.org/packages/d3/bd/213e59854fafe87ba47814bf413ace0dcee33a89c8c8c814faca6bc7cf3c/psycopg2_binary-2.9.10-cp312-cp312-win32.whl", hash = "sha256:ec8a77f521a17506a24a5f626cb2aee7850f9b69a0afe704586f63a464f3cd64", size = 1025191, upload-time = "2024-10-16T11:21:29.912Z" }, + { url = "https://files.pythonhosted.org/packages/92/29/06261ea000e2dc1e22907dbbc483a1093665509ea586b29b8986a0e56733/psycopg2_binary-2.9.10-cp312-cp312-win_amd64.whl", hash = "sha256:18c5ee682b9c6dd3696dad6e54cc7ff3a1a9020df6a5c0f861ef8bfd338c3ca0", size = 1164031, upload-time = "2024-10-16T11:21:34.211Z" }, + { url = "https://files.pythonhosted.org/packages/3e/30/d41d3ba765609c0763505d565c4d12d8f3c79793f0d0f044ff5a28bf395b/psycopg2_binary-2.9.10-cp313-cp313-macosx_12_0_x86_64.whl", hash = "sha256:26540d4a9a4e2b096f1ff9cce51253d0504dca5a85872c7f7be23be5a53eb18d", size = 3044699, upload-time = "2024-10-16T11:21:42.841Z" }, + { url = "https://files.pythonhosted.org/packages/35/44/257ddadec7ef04536ba71af6bc6a75ec05c5343004a7ec93006bee66c0bc/psycopg2_binary-2.9.10-cp313-cp313-macosx_14_0_arm64.whl", hash = "sha256:e217ce4d37667df0bc1c397fdcd8de5e81018ef305aed9415c3b093faaeb10fb", size = 3275245, upload-time = "2024-10-16T11:21:51.989Z" }, + { url = "https://files.pythonhosted.org/packages/1b/11/48ea1cd11de67f9efd7262085588790a95d9dfcd9b8a687d46caf7305c1a/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:245159e7ab20a71d989da00f280ca57da7641fa2cdcf71749c193cea540a74f7", size = 2851631, upload-time = "2024-10-16T11:21:57.584Z" }, + { url = 
"https://files.pythonhosted.org/packages/62/e0/62ce5ee650e6c86719d621a761fe4bc846ab9eff8c1f12b1ed5741bf1c9b/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:3c4ded1a24b20021ebe677b7b08ad10bf09aac197d6943bfe6fec70ac4e4690d", size = 3082140, upload-time = "2024-10-16T11:22:02.005Z" }, + { url = "https://files.pythonhosted.org/packages/27/ce/63f946c098611f7be234c0dd7cb1ad68b0b5744d34f68062bb3c5aa510c8/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3abb691ff9e57d4a93355f60d4f4c1dd2d68326c968e7db17ea96df3c023ef73", size = 3264762, upload-time = "2024-10-16T11:22:06.412Z" }, + { url = "https://files.pythonhosted.org/packages/43/25/c603cd81402e69edf7daa59b1602bd41eb9859e2824b8c0855d748366ac9/psycopg2_binary-2.9.10-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8608c078134f0b3cbd9f89b34bd60a943b23fd33cc5f065e8d5f840061bd0673", size = 3020967, upload-time = "2024-10-16T11:22:11.583Z" }, + { url = "https://files.pythonhosted.org/packages/5f/d6/8708d8c6fca531057fa170cdde8df870e8b6a9b136e82b361c65e42b841e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:230eeae2d71594103cd5b93fd29d1ace6420d0b86f4778739cb1a5a32f607d1f", size = 2872326, upload-time = "2024-10-16T11:22:16.406Z" }, + { url = "https://files.pythonhosted.org/packages/ce/ac/5b1ea50fc08a9df82de7e1771537557f07c2632231bbab652c7e22597908/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:bb89f0a835bcfc1d42ccd5f41f04870c1b936d8507c6df12b7737febc40f0909", size = 2822712, upload-time = "2024-10-16T11:22:21.366Z" }, + { url = "https://files.pythonhosted.org/packages/c4/fc/504d4503b2abc4570fac3ca56eb8fed5e437bf9c9ef13f36b6621db8ef00/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:f0c2d907a1e102526dd2986df638343388b94c33860ff3bbe1384130828714b1", size = 2920155, upload-time = "2024-10-16T11:22:25.684Z" }, + { url = 
"https://files.pythonhosted.org/packages/b2/d1/323581e9273ad2c0dbd1902f3fb50c441da86e894b6e25a73c3fda32c57e/psycopg2_binary-2.9.10-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:f8157bed2f51db683f31306aa497311b560f2265998122abe1dce6428bd86567", size = 2959356, upload-time = "2024-10-16T11:22:30.562Z" }, + { url = "https://files.pythonhosted.org/packages/08/50/d13ea0a054189ae1bc21af1d85b6f8bb9bbc5572991055d70ad9006fe2d6/psycopg2_binary-2.9.10-cp313-cp313-win_amd64.whl", hash = "sha256:27422aa5f11fbcd9b18da48373eb67081243662f9b46e6fd07c3eb46e4535142", size = 2569224, upload-time = "2025-01-04T20:09:19.234Z" }, + { url = "https://files.pythonhosted.org/packages/a2/bc/e77648009b6e61af327c607543f65fdf25bcfb4100f5a6f3bdb62ddac03c/psycopg2_binary-2.9.10-cp39-cp39-macosx_12_0_x86_64.whl", hash = "sha256:7a813c8bdbaaaab1f078014b9b0b13f5de757e2b5d9be6403639b298a04d218b", size = 3043437, upload-time = "2024-10-16T11:23:42.946Z" }, + { url = "https://files.pythonhosted.org/packages/e0/e8/5a12211a1f5b959f3e3ccd342eace60c1f26422f53e06d687821dc268780/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d00924255d7fc916ef66e4bf22f354a940c67179ad3fd7067d7a0a9c84d2fbfc", size = 2851340, upload-time = "2024-10-16T11:23:50.038Z" }, + { url = "https://files.pythonhosted.org/packages/47/ed/5932b0458a7fc61237b653df050513c8d18a6f4083cc7f90dcef967f7bce/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7559bce4b505762d737172556a4e6ea8a9998ecac1e39b5233465093e8cee697", size = 3080905, upload-time = "2024-10-16T11:23:57.932Z" }, + { url = "https://files.pythonhosted.org/packages/71/df/8047d85c3d23864aca4613c3be1ea0fe61dbe4e050a89ac189f9dce4403e/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e8b58f0a96e7a1e341fc894f62c1177a7c83febebb5ff9123b579418fdc8a481", size = 3264640, upload-time = "2024-10-16T11:24:06.122Z" }, + { url = 
"https://files.pythonhosted.org/packages/f3/de/6157e4ef242920e8f2749f7708d5cc8815414bdd4a27a91996e7cd5c80df/psycopg2_binary-2.9.10-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6b269105e59ac96aba877c1707c600ae55711d9dcd3fc4b5012e4af68e30c648", size = 3019812, upload-time = "2024-10-16T11:24:17.025Z" }, + { url = "https://files.pythonhosted.org/packages/25/f9/0fc49efd2d4d6db3a8d0a3f5749b33a0d3fdd872cad49fbf5bfce1c50027/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:79625966e176dc97ddabc142351e0409e28acf4660b88d1cf6adb876d20c490d", size = 2871933, upload-time = "2024-10-16T11:24:24.858Z" }, + { url = "https://files.pythonhosted.org/packages/57/bc/2ed1bd182219065692ed458d218d311b0b220b20662d25d913bc4e8d3549/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_i686.whl", hash = "sha256:8aabf1c1a04584c168984ac678a668094d831f152859d06e055288fa515e4d30", size = 2820990, upload-time = "2024-10-16T11:24:29.571Z" }, + { url = "https://files.pythonhosted.org/packages/71/2a/43f77a9b8ee0b10e2de784d97ddc099d9fe0d9eec462a006e4d2cc74756d/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_ppc64le.whl", hash = "sha256:19721ac03892001ee8fdd11507e6a2e01f4e37014def96379411ca99d78aeb2c", size = 2919352, upload-time = "2024-10-16T11:24:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/57/86/d2943df70469e6afab3b5b8e1367fccc61891f46de436b24ddee6f2c8404/psycopg2_binary-2.9.10-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:7f5d859928e635fa3ce3477704acee0f667b3a3d3e4bb109f2b18d4005f38287", size = 2957614, upload-time = "2024-10-16T11:24:44.423Z" }, + { url = "https://files.pythonhosted.org/packages/85/21/195d69371330983aa16139e60ba855d0a18164c9295f3a3696be41bbcd54/psycopg2_binary-2.9.10-cp39-cp39-win32.whl", hash = "sha256:3216ccf953b3f267691c90c6fe742e45d890d8272326b4a8b20850a03d05b7b8", size = 1025341, upload-time = "2024-10-16T11:24:48.056Z" }, + { url = 
"https://files.pythonhosted.org/packages/ad/53/73196ebc19d6fbfc22427b982fbc98698b7b9c361e5e7707e3a3247cf06d/psycopg2_binary-2.9.10-cp39-cp39-win_amd64.whl", hash = "sha256:30e34c4e97964805f715206c7b789d54a78b70f3ff19fbe590104b71c45600e5", size = 1163958, upload-time = "2024-10-16T11:24:51.882Z" }, +] + +[[package]] +name = "py-cpuinfo" +version = "9.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, +] + +[[package]] +name = "pygments" +version = "2.19.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b0/77/a5b8c569bf593b0140bde72ea885a803b82086995367bf2037de0159d924/pygments-2.19.2.tar.gz", hash = "sha256:636cb2477cec7f8952536970bc533bc43743542f70392ae026374600add5b887", size = 4968631, upload-time = "2025-06-21T13:39:12.283Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/21/705964c7812476f378728bdf590ca4b771ec72385c533964653c68e86bdc/pygments-2.19.2-py3-none-any.whl", hash = "sha256:86540386c03d588bb81d44bc3928634ff26449851e99741617ecb9037ee5ec0b", size = 1225217, upload-time = "2025-06-21T13:39:07.939Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.405" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/fb/6c/ba4bbee22e76af700ea593a1d8701e3225080956753bee9750dcc25e2649/pyright-1.1.405.tar.gz", hash = "sha256:5c2a30e1037af27eb463a1cc0b9f6d65fec48478ccf092c1ac28385a15c55763", size = 4068319, upload-time = "2025-09-04T03:37:06.776Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d5/1a/524f832e1ff1962a22a1accc775ca7b143ba2e9f5924bb6749dce566784a/pyright-1.1.405-py3-none-any.whl", hash = "sha256:a2cb13700b5508ce8e5d4546034cb7ea4aedb60215c6c33f56cec7f53996035a", size = 5905038, upload-time = "2025-09-04T03:37:04.913Z" }, +] + +[[package]] +name = "pytest" +version = "8.4.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a3/5c/00a0e072241553e1a7496d638deababa67c5058571567b92a7eaa258397c/pytest-8.4.2.tar.gz", hash = "sha256:86c0d0b93306b961d58d62a4db4879f27fe25513d4b969df351abdddb3c30e01", size = 1519618, upload-time = "2025-09-04T14:34:22.711Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a8/a4/20da314d277121d6534b3a980b29035dcd51e6744bd79075a6ce8fa4eb8d/pytest-8.4.2-py3-none-any.whl", hash = "sha256:872f880de3fc3a5bdc88a11b39c9710c3497a547cfa9320bc3c5e62fbf272e79", size = 365750, upload-time = "2025-09-04T14:34:20.226Z" }, +] + +[[package]] +name = "pytest-asyncio" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "backports-asyncio-runner", marker = "python_full_version < '3.11'" }, + { name = "pytest" }, + { name = "typing-extensions", marker = "python_full_version < '3.10'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/4e/51/f8794af39eeb870e87a8c8068642fc07bce0c854d6865d7dd0f2a9d338c2/pytest_asyncio-1.1.0.tar.gz", hash = "sha256:796aa822981e01b68c12e4827b8697108f7205020f24b5793b3c41555dab68ea", size = 46652, upload-time = "2025-07-16T04:29:26.393Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c7/9d/bf86eddabf8c6c9cb1ea9a869d6873b46f105a5d292d3a6f7071f5b07935/pytest_asyncio-1.1.0-py3-none-any.whl", hash = "sha256:5fe2d69607b0bd75c656d1211f969cadba035030156745ee09e7d71740e58ecf", size = 15157, upload-time = "2025-07-16T04:29:24.929Z" }, +] + +[[package]] +name = "pytest-benchmark" +version = "5.1.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "py-cpuinfo" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/39/d0/a8bd08d641b393db3be3819b03e2d9bb8760ca8479080a26a5f6e540e99c/pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105", size = 337810, upload-time = "2024-10-30T11:51:48.521Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/d6/b41653199ea09d5969d4e385df9bbfd9a100f28ca7e824ce7c0a016e3053/pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89", size = 44259, upload-time = "2024-10-30T11:51:45.94Z" }, +] + +[[package]] +name = "pytest-cov" +version = "6.3.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/30/4c/f883ab8f0daad69f47efdf95f55a66b51a8b939c430dadce0611508d9e99/pytest_cov-6.3.0.tar.gz", hash = "sha256:35c580e7800f87ce892e687461166e1ac2bcb8fb9e13aea79032518d6e503ff2", size = 70398, upload-time = "2025-09-06T15:40:14.361Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/80/b4/bb7263e12aade3842b938bc5c6958cae79c5ee18992f9b9349019579da0f/pytest_cov-6.3.0-py3-none-any.whl", hash = "sha256:440db28156d2468cafc0415b4f8e50856a0d11faefa38f30906048fe490f1749", size = 25115, upload-time = "2025-09-06T15:40:12.44Z" }, +] + +[[package]] +name = "pytest-django" +version = "4.11.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b1/fb/55d580352db26eb3d59ad50c64321ddfe228d3d8ac107db05387a2fadf3a/pytest_django-4.11.1.tar.gz", hash = "sha256:a949141a1ee103cb0e7a20f1451d355f83f5e4a5d07bdd4dcfdd1fd0ff227991", size = 86202, upload-time = "2025-04-03T18:56:09.338Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/be/ac/bd0608d229ec808e51a21044f3f2f27b9a37e7a0ebaca7247882e67876af/pytest_django-4.11.1-py3-none-any.whl", hash = "sha256:1b63773f648aa3d8541000c26929c1ea63934be1cfa674c76436966d73fe6a10", size = 25281, upload-time = "2025-04-03T18:56:07.678Z" }, +] + +[[package]] +name = "pytest-mock" +version = "3.15.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/61/99/3323ee5c16b3637b4d941c362182d3e749c11e400bea31018c42219f3a98/pytest_mock-3.15.0.tar.gz", hash = "sha256:ab896bd190316b9d5d87b277569dfcdf718b2d049a2ccff5f7aca279c002a1cf", size = 33838, upload-time = "2025-09-04T20:57:48.679Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2b/b3/7fefc43fb706380144bcd293cc6e446e6f637ddfa8b83f48d1734156b529/pytest_mock-3.15.0-py3-none-any.whl", hash = "sha256:ef2219485fb1bd256b00e7ad7466ce26729b30eadfc7cbcdb4fa9a92ca68db6f", size = 10050, upload-time = "2025-09-04T20:57:47.274Z" }, +] + +[[package]] +name = "python-dateutil" +version = "2.9.0.post0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/66/c0/0c8b6ad9f17a802ee498c46e004a0eb49bc148f2fd230864601a86dcf6db/python-dateutil-2.9.0.post0.tar.gz", hash = "sha256:37dd54208da7e1cd875388217d5e00ebd4179249f90fb72437e91a35459a0ad3", size = 342432, upload-time = "2024-03-01T18:36:20.211Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/ec/57/56b9bcc3c9c6a792fcbaf139543cee77261f3651ca9da0c93f5c1221264b/python_dateutil-2.9.0.post0-py2.py3-none-any.whl", hash = "sha256:a8b2bc7bffae282281c8140a97d3aa9c14da0b136dfe83f850eea9a5f7470427", size = 229892, upload-time = "2024-03-01T18:36:18.57Z" }, +] + +[[package]] +name = "pytz" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f8/bf/abbd3cdfb8fbc7fb3d4d38d320f2441b1e7cbe29be4f23797b4a2b5d8aac/pytz-2025.2.tar.gz", hash = "sha256:360b9e3dbb49a209c21ad61809c7fb453643e048b38924c765813546746e81c3", size = 320884, upload-time = "2025-03-25T02:25:00.538Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/81/c4/34e93fe5f5429d7570ec1fa436f1986fb1f00c3e0f43a589fe2bbcd22c3f/pytz-2025.2-py2.py3-none-any.whl", hash = "sha256:5ddf76296dd8c44c26eb8f4b6f35488f3ccbf6fbbd7adee0b7262d43f0ec2f00", size = 509225, upload-time = "2025-03-25T02:24:58.468Z" }, +] + +[[package]] +name = "pyyaml" +version = "6.0.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = 
"2024-08-06T20:31:40.178Z" }, + { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, + { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, + { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, + { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, + { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, + { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, + { url = 
"https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, + { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, + { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, + { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, + { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, + { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, + { url = 
"https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, + { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, + { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, + { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, + { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, + { url = 
"https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, + { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, + { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, + { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, + { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, + { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, + { url = 
"https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, + { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, + { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, + { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, + { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, + { url = 
"https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, + { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, + { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, + { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, + { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, + { url = "https://files.pythonhosted.org/packages/65/d8/b7a1db13636d7fb7d4ff431593c510c8b8fca920ade06ca8ef20015493c5/PyYAML-6.0.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:688ba32a1cffef67fd2e9398a2efebaea461578b0923624778664cc1c914db5d", size = 184777, upload-time = "2024-08-06T20:33:25.896Z" }, + { url = 
"https://files.pythonhosted.org/packages/0a/02/6ec546cd45143fdf9840b2c6be8d875116a64076218b61d68e12548e5839/PyYAML-6.0.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:a8786accb172bd8afb8be14490a16625cbc387036876ab6ba70912730faf8e1f", size = 172318, upload-time = "2024-08-06T20:33:27.212Z" }, + { url = "https://files.pythonhosted.org/packages/0e/9a/8cc68be846c972bda34f6c2a93abb644fb2476f4dcc924d52175786932c9/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:d8e03406cac8513435335dbab54c0d385e4a49e4945d2909a581c83647ca0290", size = 720891, upload-time = "2024-08-06T20:33:28.974Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6c/6e1b7f40181bc4805e2e07f4abc10a88ce4648e7e95ff1abe4ae4014a9b2/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f753120cb8181e736c57ef7636e83f31b9c0d1722c516f7e86cf15b7aa57ff12", size = 722614, upload-time = "2024-08-06T20:33:34.157Z" }, + { url = "https://files.pythonhosted.org/packages/3d/32/e7bd8535d22ea2874cef6a81021ba019474ace0d13a4819c2a4bce79bd6a/PyYAML-6.0.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3b1fdb9dc17f5a7677423d508ab4f243a726dea51fa5e70992e59a7411c89d19", size = 737360, upload-time = "2024-08-06T20:33:35.84Z" }, + { url = "https://files.pythonhosted.org/packages/d7/12/7322c1e30b9be969670b672573d45479edef72c9a0deac3bb2868f5d7469/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:0b69e4ce7a131fe56b7e4d770c67429700908fc0752af059838b1cfb41960e4e", size = 699006, upload-time = "2024-08-06T20:33:37.501Z" }, + { url = "https://files.pythonhosted.org/packages/82/72/04fcad41ca56491995076630c3ec1e834be241664c0c09a64c9a2589b507/PyYAML-6.0.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:a9f8c2e67970f13b16084e04f134610fd1d374bf477b17ec1599185cf611d725", size = 723577, upload-time = "2024-08-06T20:33:39.389Z" }, + { url = 
"https://files.pythonhosted.org/packages/ed/5e/46168b1f2757f1fcd442bc3029cd8767d88a98c9c05770d8b420948743bb/PyYAML-6.0.2-cp39-cp39-win32.whl", hash = "sha256:6395c297d42274772abc367baaa79683958044e5d3835486c16da75d2a694631", size = 144593, upload-time = "2024-08-06T20:33:46.63Z" }, + { url = "https://files.pythonhosted.org/packages/19/87/5124b1c1f2412bb95c59ec481eaf936cd32f0fe2a7b16b97b81c4c017a6a/PyYAML-6.0.2-cp39-cp39-win_amd64.whl", hash = "sha256:39693e1f8320ae4f43943590b49779ffb98acb81f788220ea932a6b6c51004d8", size = 162312, upload-time = "2024-08-06T20:33:49.073Z" }, +] + +[[package]] +name = "requests" +version = "2.32.5" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, +] + +[[package]] +name = "restructuredtext-lint" +version = "1.4.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/48/9c/6d8035cafa2d2d314f34e6cd9313a299de095b26e96f1c7312878f988eec/restructuredtext_lint-1.4.0.tar.gz", hash = "sha256:1b235c0c922341ab6c530390892eb9e92f90b9b75046063e047cacfb0f050c45", size = 16723, upload-time = "2022-02-24T05:51:10.907Z" } + +[[package]] +name = "roman-numerals-py" +version = "3.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { 
url = "https://files.pythonhosted.org/packages/30/76/48fd56d17c5bdbdf65609abbc67288728a98ed4c02919428d4f52d23b24b/roman_numerals_py-3.1.0.tar.gz", hash = "sha256:be4bf804f083a4ce001b5eb7e3c0862479d10f94c936f6c4e5f250aa5ff5bd2d", size = 9017, upload-time = "2025-02-22T07:34:54.333Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/53/97/d2cbbaa10c9b826af0e10fdf836e1bf344d9f0abb873ebc34d1f49642d3f/roman_numerals_py-3.1.0-py3-none-any.whl", hash = "sha256:9da2ad2fb670bcf24e81070ceb3be72f6c11c440d73bd579fbeca1e9f330954c", size = 7742, upload-time = "2025-02-22T07:34:52.422Z" }, +] + +[[package]] +name = "ruff" +version = "0.12.12" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a8/f0/e0965dd709b8cabe6356811c0ee8c096806bb57d20b5019eb4e48a117410/ruff-0.12.12.tar.gz", hash = "sha256:b86cd3415dbe31b3b46a71c598f4c4b2f550346d1ccf6326b347cc0c8fd063d6", size = 5359915, upload-time = "2025-09-04T16:50:18.273Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/09/79/8d3d687224d88367b51c7974cec1040c4b015772bfbeffac95face14c04a/ruff-0.12.12-py3-none-linux_armv6l.whl", hash = "sha256:de1c4b916d98ab289818e55ce481e2cacfaad7710b01d1f990c497edf217dafc", size = 12116602, upload-time = "2025-09-04T16:49:18.892Z" }, + { url = "https://files.pythonhosted.org/packages/c3/c3/6e599657fe192462f94861a09aae935b869aea8a1da07f47d6eae471397c/ruff-0.12.12-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:7acd6045e87fac75a0b0cdedacf9ab3e1ad9d929d149785903cff9bb69ad9727", size = 12868393, upload-time = "2025-09-04T16:49:23.043Z" }, + { url = "https://files.pythonhosted.org/packages/e8/d2/9e3e40d399abc95336b1843f52fc0daaceb672d0e3c9290a28ff1a96f79d/ruff-0.12.12-py3-none-macosx_11_0_arm64.whl", hash = "sha256:abf4073688d7d6da16611f2f126be86523a8ec4343d15d276c614bda8ec44edb", size = 12036967, upload-time = "2025-09-04T16:49:26.04Z" }, + { url = 
"https://files.pythonhosted.org/packages/e9/03/6816b2ed08836be272e87107d905f0908be5b4a40c14bfc91043e76631b8/ruff-0.12.12-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:968e77094b1d7a576992ac078557d1439df678a34c6fe02fd979f973af167577", size = 12276038, upload-time = "2025-09-04T16:49:29.056Z" }, + { url = "https://files.pythonhosted.org/packages/9f/d5/707b92a61310edf358a389477eabd8af68f375c0ef858194be97ca5b6069/ruff-0.12.12-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:42a67d16e5b1ffc6d21c5f67851e0e769517fb57a8ebad1d0781b30888aa704e", size = 11901110, upload-time = "2025-09-04T16:49:32.07Z" }, + { url = "https://files.pythonhosted.org/packages/9d/3d/f8b1038f4b9822e26ec3d5b49cf2bc313e3c1564cceb4c1a42820bf74853/ruff-0.12.12-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:b216ec0a0674e4b1214dcc998a5088e54eaf39417327b19ffefba1c4a1e4971e", size = 13668352, upload-time = "2025-09-04T16:49:35.148Z" }, + { url = "https://files.pythonhosted.org/packages/98/0e/91421368ae6c4f3765dd41a150f760c5f725516028a6be30e58255e3c668/ruff-0.12.12-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:59f909c0fdd8f1dcdbfed0b9569b8bf428cf144bec87d9de298dcd4723f5bee8", size = 14638365, upload-time = "2025-09-04T16:49:38.892Z" }, + { url = "https://files.pythonhosted.org/packages/74/5d/88f3f06a142f58ecc8ecb0c2fe0b82343e2a2b04dcd098809f717cf74b6c/ruff-0.12.12-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ac93d87047e765336f0c18eacad51dad0c1c33c9df7484c40f98e1d773876f5", size = 14060812, upload-time = "2025-09-04T16:49:42.732Z" }, + { url = "https://files.pythonhosted.org/packages/13/fc/8962e7ddd2e81863d5c92400820f650b86f97ff919c59836fbc4c1a6d84c/ruff-0.12.12-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:01543c137fd3650d322922e8b14cc133b8ea734617c4891c5a9fccf4bfc9aa92", size = 13050208, upload-time = "2025-09-04T16:49:46.434Z" }, + { url = 
"https://files.pythonhosted.org/packages/53/06/8deb52d48a9a624fd37390555d9589e719eac568c020b27e96eed671f25f/ruff-0.12.12-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afc2fa864197634e549d87fb1e7b6feb01df0a80fd510d6489e1ce8c0b1cc45", size = 13311444, upload-time = "2025-09-04T16:49:49.931Z" }, + { url = "https://files.pythonhosted.org/packages/2a/81/de5a29af7eb8f341f8140867ffb93f82e4fde7256dadee79016ac87c2716/ruff-0.12.12-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:0c0945246f5ad776cb8925e36af2438e66188d2b57d9cf2eed2c382c58b371e5", size = 13279474, upload-time = "2025-09-04T16:49:53.465Z" }, + { url = "https://files.pythonhosted.org/packages/7f/14/d9577fdeaf791737ada1b4f5c6b59c21c3326f3f683229096cccd7674e0c/ruff-0.12.12-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:a0fbafe8c58e37aae28b84a80ba1817f2ea552e9450156018a478bf1fa80f4e4", size = 12070204, upload-time = "2025-09-04T16:49:56.882Z" }, + { url = "https://files.pythonhosted.org/packages/77/04/a910078284b47fad54506dc0af13839c418ff704e341c176f64e1127e461/ruff-0.12.12-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b9c456fb2fc8e1282affa932c9e40f5ec31ec9cbb66751a316bd131273b57c23", size = 11880347, upload-time = "2025-09-04T16:49:59.729Z" }, + { url = "https://files.pythonhosted.org/packages/df/58/30185fcb0e89f05e7ea82e5817b47798f7fa7179863f9d9ba6fd4fe1b098/ruff-0.12.12-py3-none-musllinux_1_2_i686.whl", hash = "sha256:5f12856123b0ad0147d90b3961f5c90e7427f9acd4b40050705499c98983f489", size = 12891844, upload-time = "2025-09-04T16:50:02.591Z" }, + { url = "https://files.pythonhosted.org/packages/21/9c/28a8dacce4855e6703dcb8cdf6c1705d0b23dd01d60150786cd55aa93b16/ruff-0.12.12-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:26a1b5a2bf7dd2c47e3b46d077cd9c0fc3b93e6c6cc9ed750bd312ae9dc302ee", size = 13360687, upload-time = "2025-09-04T16:50:05.8Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/fa/05b6428a008e60f79546c943e54068316f32ec8ab5c4f73e4563934fbdc7/ruff-0.12.12-py3-none-win32.whl", hash = "sha256:173be2bfc142af07a01e3a759aba6f7791aa47acf3604f610b1c36db888df7b1", size = 12052870, upload-time = "2025-09-04T16:50:09.121Z" }, + { url = "https://files.pythonhosted.org/packages/85/60/d1e335417804df452589271818749d061b22772b87efda88354cf35cdb7a/ruff-0.12.12-py3-none-win_amd64.whl", hash = "sha256:e99620bf01884e5f38611934c09dd194eb665b0109104acae3ba6102b600fd0d", size = 13178016, upload-time = "2025-09-04T16:50:12.559Z" }, + { url = "https://files.pythonhosted.org/packages/28/7e/61c42657f6e4614a4258f1c3b0c5b93adc4d1f8575f5229d1906b483099b/ruff-0.12.12-py3-none-win_arm64.whl", hash = "sha256:2a8199cab4ce4d72d158319b63370abf60991495fb733db96cd923a34c52d093", size = 12256762, upload-time = "2025-09-04T16:50:15.737Z" }, +] + +[[package]] +name = "schemainspect" +version = "3.1.1663587362" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/00/41/c2ea77a94a7dcbde0d5b9cce70018a730e4ab5504628c14ced657c87217a/schemainspect-3.1.1663587362.tar.gz", hash = "sha256:a295ad56f7a19c09e5e1ef9f16dadbf6392e26196cb5f05b5afe613c99ce7468", size = 28520, upload-time = "2022-09-19T11:36:06.373Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0b/e2/eea82cd82c91c840f1f32cfc874db831ac3a742fbd9dfe713cae851441f1/schemainspect-3.1.1663587362-py3-none-any.whl", hash = "sha256:3071265712863c4d4e742940a4b44ac685135af3c93416872ec1bb6c822c4aca", size = 37373, upload-time = "2022-09-19T11:36:05.069Z" }, +] + +[[package]] +name = "setuptools" +version = "80.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = 
"sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "snowballstemmer" +version = "3.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/75/a7/9810d872919697c9d01295633f5d574fb416d47e535f258272ca1f01f447/snowballstemmer-3.0.1.tar.gz", hash = "sha256:6d5eeeec8e9f84d4d56b847692bacf79bc2c8e90c7f80ca4444ff8b6f2e52895", size = 105575, upload-time = "2025-05-09T16:34:51.843Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c8/78/3565d011c61f5a43488987ee32b6f3f656e7f107ac2782dd57bdd7d91d9a/snowballstemmer-3.0.1-py3-none-any.whl", hash = "sha256:6cd7b3897da8d6c9ffb968a6781fa6532dce9c3618a4b127d920dab764a19064", size = 103274, upload-time = "2025-05-09T16:34:50.371Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/6d/e6/21ccce3262dd4889aa3332e5a119a3491a95e8f60939870a3a035aabac0d/soupsieve-2.8.tar.gz", hash = "sha256:e2dd4a40a628cb5f28f6d4b0db8800b8f581b65bb380b97de22ba5ca8d72572f", size = 103472, upload-time = "2025-08-27T15:39:51.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/14/a0/bb38d3b76b8cae341dad93a2dd83ab7462e6dbcdd84d43f54ee60a8dc167/soupsieve-2.8-py3-none-any.whl", hash = "sha256:0cc76456a30e20f5d7f2e14a98a4ae2ee4e5abdc7c5ea0aafe795f344bc7984c", size = 36679, upload-time = "2025-08-27T15:39:50.179Z" }, +] + +[[package]] +name = "sphinx" +version = "7.4.7" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "(python_full_version < '3.10' and platform_machine != 'arm64') or (python_full_version < '3.10' and sys_platform != 'darwin')", + "python_full_version < '3.10' and platform_machine == 'arm64' and sys_platform == 'darwin'", +] +dependencies = [ + { name = "alabaster", version = "0.7.16", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "babel", marker = "python_full_version < '3.10'" }, + { name = "colorama", marker = "python_full_version < '3.10' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version < '3.10'" }, + { name = "imagesize", marker = "python_full_version < '3.10'" }, + { name = "importlib-metadata", marker = "python_full_version < '3.10'" }, + { name = "jinja2", marker = "python_full_version < '3.10'" }, + { name = "packaging", marker = "python_full_version < '3.10'" }, + { name = "pygments", marker = "python_full_version < '3.10'" }, + { name = "requests", marker = "python_full_version < '3.10'" }, + { name = "snowballstemmer", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-htmlhelp", marker = 
"python_full_version < '3.10'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version < '3.10'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version < '3.10'" }, + { name = "tomli", marker = "python_full_version < '3.10'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/be/50e50cb4f2eff47df05673d361095cafd95521d2a22521b920c67a372dcb/sphinx-7.4.7.tar.gz", hash = "sha256:242f92a7ea7e6c5b406fdc2615413890ba9f699114a9c09192d7dfead2ee9cfe", size = 8067911, upload-time = "2024-07-20T14:46:56.059Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0d/ef/153f6803c5d5f8917dbb7f7fcf6d34a871ede3296fa89c2c703f5f8a6c8e/sphinx-7.4.7-py3-none-any.whl", hash = "sha256:c2419e2135d11f1951cd994d6eb18a1835bd8fdd8429f9ca375dc1f3281bd239", size = 3401624, upload-time = "2024-07-20T14:46:52.142Z" }, +] + +[[package]] +name = "sphinx" +version = "8.1.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version == '3.10.*' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version == '3.10.*' and platform_machine != 'arm64') or (python_full_version == '3.10.*' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "babel", marker = "python_full_version == '3.10.*'" }, + { name = "colorama", marker = "python_full_version == '3.10.*' and sys_platform == 'win32'" }, + { name = "docutils", marker = "python_full_version == '3.10.*'" }, + { name = "imagesize", marker = "python_full_version == '3.10.*'" }, + { name = "jinja2", marker = "python_full_version == '3.10.*'" }, + { name = "packaging", marker = "python_full_version == '3.10.*'" }, + { name = "pygments", marker = "python_full_version == '3.10.*'" }, + { name = "requests", marker = 
"python_full_version == '3.10.*'" }, + { name = "snowballstemmer", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version == '3.10.*'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version == '3.10.*'" }, + { name = "tomli", marker = "python_full_version == '3.10.*'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/be0b61178fe2cdcb67e2a92fc9ebb488e3c51c4f74a36a7824c0adf23425/sphinx-8.1.3.tar.gz", hash = "sha256:43c1911eecb0d3e161ad78611bc905d1ad0e523e4ddc202a58a821773dc4c927", size = 8184611, upload-time = "2024-10-13T20:27:13.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/26/60/1ddff83a56d33aaf6f10ec8ce84b4c007d9368b21008876fceda7e7381ef/sphinx-8.1.3-py3-none-any.whl", hash = "sha256:09719015511837b76bf6e03e42eb7595ac8c2e41eeb9c29c5b755c6b677992a2", size = 3487125, upload-time = "2024-10-13T20:27:10.448Z" }, +] + +[[package]] +name = "sphinx" +version = "8.2.3" +source = { registry = "https://pypi.org/simple" } +resolution-markers = [ + "python_full_version >= '3.11' and platform_machine == 'arm64' and sys_platform == 'darwin'", + "(python_full_version >= '3.11' and platform_machine != 'arm64') or (python_full_version >= '3.11' and sys_platform != 'darwin')", +] +dependencies = [ + { name = "alabaster", version = "1.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, + { name = "babel", marker = "python_full_version >= '3.11'" }, + { name = "colorama", marker = "python_full_version >= '3.11' and sys_platform == 'win32'" }, + { name = "docutils", marker = 
"python_full_version >= '3.11'" }, + { name = "imagesize", marker = "python_full_version >= '3.11'" }, + { name = "jinja2", marker = "python_full_version >= '3.11'" }, + { name = "packaging", marker = "python_full_version >= '3.11'" }, + { name = "pygments", marker = "python_full_version >= '3.11'" }, + { name = "requests", marker = "python_full_version >= '3.11'" }, + { name = "roman-numerals-py", marker = "python_full_version >= '3.11'" }, + { name = "snowballstemmer", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-applehelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-devhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-htmlhelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-jsmath", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-qthelp", marker = "python_full_version >= '3.11'" }, + { name = "sphinxcontrib-serializinghtml", marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/38/ad/4360e50ed56cb483667b8e6dadf2d3fda62359593faabbe749a27c4eaca6/sphinx-8.2.3.tar.gz", hash = "sha256:398ad29dee7f63a75888314e9424d40f52ce5a6a87ae88e7071e80af296ec348", size = 8321876, upload-time = "2025-03-02T22:31:59.658Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/31/53/136e9eca6e0b9dc0e1962e2c908fbea2e5ac000c2a2fbd9a35797958c48b/sphinx-8.2.3-py3-none-any.whl", hash = "sha256:4405915165f13521d875a8c29c8970800a0141c14cc5416a38feca4ea5d9b9c3", size = 3589741, upload-time = "2025-03-02T22:31:56.836Z" }, +] + +[[package]] +name = "sphinx-basic-ng" +version = "1.0.0b2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == 
'3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/98/0b/a866924ded68efec7a1759587a4e478aec7559d8165fac8b2ad1c0e774d6/sphinx_basic_ng-1.0.0b2.tar.gz", hash = "sha256:9ec55a47c90c8c002b5960c57492ec3021f5193cb26cebc2dc4ea226848651c9", size = 20736, upload-time = "2023-07-08T18:40:54.166Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/dd/018ce05c532a22007ac58d4f45232514cd9d6dd0ee1dc374e309db830983/sphinx_basic_ng-1.0.0b2-py3-none-any.whl", hash = "sha256:eb09aedbabfb650607e9b4b68c9d240b90b1e1be221d6ad71d61c52e29f7932b", size = 22496, upload-time = "2023-07-08T18:40:52.659Z" }, +] + +[[package]] +name = "sphinx-copybutton" +version = "0.5.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/fc/2b/a964715e7f5295f77509e59309959f4125122d648f86b4fe7d70ca1d882c/sphinx-copybutton-0.5.2.tar.gz", hash = "sha256:4cf17c82fb9646d1bc9ca92ac280813a3b605d8c421225fd9913154103ee1fbd", size = 23039, upload-time = "2023-04-14T08:10:22.998Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/48/1ea60e74949eecb12cdd6ac43987f9fd331156388dcc2319b45e2ebb81bf/sphinx_copybutton-0.5.2-py3-none-any.whl", hash = "sha256:fb543fd386d917746c9a2c50360c7905b605726b9355cd26e9974857afeae06e", size = 13343, upload-time = "2023-04-14T08:10:20.844Z" }, +] + +[[package]] +name = "sphinx-github-changelog" +version = "1.7.1" +source = { 
registry = "https://pypi.org/simple" } +dependencies = [ + { name = "docutils" }, + { name = "requests" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/71/3c/11707a3da2fe3a2aed9575a76b2ee5dc8b943981a6a51f55a2a743b629fb/sphinx_github_changelog-1.7.1.tar.gz", hash = "sha256:2ec5a716fbd21edcb4f549f8e4503a7ab08a285585aaf291a7e97d79cd6467ab", size = 55142, upload-time = "2025-05-11T14:07:34.973Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/78/42/c556bdbf673f730c8518cdb95f479a2a7c97e9c461746b1878d15fa639f8/sphinx_github_changelog-1.7.1-py3-none-any.whl", hash = "sha256:5accdbdbd900287c2d6a737c322381e3f2b02053b67029006f335519a8025314", size = 11117, upload-time = "2025-05-11T14:07:33.553Z" }, +] + +[[package]] +name = "sphinxcontrib-applehelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ba/6e/b837e84a1a704953c62ef8776d45c3e8d759876b4a84fe14eba2859106fe/sphinxcontrib_applehelp-2.0.0.tar.gz", hash = "sha256:2f29ef331735ce958efa4734873f084941970894c6090408b079c61b2e1c06d1", size = 20053, upload-time = "2024-07-29T01:09:00.465Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5d/85/9ebeae2f76e9e77b952f4b274c27238156eae7979c5421fba91a28f4970d/sphinxcontrib_applehelp-2.0.0-py3-none-any.whl", hash = "sha256:4cd3f0ec4ac5dd9c17ec65e9ab272c9b867ea77425228e68ecf08d6b28ddbdb5", size = 119300, upload-time = "2024-07-29T01:08:58.99Z" }, +] + +[[package]] +name = "sphinxcontrib-devhelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" 
} +sdist = { url = "https://files.pythonhosted.org/packages/f6/d2/5beee64d3e4e747f316bae86b55943f51e82bb86ecd325883ef65741e7da/sphinxcontrib_devhelp-2.0.0.tar.gz", hash = "sha256:411f5d96d445d1d73bb5d52133377b4248ec79db5c793ce7dbe59e074b4dd1ad", size = 12967, upload-time = "2024-07-29T01:09:23.417Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/35/7a/987e583882f985fe4d7323774889ec58049171828b58c2217e7f79cdf44e/sphinxcontrib_devhelp-2.0.0-py3-none-any.whl", hash = "sha256:aefb8b83854e4b0998877524d1029fd3e6879210422ee3780459e28a1f03a8a2", size = 82530, upload-time = "2024-07-29T01:09:21.945Z" }, +] + +[[package]] +name = "sphinxcontrib-htmlhelp" +version = "2.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/93/983afd9aa001e5201eab16b5a444ed5b9b0a7a010541e0ddfbbfd0b2470c/sphinxcontrib_htmlhelp-2.1.0.tar.gz", hash = "sha256:c9e2916ace8aad64cc13a0d233ee22317f2b9025b9cf3295249fa985cc7082e9", size = 22617, upload-time = "2024-07-29T01:09:37.889Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0a/7b/18a8c0bcec9182c05a0b3ec2a776bba4ead82750a55ff798e8d406dae604/sphinxcontrib_htmlhelp-2.1.0-py3-none-any.whl", hash = "sha256:166759820b47002d22914d64a075ce08f4c46818e17cfc9470a9786b759b19f8", size = 98705, upload-time = "2024-07-29T01:09:36.407Z" }, +] + +[[package]] +name = "sphinxcontrib-jsmath" +version = "1.0.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b2/e8/9ed3830aeed71f17c026a07a5097edcf44b692850ef215b161b8ad875729/sphinxcontrib-jsmath-1.0.1.tar.gz", hash = "sha256:a9925e4a4587247ed2191a22df5f6970656cb8ca2bd6284309578f2153e0c4b8", size = 5787, upload-time = "2019-01-21T16:10:16.347Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/42/4c8646762ee83602e3fb3fbe774c2fac12f317deb0b5dbeeedd2d3ba4b77/sphinxcontrib_jsmath-1.0.1-py2.py3-none-any.whl", hash = 
"sha256:2ec2eaebfb78f3f2078e73666b1415417a116cc848b72e5172e596c871103178", size = 5071, upload-time = "2019-01-21T16:10:14.333Z" }, +] + +[[package]] +name = "sphinxcontrib-mermaid" +version = "1.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pyyaml" }, + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/97/69/bf039237ad260073e8c02f820b3e00dc34f3a2de20aff7861e6b19d2f8c5/sphinxcontrib_mermaid-1.0.0.tar.gz", hash = "sha256:2e8ab67d3e1e2816663f9347d026a8dee4a858acdd4ad32dd1c808893db88146", size = 15153, upload-time = "2024-10-12T16:33:03.863Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cd/c8/784b9ac6ea08aa594c1a4becbd0dbe77186785362e31fd633b8c6ae0197a/sphinxcontrib_mermaid-1.0.0-py3-none-any.whl", hash = "sha256:60b72710ea02087f212028feb09711225fbc2e343a10d34822fe787510e1caa3", size = 9597, upload-time = "2024-10-12T16:33:02.303Z" }, +] + +[[package]] +name = "sphinxcontrib-programoutput" +version = "0.18" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sphinx", version = "7.4.7", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.10'" }, + { name = "sphinx", version = "8.1.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version == '3.10.*'" }, + { name = "sphinx", version = "8.2.3", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.11'" }, +] +sdist = { url = 
"https://files.pythonhosted.org/packages/3f/c0/834af2290f8477213ec0dd60e90104f5644aa0c37b1a0d6f0a2b5efe03c4/sphinxcontrib_programoutput-0.18.tar.gz", hash = "sha256:09e68b6411d937a80b6085f4fdeaa42e0dc5555480385938465f410589d2eed8", size = 26333, upload-time = "2024-12-06T20:38:36.959Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/04/2c/7aec6e0580f666d4f61474a50c4995a98abfff27d827f0e7bc8c4fa528f5/sphinxcontrib_programoutput-0.18-py3-none-any.whl", hash = "sha256:8a651bc85de69a808a064ff0e48d06c12b9347da4fe5fdb1e94914b01e1b0c36", size = 20346, upload-time = "2024-12-06T20:38:22.406Z" }, +] + +[[package]] +name = "sphinxcontrib-qthelp" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/68/bc/9104308fc285eb3e0b31b67688235db556cd5b0ef31d96f30e45f2e51cae/sphinxcontrib_qthelp-2.0.0.tar.gz", hash = "sha256:4fe7d0ac8fc171045be623aba3e2a8f613f8682731f9153bb2e40ece16b9bbab", size = 17165, upload-time = "2024-07-29T01:09:56.435Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/83/859ecdd180cacc13b1f7e857abf8582a64552ea7a061057a6c716e790fce/sphinxcontrib_qthelp-2.0.0-py3-none-any.whl", hash = "sha256:b18a828cdba941ccd6ee8445dbe72ffa3ef8cbe7505d8cd1fa0d42d3f2d5f3eb", size = 88743, upload-time = "2024-07-29T01:09:54.885Z" }, +] + +[[package]] +name = "sphinxcontrib-serializinghtml" +version = "2.0.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/3b/44/6716b257b0aa6bfd51a1b31665d1c205fb12cb5ad56de752dfa15657de2f/sphinxcontrib_serializinghtml-2.0.0.tar.gz", hash = "sha256:e9d912827f872c029017a53f0ef2180b327c3f7fd23c87229f7a8e8b70031d4d", size = 16080, upload-time = "2024-07-29T01:10:09.332Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/52/a7/d2782e4e3f77c8450f727ba74a8f12756d5ba823d81b941f1b04da9d033a/sphinxcontrib_serializinghtml-2.0.0-py3-none-any.whl", hash = 
"sha256:6e2cb0eef194e10c27ec0023bfeb25badbbb5868244cf5bc5bdc04e4464bf331", size = 92072, upload-time = "2024-07-29T01:10:08.203Z" }, +] + +[[package]] +name = "sqlalchemy" +version = "2.0.43" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d7/bc/d59b5d97d27229b0e009bd9098cd81af71c2fa5549c580a0a67b9bed0496/sqlalchemy-2.0.43.tar.gz", hash = "sha256:788bfcef6787a7764169cfe9859fe425bf44559619e1d9f56f5bddf2ebf6f417", size = 9762949, upload-time = "2025-08-11T14:24:58.438Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8f/4e/985f7da36f09592c5ade99321c72c15101d23c0bb7eecfd1daaca5714422/sqlalchemy-2.0.43-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:70322986c0c699dca241418fcf18e637a4369e0ec50540a2b907b184c8bca069", size = 2133162, upload-time = "2025-08-11T15:52:17.854Z" }, + { url = "https://files.pythonhosted.org/packages/37/34/798af8db3cae069461e3bc0898a1610dc469386a97048471d364dc8aae1c/sqlalchemy-2.0.43-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:87accdbba88f33efa7b592dc2e8b2a9c2cdbca73db2f9d5c510790428c09c154", size = 2123082, upload-time = "2025-08-11T15:52:19.181Z" }, + { url = "https://files.pythonhosted.org/packages/fb/0f/79cf4d9dad42f61ec5af1e022c92f66c2d110b93bb1dc9b033892971abfa/sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = 
"sha256:c00e7845d2f692ebfc7d5e4ec1a3fd87698e4337d09e58d6749a16aedfdf8612", size = 3208871, upload-time = "2025-08-11T15:50:30.656Z" }, + { url = "https://files.pythonhosted.org/packages/56/b3/59befa58fb0e1a9802c87df02344548e6d007e77e87e6084e2131c29e033/sqlalchemy-2.0.43-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:022e436a1cb39b13756cf93b48ecce7aa95382b9cfacceb80a7d263129dfd019", size = 3209583, upload-time = "2025-08-11T15:57:47.697Z" }, + { url = "https://files.pythonhosted.org/packages/29/d2/124b50c0eb8146e8f0fe16d01026c1a073844f0b454436d8544fe9b33bd7/sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:c5e73ba0d76eefc82ec0219d2301cb33bfe5205ed7a2602523111e2e56ccbd20", size = 3148177, upload-time = "2025-08-11T15:50:32.078Z" }, + { url = "https://files.pythonhosted.org/packages/83/f5/e369cd46aa84278107624617034a5825fedfc5c958b2836310ced4d2eadf/sqlalchemy-2.0.43-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:9c2e02f06c68092b875d5cbe4824238ab93a7fa35d9c38052c033f7ca45daa18", size = 3172276, upload-time = "2025-08-11T15:57:49.477Z" }, + { url = "https://files.pythonhosted.org/packages/de/2b/4602bf4c3477fa4c837c9774e6dd22e0389fc52310c4c4dfb7e7ba05e90d/sqlalchemy-2.0.43-cp310-cp310-win32.whl", hash = "sha256:e7a903b5b45b0d9fa03ac6a331e1c1d6b7e0ab41c63b6217b3d10357b83c8b00", size = 2101491, upload-time = "2025-08-11T15:54:59.191Z" }, + { url = "https://files.pythonhosted.org/packages/38/2d/bfc6b6143adef553a08295490ddc52607ee435b9c751c714620c1b3dd44d/sqlalchemy-2.0.43-cp310-cp310-win_amd64.whl", hash = "sha256:4bf0edb24c128b7be0c61cd17eef432e4bef507013292415f3fb7023f02b7d4b", size = 2125148, upload-time = "2025-08-11T15:55:00.593Z" }, + { url = "https://files.pythonhosted.org/packages/9d/77/fa7189fe44114658002566c6fe443d3ed0ec1fa782feb72af6ef7fbe98e7/sqlalchemy-2.0.43-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:52d9b73b8fb3e9da34c2b31e6d99d60f5f99fd8c1225c9dad24aeb74a91e1d29", size = 2136472, upload-time 
= "2025-08-11T15:52:21.789Z" }, + { url = "https://files.pythonhosted.org/packages/99/ea/92ac27f2fbc2e6c1766bb807084ca455265707e041ba027c09c17d697867/sqlalchemy-2.0.43-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:f42f23e152e4545157fa367b2435a1ace7571cab016ca26038867eb7df2c3631", size = 2126535, upload-time = "2025-08-11T15:52:23.109Z" }, + { url = "https://files.pythonhosted.org/packages/94/12/536ede80163e295dc57fff69724caf68f91bb40578b6ac6583a293534849/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4fb1a8c5438e0c5ea51afe9c6564f951525795cf432bed0c028c1cb081276685", size = 3297521, upload-time = "2025-08-11T15:50:33.536Z" }, + { url = "https://files.pythonhosted.org/packages/03/b5/cacf432e6f1fc9d156eca0560ac61d4355d2181e751ba8c0cd9cb232c8c1/sqlalchemy-2.0.43-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db691fa174e8f7036afefe3061bc40ac2b770718be2862bfb03aabae09051aca", size = 3297343, upload-time = "2025-08-11T15:57:51.186Z" }, + { url = "https://files.pythonhosted.org/packages/ca/ba/d4c9b526f18457667de4c024ffbc3a0920c34237b9e9dd298e44c7c00ee5/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fe2b3b4927d0bc03d02ad883f402d5de201dbc8894ac87d2e981e7d87430e60d", size = 3232113, upload-time = "2025-08-11T15:50:34.949Z" }, + { url = "https://files.pythonhosted.org/packages/aa/79/c0121b12b1b114e2c8a10ea297a8a6d5367bc59081b2be896815154b1163/sqlalchemy-2.0.43-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:4d3d9b904ad4a6b175a2de0738248822f5ac410f52c2fd389ada0b5262d6a1e3", size = 3258240, upload-time = "2025-08-11T15:57:52.983Z" }, + { url = "https://files.pythonhosted.org/packages/79/99/a2f9be96fb382f3ba027ad42f00dbe30fdb6ba28cda5f11412eee346bec5/sqlalchemy-2.0.43-cp311-cp311-win32.whl", hash = "sha256:5cda6b51faff2639296e276591808c1726c4a77929cfaa0f514f30a5f6156921", size = 2101248, upload-time = "2025-08-11T15:55:01.855Z" }, + { url = 
"https://files.pythonhosted.org/packages/ee/13/744a32ebe3b4a7a9c7ea4e57babae7aa22070d47acf330d8e5a1359607f1/sqlalchemy-2.0.43-cp311-cp311-win_amd64.whl", hash = "sha256:c5d1730b25d9a07727d20ad74bc1039bbbb0a6ca24e6769861c1aa5bf2c4c4a8", size = 2126109, upload-time = "2025-08-11T15:55:04.092Z" }, + { url = "https://files.pythonhosted.org/packages/61/db/20c78f1081446095450bdc6ee6cc10045fce67a8e003a5876b6eaafc5cc4/sqlalchemy-2.0.43-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:20d81fc2736509d7a2bd33292e489b056cbae543661bb7de7ce9f1c0cd6e7f24", size = 2134891, upload-time = "2025-08-11T15:51:13.019Z" }, + { url = "https://files.pythonhosted.org/packages/45/0a/3d89034ae62b200b4396f0f95319f7d86e9945ee64d2343dcad857150fa2/sqlalchemy-2.0.43-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:25b9fc27650ff5a2c9d490c13c14906b918b0de1f8fcbb4c992712d8caf40e83", size = 2123061, upload-time = "2025-08-11T15:51:14.319Z" }, + { url = "https://files.pythonhosted.org/packages/cb/10/2711f7ff1805919221ad5bee205971254845c069ee2e7036847103ca1e4c/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6772e3ca8a43a65a37c88e2f3e2adfd511b0b1da37ef11ed78dea16aeae85bd9", size = 3320384, upload-time = "2025-08-11T15:52:35.088Z" }, + { url = "https://files.pythonhosted.org/packages/6e/0e/3d155e264d2ed2778484006ef04647bc63f55b3e2d12e6a4f787747b5900/sqlalchemy-2.0.43-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1a113da919c25f7f641ffbd07fbc9077abd4b3b75097c888ab818f962707eb48", size = 3329648, upload-time = "2025-08-11T15:56:34.153Z" }, + { url = "https://files.pythonhosted.org/packages/5b/81/635100fb19725c931622c673900da5efb1595c96ff5b441e07e3dd61f2be/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4286a1139f14b7d70141c67a8ae1582fc2b69105f1b09d9573494eb4bb4b2687", size = 3258030, upload-time = "2025-08-11T15:52:36.933Z" }, + { url = 
"https://files.pythonhosted.org/packages/0c/ed/a99302716d62b4965fded12520c1cbb189f99b17a6d8cf77611d21442e47/sqlalchemy-2.0.43-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:529064085be2f4d8a6e5fab12d36ad44f1909a18848fcfbdb59cc6d4bbe48efe", size = 3294469, upload-time = "2025-08-11T15:56:35.553Z" }, + { url = "https://files.pythonhosted.org/packages/5d/a2/3a11b06715149bf3310b55a98b5c1e84a42cfb949a7b800bc75cb4e33abc/sqlalchemy-2.0.43-cp312-cp312-win32.whl", hash = "sha256:b535d35dea8bbb8195e7e2b40059e2253acb2b7579b73c1b432a35363694641d", size = 2098906, upload-time = "2025-08-11T15:55:00.645Z" }, + { url = "https://files.pythonhosted.org/packages/bc/09/405c915a974814b90aa591280623adc6ad6b322f61fd5cff80aeaef216c9/sqlalchemy-2.0.43-cp312-cp312-win_amd64.whl", hash = "sha256:1c6d85327ca688dbae7e2b06d7d84cfe4f3fffa5b5f9e21bb6ce9d0e1a0e0e0a", size = 2126260, upload-time = "2025-08-11T15:55:02.965Z" }, + { url = "https://files.pythonhosted.org/packages/41/1c/a7260bd47a6fae7e03768bf66451437b36451143f36b285522b865987ced/sqlalchemy-2.0.43-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e7c08f57f75a2bb62d7ee80a89686a5e5669f199235c6d1dac75cd59374091c3", size = 2130598, upload-time = "2025-08-11T15:51:15.903Z" }, + { url = "https://files.pythonhosted.org/packages/8e/84/8a337454e82388283830b3586ad7847aa9c76fdd4f1df09cdd1f94591873/sqlalchemy-2.0.43-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:14111d22c29efad445cd5021a70a8b42f7d9152d8ba7f73304c4d82460946aaa", size = 2118415, upload-time = "2025-08-11T15:51:17.256Z" }, + { url = "https://files.pythonhosted.org/packages/cf/ff/22ab2328148492c4d71899d62a0e65370ea66c877aea017a244a35733685/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:21b27b56eb2f82653168cefe6cb8e970cdaf4f3a6cb2c5e3c3c1cf3158968ff9", size = 3248707, upload-time = "2025-08-11T15:52:38.444Z" }, + { url = 
"https://files.pythonhosted.org/packages/dc/29/11ae2c2b981de60187f7cbc84277d9d21f101093d1b2e945c63774477aba/sqlalchemy-2.0.43-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c5a9da957c56e43d72126a3f5845603da00e0293720b03bde0aacffcf2dc04f", size = 3253602, upload-time = "2025-08-11T15:56:37.348Z" }, + { url = "https://files.pythonhosted.org/packages/b8/61/987b6c23b12c56d2be451bc70900f67dd7d989d52b1ee64f239cf19aec69/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5d79f9fdc9584ec83d1b3c75e9f4595c49017f5594fee1a2217117647225d738", size = 3183248, upload-time = "2025-08-11T15:52:39.865Z" }, + { url = "https://files.pythonhosted.org/packages/86/85/29d216002d4593c2ce1c0ec2cec46dda77bfbcd221e24caa6e85eff53d89/sqlalchemy-2.0.43-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9df7126fd9db49e3a5a3999442cc67e9ee8971f3cb9644250107d7296cb2a164", size = 3219363, upload-time = "2025-08-11T15:56:39.11Z" }, + { url = "https://files.pythonhosted.org/packages/b6/e4/bd78b01919c524f190b4905d47e7630bf4130b9f48fd971ae1c6225b6f6a/sqlalchemy-2.0.43-cp313-cp313-win32.whl", hash = "sha256:7f1ac7828857fcedb0361b48b9ac4821469f7694089d15550bbcf9ab22564a1d", size = 2096718, upload-time = "2025-08-11T15:55:05.349Z" }, + { url = "https://files.pythonhosted.org/packages/ac/a5/ca2f07a2a201f9497de1928f787926613db6307992fe5cda97624eb07c2f/sqlalchemy-2.0.43-cp313-cp313-win_amd64.whl", hash = "sha256:971ba928fcde01869361f504fcff3b7143b47d30de188b11c6357c0505824197", size = 2123200, upload-time = "2025-08-11T15:55:07.932Z" }, + { url = "https://files.pythonhosted.org/packages/92/95/ddb5acf74a71e0fa4f9410c7d8555f169204ae054a49693b3cd31d0bf504/sqlalchemy-2.0.43-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:ceb5c832cc30663aeaf5e39657712f4c4241ad1f638d487ef7216258f6d41fe7", size = 2136445, upload-time = "2025-08-12T17:29:06.145Z" }, + { url = 
"https://files.pythonhosted.org/packages/ea/d4/7d7ea7dfbc1ddb0aa54dd63a686cd43842192b8e1bfb5315bb052925f704/sqlalchemy-2.0.43-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:11f43c39b4b2ec755573952bbcc58d976779d482f6f832d7f33a8d869ae891bf", size = 2126411, upload-time = "2025-08-12T17:29:08.138Z" }, + { url = "https://files.pythonhosted.org/packages/07/bd/123ba09bec14112de10e49d8835e6561feb24fd34131099d98d28d34f106/sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:413391b2239db55be14fa4223034d7e13325a1812c8396ecd4f2c08696d5ccad", size = 3221776, upload-time = "2025-08-11T16:00:30.938Z" }, + { url = "https://files.pythonhosted.org/packages/ae/35/553e45d5b91b15980c13e1dbcd7591f49047589843fff903c086d7985afb/sqlalchemy-2.0.43-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c379e37b08c6c527181a397212346be39319fb64323741d23e46abd97a400d34", size = 3221665, upload-time = "2025-08-12T17:29:11.307Z" }, + { url = "https://files.pythonhosted.org/packages/07/4d/ff03e516087251da99bd879b5fdb2c697ff20295c836318dda988e12ec19/sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:03d73ab2a37d9e40dec4984d1813d7878e01dbdc742448d44a7341b7a9f408c7", size = 3160067, upload-time = "2025-08-11T16:00:33.148Z" }, + { url = "https://files.pythonhosted.org/packages/ae/88/cbc7caa186ecdc5dea013e9ccc00d78b93a6638dc39656a42369a9536458/sqlalchemy-2.0.43-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:8cee08f15d9e238ede42e9bbc1d6e7158d0ca4f176e4eab21f88ac819ae3bd7b", size = 3184462, upload-time = "2025-08-12T17:29:14.919Z" }, + { url = "https://files.pythonhosted.org/packages/ab/69/f8bbd43080b6fa75cb44ff3a1cc99aaae538dd0ade1a58206912b2565d72/sqlalchemy-2.0.43-cp39-cp39-win32.whl", hash = "sha256:b3edaec7e8b6dc5cd94523c6df4f294014df67097c8217a89929c99975811414", size = 2104031, upload-time = "2025-08-11T15:48:56.453Z" }, + { url = 
"https://files.pythonhosted.org/packages/36/39/2ec1b0e7a4f44d833d924e7bfca8054c72e37eb73f4d02795d16d8b0230a/sqlalchemy-2.0.43-cp39-cp39-win_amd64.whl", hash = "sha256:227119ce0a89e762ecd882dc661e0aa677a690c914e358f0dd8932a2e8b2765b", size = 2128007, upload-time = "2025-08-11T15:48:57.872Z" }, + { url = "https://files.pythonhosted.org/packages/b8/d9/13bdde6521f322861fab67473cec4b1cc8999f3871953531cf61945fad92/sqlalchemy-2.0.43-py3-none-any.whl", hash = "sha256:1681c21dd2ccee222c2fe0bef671d1aef7c504087c9c4e800371cfcc8ac966fc", size = 1924759, upload-time = "2025-08-11T15:39:53.024Z" }, +] + +[[package]] +name = "sqlbag" +version = "0.1.1617247075" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "packaging" }, + { name = "six" }, + { name = "sqlalchemy" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/07/6f/46171ef9ef6d177b94dff96e6403c7fb7466de5c9ee767b0218a21945fdb/sqlbag-0.1.1617247075.tar.gz", hash = "sha256:b9d7862c3b2030356d796ca872907962fd54704066978d7ae89383f5123366ed", size = 11652, upload-time = "2021-04-01T03:22:24.096Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/27/c8/1d4bd038d4b34f3810fd8ab70a48cc6f3d2373666f2797f0298a97b088c9/sqlbag-0.1.1617247075-py2.py3-none-any.whl", hash = "sha256:ecdef26d661f8640711030ac6ee618deb92b91f9f0fc2efbf8a3b133af13092d", size = 14971, upload-time = "2021-04-01T03:22:22.654Z" }, +] + +[[package]] +name = "sqlparse" +version = "0.5.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e5/40/edede8dd6977b0d3da179a342c198ed100dd2aba4be081861ee5911e4da4/sqlparse-0.5.3.tar.gz", hash = "sha256:09f67787f56a0b16ecdbde1bfc7f5d9c3371ca683cfeaa8e6ff60b4807ec9272", size = 84999, upload-time = "2024-12-10T12:05:30.728Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/5c/bfd6bd0bf979426d405cc6e71eceb8701b148b16c21d2dc3c261efc61c7b/sqlparse-0.5.3-py3-none-any.whl", hash = 
"sha256:cf2196ed3418f3ba5de6af7e82c694a9fbdbfecccdfc72e281548517081f16ca", size = 44415, upload-time = "2024-12-10T12:05:27.824Z" }, +] + +[[package]] +name = "stevedore" +version = "5.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2a/5f/8418daad5c353300b7661dd8ce2574b0410a6316a8be650a189d5c68d938/stevedore-5.5.0.tar.gz", hash = "sha256:d31496a4f4df9825e1a1e4f1f74d19abb0154aff311c3b376fcc89dae8fccd73", size = 513878, upload-time = "2025-08-25T12:54:26.806Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/80/c5/0c06759b95747882bb50abda18f5fb48c3e9b0fbfc6ebc0e23550b52415d/stevedore-5.5.0-py3-none-any.whl", hash = "sha256:18363d4d268181e8e8452e71a38cd77630f345b2ef6b4a8d5614dac5ee0d18cf", size = 49518, upload-time = "2025-08-25T12:54:25.445Z" }, +] + +[[package]] +name = "tokenize-rt" +version = "6.2.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/69/ed/8f07e893132d5051d86a553e749d5c89b2a4776eb3a579b72ed61f8559ca/tokenize_rt-6.2.0.tar.gz", hash = "sha256:8439c042b330c553fdbe1758e4a05c0ed460dbbbb24a606f11f0dee75da4cad6", size = 5476, upload-time = "2025-05-23T23:48:00.035Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/33/f0/3fe8c6e69135a845f4106f2ff8b6805638d4e85c264e70114e8126689587/tokenize_rt-6.2.0-py2.py3-none-any.whl", hash = "sha256:a152bf4f249c847a66497a4a95f63376ed68ac6abf092a2f7cfb29d044ecff44", size = 6004, upload-time = "2025-05-23T23:47:58.812Z" }, +] + +[[package]] +name = "tomli" +version = "2.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, + { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, + { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, + { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, + { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, + { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, + { url = 
"https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, + { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, + { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, + { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, + { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, + { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, + { url = 
"https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, + { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, + { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, + { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, + { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, + { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, + { url = 
"https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, + { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, + { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, + { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, + { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, + { url = 
"https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, + { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, + { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, + { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, + { url = 
"https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +] + +[[package]] +name = "types-pyyaml" +version = "6.0.12.20250822" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/49/85/90a442e538359ab5c9e30de415006fb22567aa4301c908c09f19e42975c2/types_pyyaml-6.0.12.20250822.tar.gz", hash = "sha256:259f1d93079d335730a9db7cff2bcaf65d7e04b4a56b5927d49a612199b59413", size = 17481, upload-time = "2025-08-22T03:02:16.209Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/32/8e/8f0aca667c97c0d76024b37cffa39e76e2ce39ca54a38f285a64e6ae33ba/types_pyyaml-6.0.12.20250822-py3-none-any.whl", hash = "sha256:1fe1a5e146aa315483592d292b72a172b65b946a6d98aa6ddd8e4aa838ab7098", size = 20314, upload-time = "2025-08-22T03:02:15.002Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "tzdata" +version = "2025.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/95/32/1a225d6164441be760d75c2c42e2780dc0873fe382da3e98a2e1e48361e5/tzdata-2025.2.tar.gz", hash = "sha256:b60a638fcc0daffadf82fe0f57e53d06bdec2f36c4df66280ae79bce6bd6f2b9", size = 196380, upload-time = "2025-03-23T13:54:43.652Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5c/23/c7abc0ca0a1526a0774eca151daeb8de62ec457e77262b66b359c3c7679e/tzdata-2025.2-py2.py3-none-any.whl", hash = "sha256:1a403fada01ff9221ca8044d701868fa132215d84beb92242d9acd2147f667a8", size = 347839, upload-time = "2025-03-23T13:54:41.845Z" }, +] + +[[package]] +name = "urllib3" +version = "2.5.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, +] + +[[package]] +name = "zipp" +version = "3.23.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/e3/02/0f2892c661036d50ede074e376733dca2ae7c6eb617489437771209d4180/zipp-3.23.0.tar.gz", hash = "sha256:a07157588a12518c9d4034df3fbbee09c814741a33ff63c05fa29d26a2404166", size = 25547, upload-time = "2025-06-08T17:06:39.4Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2e/54/647ade08bf0db230bfea292f893923872fd20be6ac6f53b2b936ba839d75/zipp-3.23.0-py3-none-any.whl", hash = "sha256:071652d6115ed432f5ce1d34c336c0adfd6a884660d1e9712a256d3d3bd4b14e", size = 10276, upload-time = "2025-06-08T17:06:38.034Z" }, +]