diff --git a/.bumpversion.cfg b/.bumpversion.cfg index b8d5a45..e873275 100644 --- a/.bumpversion.cfg +++ b/.bumpversion.cfg @@ -1,5 +1,5 @@ [bumpversion] -current_version = 0.12.3 +current_version = 0.19.0 commit = True tag = True message = bump: v{current_version} --> v{new_version} @@ -7,3 +7,7 @@ message = bump: v{current_version} --> v{new_version} [bumpversion:file:pyproject.toml] search = version = "{current_version}" replace = version = "{new_version}" + +[bumpversion:file:uv.lock] +search = version = "{current_version}" +replace = version = "{new_version}" diff --git a/.github/workflows/ci.yaml b/.github/workflows/ci.yaml index 581cc73..0126803 100644 --- a/.github/workflows/ci.yaml +++ b/.github/workflows/ci.yaml @@ -9,28 +9,26 @@ jobs: lint: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 - with: - python-version: 3.9 - - name: Install Poetry - uses: snok/install-poetry@v1 + - uses: actions/checkout@v6 + + - uses: actions/setup-python@v6 with: - virtualenvs-create: true - virtualenvs-in-project: true - - name: Load cached venv - id: cached-poetry-dependencies - uses: actions/cache@v3 + python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Install dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: poetry install --no-interaction --no-root - - name: Run black - run: poetry run black --check . + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Install the project + run: uv sync --all-extras --dev + + - name: Run ruff + run: uv run ruff format --check + - name: Run mypy - run: poetry run mypy src + run: uv run mypy . 
test: needs: lint @@ -38,31 +36,26 @@ jobs: fail-fast: true matrix: os: ["ubuntu-latest", "macos-latest"] - python-version: ["3.9", "3.10", "3.11"] + python-version: ["3.10", "3.11", "3.12"] runs-on: ${{ matrix.os }} steps: - name: Check out repository - uses: actions/checkout@v3 + uses: actions/checkout@v6 + - name: Set up python ${{ matrix.python-version }} id: setup-python - uses: actions/setup-python@v4 + uses: actions/setup-python@v6 with: python-version: ${{ matrix.python-version }} - - name: Install Poetry - uses: snok/install-poetry@v1 - with: - virtualenvs-create: true - virtualenvs-in-project: true - - name: Load cached venv - id: cached-poetry-dependencies - uses: actions/cache@v3 + + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - path: .venv - key: venv-${{ runner.os }}-${{ steps.setup-python.outputs.python-version }}-${{ hashFiles('**/poetry.lock') }} - - name: Install dependencies - if: steps.cached-poetry-dependencies.outputs.cache-hit != 'true' - run: poetry install --no-interaction --no-root - - name: Install library - run: poetry install --no-interaction + enable-cache: true + cache-dependency-glob: "uv.lock" + + - name: Install the project + run: uv sync --all-extras --dev + - name: Run tests - run: poetry run pytest + run: uv run pytest diff --git a/.gitignore b/.gitignore index 7a2c7a2..cfff959 100644 --- a/.gitignore +++ b/.gitignore @@ -1,9 +1,9 @@ -apy.egg-info/ build/ dist/ tests/data *.pyc .venv .vimspector.json -poetry.lock +uv.lock .python-version +venv/ diff --git a/.mise.toml b/.mise.toml new file mode 100644 index 0000000..0973ceb --- /dev/null +++ b/.mise.toml @@ -0,0 +1,13 @@ +[tools] +uv = 'latest' + +[env] +_.python.venv = { path = ".venv" } + +[tasks] +test = """ +uv run pytest +uv run mypy . +uv run ruff format --check +""" +doctoc = "doctoc --notitle --maxlevel 2 README.md" diff --git a/README.md b/README.md index cabfb50..c37891b 100644 --- a/README.md +++ b/README.md @@ -6,37 +6,55 @@ Anki. 
It does not require Anki to be running at the same time. ### Important -* `apy` is **WORK IN PROGRESS**, and there may still be some major changes. * This script and its author(s) are not affiliated/associated with the main Anki project in any way. * Use this software entirely at your own risk. Frequent backups are encouraged. ### Table of Contents -* [Install instructions](#install-instructions) -* [Usage](#usage) -* [Configuration](#configuration) -* [Zsh completion](#zsh-completion) -* [Changelog](#changelog) -* [Relevant resources](#relevant-resources) -* [Alternatives](#alternatives) + + + +- [Install instructions](#install-instructions) +- [Usage](#usage) +- [Examples of adding new notes](#examples-of-adding-new-notes) +- [Configuration](#configuration) +- [Zsh completion](#zsh-completion) +- [Fish completion](#fish-completion) +- [Relevant resources](#relevant-resources) +- [Alternatives](#alternatives) +- [Contributing](#contributing) + + ## Install instructions -`apy` can be installed in the "usual" way with `pip`: +> [!WARNING] +> +> Installing Python packages (e.g. with `pip install`) outside [virtual +> environments](https://docs.python.org/3/library/venv.html) is not recommended, +> even at the user level! If you do this, then be aware that you may experience +> issues due to conflicts with other packages/tools installed in the same manner. + +There are two recommended ways to install `apy`. + +### With uv + +[`uv`](https://docs.astral.sh/uv/) is a relatively new python package and +project manager. It is very fast and it includes features to install and run +tools in isolated environments. + +See [`uv` Getting Started](https://docs.astral.sh/uv/#getting-started) for how +to install `uv`. 
When it's available, you can install `apy` with: ```bash -pip install git+https://github.com/lervag/apy +uv tool install apyanki ``` -**However**, note that installing Python packages outside [virtual -environments](https://docs.python.org/3/library/venv.html) is not recommended, -even at the user level! If you do this, then be aware that you may experience -issues due to conflicts with other packages/tools installed in the same manner. +### With pipx -Instead, the best way to install `apy` for normal usage is with -[`pipx`](https://pypa.github.io/pipx/). This will ensure `apy` doesn't -interfere with other Python packages already on your system. +[`pipx`](https://pipx.pypa.io/stable/) is a simple and useful python tool to +install and run python applications in isolated environments. If you don't already have `pipx`, install it with your distribution's package manager. For instance, on Ubuntu: @@ -49,12 +67,12 @@ sudo apt install pipx Then, install `apy` with: ```bash -pipx install git+https://github.com/lervag/apy +pipx install apyanki ``` ### Requirements -`apy` should work for Python 3.9 and later. +`apy` should work for Python 3.10 and later. Technically, `apy` does **not** depend on any existing Anki installation, since it pulls in a copy of the non-GUI components of Anki as a separate dependency. @@ -80,7 +98,7 @@ apy add-single -s preset "Question/Front" "Answer/Back" # List leech cards (will show cid values for each card). Note that the query # should be similar to a search query in the Anki browser. -apy list -v tag:leech +apy list-cards -v tag:leech # Review and possibly edit file with given cid apy review cid:12345678 @@ -90,6 +108,92 @@ apy review cid:12345678 improve the process of adding and editing cards. For more information about this, see [the Wiki](https://github.com/lervag/apy/wiki/Vim). +## Examples of adding new notes + +With `apy add` and `apy add-from-file`, the note input is provided through +Markdown format. 
`apy add` opens an interactive editor that will write +a temporary file that `apy` reads afterwords. `apy add-from-file` will add +notes from a specified file. These are therefore more or less equivalent +commands. + +In the following, you may find a few examples of how such notes can be written. +These will all be based on the standard note types (or _models_). See +[here](https://docs.ankiweb.net/getting-started.html?highlight=note#note-types) +for the Anki documentation of the standard note types. + +### Add with a specified model/note type + +This example will create two notes and four cards with the "Basic (and reversed +card)" note type. + +````md +model: Basic (and reversed card) +tags: mytag + +# Note + +## Front + +This is my front. + +## Back + +This is my back. + +# Note + +## Front + +This is my second front. + +## Back + +This is my second back. +```` + +### Add with code snippet + +This example will create a Basic note with a single card that includes a code +snippet. + +````md +model: Basic +tags: code + +# Note + +## Front + +What programming language is this? + +## Back + +```python +x = 1 +if x > 0: + print("this is python") +``` +```` + +### Add a Cloze note + +The following shows how a Cloze card can be added, e.g. after doing `apy add -m Cloze`: + +```md +# Note +model: Cloze +tags: mytag, othertag + +## Text + +{{c1::Canberra}} was {{c2::founded}}}} in 1913 + +Or {{c3::REPLACE}} here. + +## Back Extra +Extra information goes here. +``` + ## Configuration `apy` loads configuration from `~/.config/apy/apy.json`. The following keys are @@ -97,6 +201,7 @@ currently recognized: - `base_path`: Specify where `apy` should look for your Anki database. This is usually something like `/home/your_name/.local/share/Anki2/`. +- `auto_sync`: Set to `true` to enable auto sync after any mutating operations. - `img_viewers`: Specify a dictionary of image viewer commands. Each key is a file extension. The value is a command list, e.g. 
`['display', 'density', '300']` which specifies the command and its options to use for the @@ -106,18 +211,31 @@ currently recognized: '-d']`. - `markdown_models`: Specify a list of models for which `apy` will use a markdown converter. +- `markdown_pygments_style`: Specify the code highlight scheme to use for + fenced code blocks in Markdown notes. See the [Pygments + documentation](https://pygments.org/docs/styles/) for more details. +- `markdown_latex_mode`: Specify how `$$ ... $$` and `$ ... $` will be + interpreted when transforming Markdown into field HTML and vice versa (it + works both ways). The following values are recognized: + - `mathjax` (default): `$$…$$` → `\[…\]` and `$…$` → `\(…\)` + - `latex`: `$$…$$` → `[$$]…[/$$]` and `$…$` → `[$]…[/$]` - `pngCommands`/`svgCommands`: Set LaTeX commands to generate PNG/SVG files. This is inspired by the [Edit LaTeX build process](https://ankiweb.net/shared/info/937148547) addon to Anki. - `presets`: Specify preset combination of model and tags for use with `apy add-single`. - `profile_name`: Specify which profile to load by default. -- `query`: Specify default query for `apy list`, `apy review` and `apy tag`. +- `query`: Specify default query for `apy list*`, `apy review` and `apy tag`. 
+- `review_show_cards`: Whether to show list of cards by default during note + review +- `review_verbose`: Whether to show note details by default during note + review An example configuration: ```json { + "auto_sync": true, "base_path": "/home/your_name/.local/share/Anki2/", "profile_name": "MyAnkiProfile", "query": "tag:leech", @@ -133,7 +251,9 @@ An example configuration: ["lualatex", "-interaction=nonstopmode", "tmp.tex"], ["pdfcrop", "tmp.pdf", "tmp.pdf"], ["pdf2svg", "tmp.pdf", "tmp.svg"] - ] + ], + "review_show_cards": true, + "markdown_latex_mode": "latex" } ``` @@ -156,30 +276,14 @@ Then add the following line to ones `.zshrc` file: fpath=($HOME/.local/zsh-functions $fpath) ``` -## Changelog - -This is just a simple changelog. See the commit history and issue threads for -details. The main purpose of the changelog is to show which versions of `apy` are -compatible with which versions of Anki. - -PS! In future updates this changelog will be removed in favor of the [release -history on GitHub](https://github.com/lervag/apy/releases). - -| Version | Version note | -|:-------:| ------------------------------------------- | -| `HEAD` | Development branch (works with Anki 23.12) | -| 0.12 | Compatible with Anki 23.12 | -| 0.11 | Compatible with Anki 2.1.66 | -| 0.10 | Compatible with Anki 2.1.64 | -| 0.9 | Compatible with Anki 2.1.49 | -| 0.8 | Compatible with Anki 2.1.35--2.1.44 | -| 0.7 | Several improvements | -| 0.6 | Presets, choose profile, add-single | -| 0.5 | Minor improvements | -| 0.4 | Minor improvements | -| 0.3 | Compatible with Anki 2.1.26 | -| 0.2 | Compatible with Anki 2.1.23 | -| 0.1 | Compatible with Anki 2.1.13 | +## Fish completion + +There is also a fish completion file available. To use it, one may symlink or +copy it to `~/.config/fish/completions/` directory: + +``` +ln -s /path/to/apy/completion/apy.fish ~/.config/fish/completions/ +``` ## Relevant resources @@ -303,44 +407,24 @@ and developing the `apy` code. 
### Setup -This project uses [Poetry](https://python-poetry.org/) as the build system and -to manage dependencies. +This project uses [`uv`](https://docs.astral.sh/uv/) as the build system and +to manage dependencies. Install `uv` first if you don't have it already. -Install Poetry first if you don't have it already. Fork the repository, then -clone your fork and install a local development build of the project using -Poetry. +Now fork the repository, then clone your fork and install a local development +build of the project. ```sh # Clone the forked repo git clone git@github.com:/apy.git cd apy/ - -# Install the project with Poetry -poetry install -``` - -Poetry will create a virtual environment for you (see -[here](https://python-poetry.org/docs/configuration/#virtualenvsin-project) for -where the environment is created). You can either activate this environment -yourself then issue commands in the usual way, or you can prefix your commands -with `poetry run`. Example: - -```sh -poetry run apy --version ``` ### Tests -To run the tests, activate the virtual environment and run: - -```sh -pytest -``` - -Alternatively, without activating the environment: +To run the tests, run: ```sh -poetry run pytest +uv run pytest ``` ### Linting @@ -348,11 +432,11 @@ poetry run pytest To format the code, run: ```sh -poetry run black . +uv run ruff format ``` To type-check the code, run: ```sh -poetry run mypy src +uv run mypy . 
``` diff --git a/completion/_apy b/completion/_apy index cc2935e..5d2d472 100644 --- a/completion/_apy +++ b/completion/_apy @@ -17,7 +17,7 @@ __model() { case "$words[1]" in edit-css) opts=( \ - '(-m --model)'{-m,--model}'[Specify model]:model:' \ + '(-m --model)'{-m,--model}'[Specify model]:model:_apy_models' \ '(-s --sync-after)'{-s,--sync-after}'[Synchronize after edit]' \ $opts_help \ );; @@ -46,16 +46,22 @@ _apy() { subcmds=( \ 'add:Add notes interactively from terminal' \ + 'add-from-file:Add notes from Markdown file (alias for update-from-file)' \ 'add-single:Add a single note from command line arguments' \ - 'add-from-file:Add notes from Markdown file For input file' \ + 'backup:Backup Anki database to specified target file' \ 'check-media:Check media' \ + 'edit:Edit notes that match QUERY directly' \ 'info:Print some basic statistics' \ + 'list-cards:List cards that match QUERY' \ + 'list-cards-table:List cards that match QUERY in table format' \ + 'list-models:List available models' \ + 'list-notes:List notes that match QUERY' \ 'model:Interact with the models' \ - 'list:Print cards that match query' \ - 'review:Review/Edit notes that match query [default: marked/flagged]' \ - 'reposition:Reposition new card with given CID' \ + 'reposition:Reposition new cards that match QUERY' \ + 'review:Review/Edit notes that match QUERY [default: marked/flagged]' \ 'sync:Synchronize collection with AnkiWeb' \ - 'tag:Add or remove tags from notes that match query' \ + 'tag:Add or remove tags from notes that match QUERY' \ + 'update-from-file:Update existing or add new notes from Markdown file' \ ) _arguments $opts '*:: :->subcmds' && return 0 @@ -68,38 +74,80 @@ _apy() { case "$words[1]" in add) opts=( \ - '(-t --tags)'{-t,--tags}'[Specify tags]:tags:' \ - '(-m --model)'{-m,--model}'[Specify model]:model:' \ + '(-t --tags)'{-t,--tags}'[Specify tags]:tags:_apy_tags' \ + '(-m --model)'{-m,--model}'[Specify model]:model:_apy_models' \ '(-d --deck)'{-d,--deck}'[Specify 
deck]:deck:' \ $opts_help \ );; add-single) opts=( \ '(-s --preset)'{-s,--preset}'[Specify a preset]:preset:' \ - '(-t --tags)'{-t,--tags}'[Specify tags]:tags:' \ - '(-m --model)'{-m,--model}'[Specify model]:model:' \ + '(-t --tags)'{-t,--tags}'[Specify tags]:tags:_apy_tags' \ + '(-m --model)'{-m,--model}'[Specify model]:model:_apy_models' \ '(-d --deck)'{-d,--deck}'[Specify deck]:deck:' \ '::Fields' \ $opts_help \ );; - add-from-file) + add-from-file|update-from-file) opts=( \ '::Markdown input file:_files -g "*.md"' \ - '(-t --tags)'{-t,--tags}'[Specify tags]:tags:' \ + '(-t --tags)'{-t,--tags}'[Specify tags]:tags:_apy_tags' \ + '(-d --deck)'{-d,--deck}'[Specify deck]:deck:' \ + '(-u --update-file)'{-u,--update-file}'[Update original file with note IDs]' \ + $opts_help \ + );; + backup) + opts=( \ + '::Target file' \ + '(-m --include-media)'{-m,--include-media}'[Include media files]:media:' \ + '(-l --legacy)'{-l,--legacy}'[Support older Anki Versions]:legacy:' \ + $opts_help \ + );; + edit) + opts=( \ + '::Query' \ + '(-f --force-multiple)'{-f,--force-multiple}'[Allow editing multiple notes]' \ $opts_help \ );; info) opts=( $opts_help );; - model) __model; return;; - list) + list-cards) + opts=( \ + '::Query' \ + '(-v --verbose)'{-v,--verbose}'[Print note details]' \ + $opts_help \ + );; + list-cards-table) + opts=( \ + '::Query' \ + '(-a --show-answer)'{-a,--show-answer}'[Display answer]' \ + '(-m --show-model)'{-m,--show-model}'[Display model]' \ + '(-c --show-cid)'{-c,--show-cid}'[Display card ids]' \ + '(-d --show-due)'{-d,--show-due}'[Display card due time in days]' \ + '(-t --show-type)'{-t,--show-type}'[Display card type]' \ + '(-e --show-ease)'{-e,--show-ease}'[Display card ease]' \ + '(-l --show-lapses)'{-l,--show-lapses}'[Display card number of lapses]' \ + '(-l --show-deck)'{-l,--show-lapses}'[Display deck]' \ + $opts_help \ + );; + list-models) + opts=( \ + $opts_help \ + );; + list-notes) opts=( \ '::Query' \ - '(-v --verbose)'{-v,--verbose}'[Be 
verbose]' \ + '(-c --show-cards)'{-c,--show-cards}'[Print card specs]' \ + '(-r --show-raw-fields)'{-r,--show-raw-fields}'[Print raw field data]' \ + '(-v --verbose)'{-v,--verbose}'[Print note details]' \ $opts_help \ );; + model) __model; return;; review) opts=( \ - '(-q --query)'{-q,--query}'[Query string]:query:' \ + '::Query' \ + '(-m --check-markdown-consistency)'{-m,----check-markdown-consistency}'[Check for Markdown consistency]' \ + '(-n --cmc-range)'{-n,--cmc-range}'[Number of days to check]:range:' \ $opts_help \ );; reposition) @@ -111,7 +159,7 @@ _apy() { tag) opts=( \ '(-a --add-tags)'{-a,--add-tags}'[Add specified tags]:tags:' \ - '(-r --remove-tags)'{-r,--remove-tags}'[Remove specified tags]:tags:' \ + '(-r --remove-tags)'{-r,--remove-tags}'[Remove specified tags]:tags:_apy_tags' \ $opts_help \ '::Query' \ );; @@ -122,4 +170,56 @@ _apy() { _arguments $opts } +_apy_models() { + local cache_dir="${XDG_CACHE_HOME:-/tmp/cache}/apy" + local cache_file="$cache_dir/apy_models" + + local -a models + + [[ -d $cache_dir ]] || mkdir -p $cache_dir + if [[ -f $cache_file && $(($(date +%s) - $(date -r $cache_file +%s))) -lt 300 ]] + then + models=(${(f)"$(<$cache_file)"}) + else + models=(${(f)"$(apy list-models 2>/dev/null)"}) + print -l $models > $cache_file + fi + models=(${models//:/\\:}) + _describe 'model' models +} + +_apy_tags() { + local cache_dir="${XDG_CACHE_HOME:-/tmp/cache}/apy" + local cache_file="$cache_dir/apy_tags" + + local -a tags + + [[ -d $cache_dir ]] || mkdir -p $cache_dir + if [[ -f $cache_file && $(($(date +%s) - $(date -r $cache_file +%s))) -lt 300 ]] + then + tags=(${(f)"$(<$cache_file)"}) + else + tags=(${(f)"$(apy tag -s 2>/dev/null)"}) + print -l $tags > $cache_file + fi + + local current_input="${words[CURRENT]}" + local current_tag="${current_input##*,}" + local prefix="${current_input%,*}" + + local -a used_tags_array=(${(s:,:)prefix}) + local -a available_tags + for tag in $tags; do + if [[ ! 
" ${used_tags_array[@]} " =~ " ${tag} " ]]; then + available_tags+=($tag) + fi + done + + if [[ $current_input == *","* ]]; then + compadd -P "${prefix}," -a available_tags + else + _describe 'tags' available_tags + fi +} + _apy "$@" diff --git a/completion/apy.fish b/completion/apy.fish new file mode 100644 index 0000000..9c19462 --- /dev/null +++ b/completion/apy.fish @@ -0,0 +1,106 @@ +# Fish shell completion for apy +# Copy this file to ~/.config/fish/completions/ + +function __fish_apy_no_subcommand + set -l cmd (commandline -opc) + if [ (count $cmd) -eq 1 ] + return 0 + end + return 1 +end + +function __fish_apy_using_command + set -l cmd (commandline -opc) + if [ (count $cmd) -gt 1 ] + if [ $argv[1] = $cmd[2] ] + return 0 + end + end + return 1 +end + +# Main apy command +complete -f -c apy -n '__fish_apy_no_subcommand' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_no_subcommand' -l base-path -s b -d 'Set Anki base directory' -a '(__fish_complete_directories)' +complete -f -c apy -n '__fish_apy_no_subcommand' -l profile-name -s p -d 'Specify name of Anki profile to use' +complete -f -c apy -n '__fish_apy_no_subcommand' -l version -s V -d 'Show apy version' + +# Subcommands +complete -f -c apy -n '__fish_apy_no_subcommand' -a add -d 'Add notes interactively from terminal' +complete -f -c apy -n '__fish_apy_no_subcommand' -a add-single -d 'Add a single note from command line arguments' +complete -f -c apy -n '__fish_apy_no_subcommand' -a add-from-file -d 'Add notes from Markdown file (alias for update-from-file)' +complete -f -c apy -n '__fish_apy_no_subcommand' -a update-from-file -d 'Update existing or add new notes from Markdown file' +complete -f -c apy -n '__fish_apy_no_subcommand' -a check-media -d 'Check media' +complete -f -c apy -n '__fish_apy_no_subcommand' -a info -d 'Print some basic statistics' +complete -f -c apy -n '__fish_apy_no_subcommand' -a model -d 'Interact with the models' +complete -f -c apy -n '__fish_apy_no_subcommand' 
-a list -d 'Print cards that match query' +complete -f -c apy -n '__fish_apy_no_subcommand' -a review -d 'Review/Edit notes that match query' +complete -f -c apy -n '__fish_apy_no_subcommand' -a reposition -d 'Reposition new card with given CID' +complete -f -c apy -n '__fish_apy_no_subcommand' -a sync -d 'Synchronize collection with AnkiWeb' +complete -f -c apy -n '__fish_apy_no_subcommand' -a tag -d 'Add or remove tags from notes that match query' +complete -f -c apy -n '__fish_apy_no_subcommand' -a edit -d 'Edit notes that match query directly' +complete -f -c apy -n '__fish_apy_no_subcommand' -a backup -d 'Backup Anki database to specified target file' + +# add options +complete -f -c apy -n '__fish_apy_using_command add' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command add' -l tags -s t -d 'Specify default tags for new cards' +complete -f -c apy -n '__fish_apy_using_command add' -l model -s m -d 'Specify default model for new cards' +complete -f -c apy -n '__fish_apy_using_command add' -l deck -s d -d 'Specify default deck for new cards' + +# add-single options +complete -f -c apy -n '__fish_apy_using_command add-single' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command add-single' -l parse-markdown -s p -d 'Parse input as Markdown' +complete -f -c apy -n '__fish_apy_using_command add-single' -l preset -s s -d 'Specify a preset' +complete -f -c apy -n '__fish_apy_using_command add-single' -l tags -s t -d 'Specify default tags for new cards' +complete -f -c apy -n '__fish_apy_using_command add-single' -l model -s m -d 'Specify default model for new cards' +complete -f -c apy -n '__fish_apy_using_command add-single' -l deck -s d -d 'Specify default deck for new cards' + +# add-from-file and update-from-file options +for cmd in add-from-file update-from-file + complete -f -c apy -n "__fish_apy_using_command $cmd" -l help -s h -d 'Show help' + complete -f -c apy -n "__fish_apy_using_command $cmd" -l tags -s t 
-d 'Specify default tags for cards' + complete -f -c apy -n "__fish_apy_using_command $cmd" -l deck -s d -d 'Specify default deck for cards' + complete -f -c apy -n "__fish_apy_using_command $cmd" -l update-file -s u -d 'Update original file with note IDs' + # File argument + complete -f -c apy -n "__fish_apy_using_command $cmd" -k -a "(__fish_complete_suffix .md)" +end + +# list options +complete -f -c apy -n '__fish_apy_using_command list' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command list' -l show-answer -s a -d 'Display answer' +complete -f -c apy -n '__fish_apy_using_command list' -l show-model -s m -d 'Display model' +complete -f -c apy -n '__fish_apy_using_command list' -l show-cid -s c -d 'Display card ids' +complete -f -c apy -n '__fish_apy_using_command list' -l show-due -s d -d 'Display card due time in days' +complete -f -c apy -n '__fish_apy_using_command list' -l show-type -s t -d 'Display card type' +complete -f -c apy -n '__fish_apy_using_command list' -l show-ease -s e -d 'Display card ease' +complete -f -c apy -n '__fish_apy_using_command list' -l show-lapses -s l -d 'Display card number of lapses' + +# tag options +complete -f -c apy -n '__fish_apy_using_command tag' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command tag' -l add-tags -s a -d 'Add specified tags to matched notes' +complete -f -c apy -n '__fish_apy_using_command tag' -l remove-tags -s r -d 'Remove specified tags from matched notes' +complete -f -c apy -n '__fish_apy_using_command tag' -l sort-by-count -s c -d 'When listing tags, sort by note count' +complete -f -c apy -n '__fish_apy_using_command tag' -l purge -s p -d 'Remove all unused tags' + +# review options +complete -f -c apy -n '__fish_apy_using_command review' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command review' -l check-markdown-consistency -s m -d 'Check for Markdown consistency' +complete -f -c apy -n '__fish_apy_using_command 
review' -l cmc-range -s n -d 'Number of days backwards to check consistency' + +# edit options +complete -f -c apy -n '__fish_apy_using_command edit' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command edit' -l force-multiple -s f -d 'Allow editing multiple notes' + +# backup options +complete -f -c apy -n '__fish_apy_using_command backup' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command backup' -l include-media -s m -d 'Include media files in backup' +complete -f -c apy -n '__fish_apy_using_command backup' -l legacy -s l -d 'Support older Anki versions' + +# model subcommands +complete -f -c apy -n '__fish_apy_using_command model' -a edit-css -d 'Edit the CSS template for the specified model' +complete -f -c apy -n '__fish_apy_using_command model' -a rename -d 'Rename model from old_name to new_name' + +# model edit-css options +complete -f -c apy -n '__fish_apy_using_command model; and __fish_seen_subcommand_from edit-css' -l help -s h -d 'Show help' +complete -f -c apy -n '__fish_apy_using_command model; and __fish_seen_subcommand_from edit-css' -l model-name -s m -d 'Specify for which model to edit CSS template' +complete -f -c apy -n '__fish_apy_using_command model; and __fish_seen_subcommand_from edit-css' -l sync-after -s s -d 'Perform sync after any change' \ No newline at end of file diff --git a/poetry.lock b/poetry.lock deleted file mode 100644 index 1dd60b5..0000000 --- a/poetry.lock +++ /dev/null @@ -1,958 +0,0 @@ -# This file is automatically @generated by Poetry 1.8.2 and should not be changed by hand. 
- -[[package]] -name = "anki" -version = "23.12.1" -description = "" -optional = false -python-versions = ">=3.9" -files = [ - {file = "anki-23.12.1-cp39-abi3-macosx_10_13_x86_64.whl", hash = "sha256:f3d48291b7ce60b72bf9cfbec4dc990d0a9a29f30c5412cc6d30833fe550a652"}, - {file = "anki-23.12.1-cp39-abi3-macosx_11_0_arm64.whl", hash = "sha256:59ca12994956777b65efc7e4ea9235d01532b7fa1537173d46e28eb7eba552cd"}, - {file = "anki-23.12.1-cp39-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:d9676c1595c28d335ef5dc1e60e5eb624f4f1e948a5344dde43374bfd8a98b29"}, - {file = "anki-23.12.1-cp39-abi3-manylinux_2_31_aarch64.whl", hash = "sha256:08e807327090ae40a0e5eb331ff60ac9af8f58faa3b46835420ee84a4191c553"}, - {file = "anki-23.12.1-cp39-abi3-win_amd64.whl", hash = "sha256:af5e34a94baed0883e1df9de636ec690cfb741eaddfbb788d97db44f4218e074"}, -] - -[package.dependencies] -beautifulsoup4 = "*" -decorator = "*" -distro = {version = "*", markers = "sys_platform != \"darwin\" and sys_platform != \"win32\""} -markdown = "*" -orjson = "*" -protobuf = ">=4.21" -psutil = {version = "*", markers = "sys_platform == \"win32\""} -requests = {version = "*", extras = ["socks"]} - -[[package]] -name = "astroid" -version = "3.0.2" -description = "An abstract syntax tree for Python with inference support." 
-optional = false -python-versions = ">=3.8.0" -files = [ - {file = "astroid-3.0.2-py3-none-any.whl", hash = "sha256:d6e62862355f60e716164082d6b4b041d38e2a8cf1c7cd953ded5108bac8ff5c"}, - {file = "astroid-3.0.2.tar.gz", hash = "sha256:4a61cf0a59097c7bb52689b0fd63717cd2a8a14dc9f1eee97b82d814881c8c91"}, -] - -[package.dependencies] -typing-extensions = {version = ">=4.0.0", markers = "python_version < \"3.11\""} - -[[package]] -name = "beautifulsoup4" -version = "4.12.2" -description = "Screen-scraping library" -optional = false -python-versions = ">=3.6.0" -files = [ - {file = "beautifulsoup4-4.12.2-py3-none-any.whl", hash = "sha256:bd2520ca0d9d7d12694a53d44ac482d181b4ec1888909b035a3dbf40d0f57d4a"}, - {file = "beautifulsoup4-4.12.2.tar.gz", hash = "sha256:492bbc69dca35d12daac71c4db1bfff0c876c00ef4a2ffacce226d4638eb72da"}, -] - -[package.dependencies] -soupsieve = ">1.2" - -[package.extras] -html5lib = ["html5lib"] -lxml = ["lxml"] - -[[package]] -name = "black" -version = "24.3.0" -description = "The uncompromising code formatter." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "black-24.3.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:7d5e026f8da0322b5662fa7a8e752b3fa2dac1c1cbc213c3d7ff9bdd0ab12395"}, - {file = "black-24.3.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9f50ea1132e2189d8dff0115ab75b65590a3e97de1e143795adb4ce317934995"}, - {file = "black-24.3.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e2af80566f43c85f5797365077fb64a393861a3730bd110971ab7a0c94e873e7"}, - {file = "black-24.3.0-cp310-cp310-win_amd64.whl", hash = "sha256:4be5bb28e090456adfc1255e03967fb67ca846a03be7aadf6249096100ee32d0"}, - {file = "black-24.3.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4f1373a7808a8f135b774039f61d59e4be7eb56b2513d3d2f02a8b9365b8a8a9"}, - {file = "black-24.3.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:aadf7a02d947936ee418777e0247ea114f78aff0d0959461057cae8a04f20597"}, - {file = "black-24.3.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65c02e4ea2ae09d16314d30912a58ada9a5c4fdfedf9512d23326128ac08ac3d"}, - {file = "black-24.3.0-cp311-cp311-win_amd64.whl", hash = "sha256:bf21b7b230718a5f08bd32d5e4f1db7fc8788345c8aea1d155fc17852b3410f5"}, - {file = "black-24.3.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:2818cf72dfd5d289e48f37ccfa08b460bf469e67fb7c4abb07edc2e9f16fb63f"}, - {file = "black-24.3.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:4acf672def7eb1725f41f38bf6bf425c8237248bb0804faa3965c036f7672d11"}, - {file = "black-24.3.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7ed6668cbbfcd231fa0dc1b137d3e40c04c7f786e626b405c62bcd5db5857e4"}, - {file = "black-24.3.0-cp312-cp312-win_amd64.whl", hash = "sha256:56f52cfbd3dabe2798d76dbdd299faa046a901041faf2cf33288bc4e6dae57b5"}, - {file = "black-24.3.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:79dcf34b33e38ed1b17434693763301d7ccbd1c5860674a8f871bd15139e7837"}, - {file = 
"black-24.3.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:e19cb1c6365fd6dc38a6eae2dcb691d7d83935c10215aef8e6c38edee3f77abd"}, - {file = "black-24.3.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:65b76c275e4c1c5ce6e9870911384bff5ca31ab63d19c76811cb1fb162678213"}, - {file = "black-24.3.0-cp38-cp38-win_amd64.whl", hash = "sha256:b5991d523eee14756f3c8d5df5231550ae8993e2286b8014e2fdea7156ed0959"}, - {file = "black-24.3.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:c45f8dff244b3c431b36e3224b6be4a127c6aca780853574c00faf99258041eb"}, - {file = "black-24.3.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:6905238a754ceb7788a73f02b45637d820b2f5478b20fec82ea865e4f5d4d9f7"}, - {file = "black-24.3.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d7de8d330763c66663661a1ffd432274a2f92f07feeddd89ffd085b5744f85e7"}, - {file = "black-24.3.0-cp39-cp39-win_amd64.whl", hash = "sha256:7bb041dca0d784697af4646d3b62ba4a6b028276ae878e53f6b4f74ddd6db99f"}, - {file = "black-24.3.0-py3-none-any.whl", hash = "sha256:41622020d7120e01d377f74249e677039d20e6344ff5851de8a10f11f513bf93"}, - {file = "black-24.3.0.tar.gz", hash = "sha256:a0c9c4a0771afc6919578cec71ce82a3e31e054904e7197deacbc9382671c41f"}, -] - -[package.dependencies] -click = ">=8.0.0" -mypy-extensions = ">=0.4.3" -packaging = ">=22.0" -pathspec = ">=0.9.0" -platformdirs = ">=2" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = {version = ">=4.0.1", markers = "python_version < \"3.11\""} - -[package.extras] -colorama = ["colorama (>=0.4.3)"] -d = ["aiohttp (>=3.7.4)", "aiohttp (>=3.7.4,!=3.9.0)"] -jupyter = ["ipython (>=7.8.0)", "tokenize-rt (>=3.2.0)"] -uvloop = ["uvloop (>=0.15.2)"] - -[[package]] -name = "bump2version" -version = "1.0.1" -description = "Version-bump your software with a single command!" 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410"}, - {file = "bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6"}, -] - -[[package]] -name = "certifi" -version = "2023.11.17" -description = "Python package for providing Mozilla's CA Bundle." -optional = false -python-versions = ">=3.6" -files = [ - {file = "certifi-2023.11.17-py3-none-any.whl", hash = "sha256:e036ab49d5b79556f99cfc2d9320b34cfbe5be05c5871b51de9329f0603b0474"}, - {file = "certifi-2023.11.17.tar.gz", hash = "sha256:9b469f3a900bf28dc19b8cfbf8019bf47f7fdd1a65a1d4ffb98fc14166beb4d1"}, -] - -[[package]] -name = "charset-normalizer" -version = "3.3.2" -description = "The Real First Universal Charset Detector. Open, modern and actively maintained alternative to Chardet." -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "charset-normalizer-3.3.2.tar.gz", hash = "sha256:f30c3cb33b24454a82faecaf01b19c18562b1e89558fb6c56de4d9118a032fd5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:25baf083bf6f6b341f4121c2f3c548875ee6f5339300e08be3f2b2ba1721cdd3"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:06435b539f889b1f6f4ac1758871aae42dc3a8c0e24ac9e60c2384973ad73027"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9063e24fdb1e498ab71cb7419e24622516c4a04476b17a2dab57e8baa30d6e03"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6897af51655e3691ff853668779c7bad41579facacf5fd7253b0133308cf000d"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1d3193f4a680c64b4b6a9115943538edb896edc190f0b222e73761716519268e"}, - {file = 
"charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:cd70574b12bb8a4d2aaa0094515df2463cb429d8536cfb6c7ce983246983e5a6"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8465322196c8b4d7ab6d1e049e4c5cb460d0394da4a27d23cc242fbf0034b6b5"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:a9a8e9031d613fd2009c182b69c7b2c1ef8239a0efb1df3f7c8da66d5dd3d537"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:beb58fe5cdb101e3a055192ac291b7a21e3b7ef4f67fa1d74e331a7f2124341c"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:e06ed3eb3218bc64786f7db41917d4e686cc4856944f53d5bdf83a6884432e12"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_ppc64le.whl", hash = "sha256:2e81c7b9c8979ce92ed306c249d46894776a909505d8f5a4ba55b14206e3222f"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_s390x.whl", hash = "sha256:572c3763a264ba47b3cf708a44ce965d98555f618ca42c926a9c1616d8f34269"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:fd1abc0d89e30cc4e02e4064dc67fcc51bd941eb395c502aac3ec19fab46b519"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win32.whl", hash = "sha256:3d47fa203a7bd9c5b6cee4736ee84ca03b8ef23193c0d1ca99b5089f72645c73"}, - {file = "charset_normalizer-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:10955842570876604d404661fbccbc9c7e684caf432c09c715ec38fbae45ae09"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:802fe99cca7457642125a8a88a084cef28ff0cf9407060f7b93dca5aa25480db"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:573f6eac48f4769d667c4442081b1794f52919e7edada77495aaed9236d13a96"}, - {file = 
"charset_normalizer-3.3.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:549a3a73da901d5bc3ce8d24e0600d1fa85524c10287f6004fbab87672bf3e1e"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f27273b60488abe721a075bcca6d7f3964f9f6f067c8c4c605743023d7d3944f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:1ceae2f17a9c33cb48e3263960dc5fc8005351ee19db217e9b1bb15d28c02574"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:65f6f63034100ead094b8744b3b97965785388f308a64cf8d7c34f2f2e5be0c4"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:753f10e867343b4511128c6ed8c82f7bec3bd026875576dfd88483c5c73b2fd8"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:4a78b2b446bd7c934f5dcedc588903fb2f5eec172f3d29e52a9096a43722adfc"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:e537484df0d8f426ce2afb2d0f8e1c3d0b114b83f8850e5f2fbea0e797bd82ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:eb6904c354526e758fda7167b33005998fb68c46fbc10e013ca97f21ca5c8887"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_ppc64le.whl", hash = "sha256:deb6be0ac38ece9ba87dea880e438f25ca3eddfac8b002a2ec3d9183a454e8ae"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_s390x.whl", hash = "sha256:4ab2fe47fae9e0f9dee8c04187ce5d09f48eabe611be8259444906793ab7cbce"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:80402cd6ee291dcb72644d6eac93785fe2c8b9cb30893c1af5b8fdd753b9d40f"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win32.whl", hash = 
"sha256:7cd13a2e3ddeed6913a65e66e94b51d80a041145a026c27e6bb76c31a853c6ab"}, - {file = "charset_normalizer-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:663946639d296df6a2bb2aa51b60a2454ca1cb29835324c640dafb5ff2131a77"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_universal2.whl", hash = "sha256:0b2b64d2bb6d3fb9112bafa732def486049e63de9618b5843bcdd081d8144cd8"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:ddbb2551d7e0102e7252db79ba445cdab71b26640817ab1e3e3648dad515003b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:55086ee1064215781fff39a1af09518bc9255b50d6333f2e4c74ca09fac6a8f6"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8f4a014bc36d3c57402e2977dada34f9c12300af536839dc38c0beab8878f38a"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a10af20b82360ab00827f916a6058451b723b4e65030c5a18577c8b2de5b3389"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:8d756e44e94489e49571086ef83b2bb8ce311e730092d2c34ca8f7d925cb20aa"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:90d558489962fd4918143277a773316e56c72da56ec7aa3dc3dbbe20fdfed15b"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6ac7ffc7ad6d040517be39eb591cac5ff87416c2537df6ba3cba3bae290c0fed"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:7ed9e526742851e8d5cc9e6cf41427dfc6068d4f5a3bb03659444b4cabf6bc26"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:8bdb58ff7ba23002a4c5808d608e4e6c687175724f54a5dade5fa8c67b604e4d"}, - {file = 
"charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_ppc64le.whl", hash = "sha256:6b3251890fff30ee142c44144871185dbe13b11bab478a88887a639655be1068"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_s390x.whl", hash = "sha256:b4a23f61ce87adf89be746c8a8974fe1c823c891d8f86eb218bb957c924bb143"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:efcb3f6676480691518c177e3b465bcddf57cea040302f9f4e6e191af91174d4"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win32.whl", hash = "sha256:d965bba47ddeec8cd560687584e88cf699fd28f192ceb452d1d7ee807c5597b7"}, - {file = "charset_normalizer-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:96b02a3dc4381e5494fad39be677abcb5e6634bf7b4fa83a6dd3112607547001"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:95f2a5796329323b8f0512e09dbb7a1860c46a39da62ecb2324f116fa8fdc85c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:c002b4ffc0be611f0d9da932eb0f704fe2602a9a949d1f738e4c34c75b0863d5"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:a981a536974bbc7a512cf44ed14938cf01030a99e9b3a06dd59578882f06f985"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:3287761bc4ee9e33561a7e058c72ac0938c4f57fe49a09eae428fd88aafe7bb6"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:42cb296636fcc8b0644486d15c12376cb9fa75443e00fb25de0b8602e64c1714"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0a55554a2fa0d408816b3b5cedf0045f4b8e1a6065aec45849de2d6f3f8e9786"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_aarch64.whl", hash = "sha256:c083af607d2515612056a31f0a8d9e0fcb5876b7bfc0abad3ecd275bc4ebc2d5"}, - 
{file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:87d1351268731db79e0f8e745d92493ee2841c974128ef629dc518b937d9194c"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_ppc64le.whl", hash = "sha256:bd8f7df7d12c2db9fab40bdd87a7c09b1530128315d047a086fa3ae3435cb3a8"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_s390x.whl", hash = "sha256:c180f51afb394e165eafe4ac2936a14bee3eb10debc9d9e4db8958fe36afe711"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:8c622a5fe39a48f78944a87d4fb8a53ee07344641b0562c540d840748571b811"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win32.whl", hash = "sha256:db364eca23f876da6f9e16c9da0df51aa4f104a972735574842618b8c6d999d4"}, - {file = "charset_normalizer-3.3.2-cp37-cp37m-win_amd64.whl", hash = "sha256:86216b5cee4b06df986d214f664305142d9c76df9b6512be2738aa72a2048f99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_universal2.whl", hash = "sha256:6463effa3186ea09411d50efc7d85360b38d5f09b870c48e4600f63af490e56a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:6c4caeef8fa63d06bd437cd4bdcf3ffefe6738fb1b25951440d80dc7df8c03ac"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:37e55c8e51c236f95b033f6fb391d7d7970ba5fe7ff453dad675e88cf303377a"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fb69256e180cb6c8a894fee62b3afebae785babc1ee98b81cdf68bbca1987f33"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:ae5f4161f18c61806f411a13b0310bea87f987c7d2ecdbdaad0e94eb2e404238"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b2b0a0c0517616b6869869f8c581d4eb2dd83a4d79e0ebcb7d373ef9956aeb0a"}, - {file = 
"charset_normalizer-3.3.2-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:45485e01ff4d3630ec0d9617310448a8702f70e9c01906b0d0118bdf9d124cf2"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:eb00ed941194665c332bf8e078baf037d6c35d7c4f3102ea2d4f16ca94a26dc8"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_aarch64.whl", hash = "sha256:2127566c664442652f024c837091890cb1942c30937add288223dc895793f898"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:a50aebfa173e157099939b17f18600f72f84eed3049e743b68ad15bd69b6bf99"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_ppc64le.whl", hash = "sha256:4d0d1650369165a14e14e1e47b372cfcb31d6ab44e6e33cb2d4e57265290044d"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_s390x.whl", hash = "sha256:923c0c831b7cfcb071580d3f46c4baf50f174be571576556269530f4bbd79d04"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:06a81e93cd441c56a9b65d8e1d043daeb97a3d0856d177d5c90ba85acb3db087"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win32.whl", hash = "sha256:6ef1d82a3af9d3eecdba2321dc1b3c238245d890843e040e41e470ffa64c3e25"}, - {file = "charset_normalizer-3.3.2-cp38-cp38-win_amd64.whl", hash = "sha256:eb8821e09e916165e160797a6c17edda0679379a4be5c716c260e836e122f54b"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_universal2.whl", hash = "sha256:c235ebd9baae02f1b77bcea61bce332cb4331dc3617d254df3323aa01ab47bd4"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5b4c145409bef602a690e7cfad0a15a55c13320ff7a3ad7ca59c13bb8ba4d45d"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:68d1f8a9e9e37c1223b656399be5d6b448dea850bed7d0f87a8311f1ff3dabb0"}, - {file = 
"charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:22afcb9f253dac0696b5a4be4a1c0f8762f8239e21b99680099abd9b2b1b2269"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e27ad930a842b4c5eb8ac0016b0a54f5aebbe679340c26101df33424142c143c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1f79682fbe303db92bc2b1136016a38a42e835d932bab5b3b1bfcfbf0640e519"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:b261ccdec7821281dade748d088bb6e9b69e6d15b30652b74cbbac25e280b796"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:122c7fa62b130ed55f8f285bfd56d5f4b4a5b503609d181f9ad85e55c89f4185"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_aarch64.whl", hash = "sha256:d0eccceffcb53201b5bfebb52600a5fb483a20b61da9dbc885f8b103cbe7598c"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:9f96df6923e21816da7e0ad3fd47dd8f94b2a5ce594e00677c0013018b813458"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_ppc64le.whl", hash = "sha256:7f04c839ed0b6b98b1a7501a002144b76c18fb1c1850c8b98d458ac269e26ed2"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_s390x.whl", hash = "sha256:34d1c8da1e78d2e001f363791c98a272bb734000fcef47a491c1e3b0505657a8"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ff8fa367d09b717b2a17a052544193ad76cd49979c805768879cb63d9ca50561"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win32.whl", hash = "sha256:aed38f6e4fb3f5d6bf81bfa990a07806be9d83cf7bacef998ab1a9bd660a581f"}, - {file = "charset_normalizer-3.3.2-cp39-cp39-win_amd64.whl", hash = "sha256:b01b88d45a6fcb69667cd6d2f7a9aeb4bf53760d7fc536bf679ec94fe9f3ff3d"}, - {file = 
"charset_normalizer-3.3.2-py3-none-any.whl", hash = "sha256:3e4d1f6587322d2788836a99c69062fbb091331ec940e02d12d179c1d53e25fc"}, -] - -[[package]] -name = "click" -version = "8.1.7" -description = "Composable command line interface toolkit" -optional = false -python-versions = ">=3.7" -files = [ - {file = "click-8.1.7-py3-none-any.whl", hash = "sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28"}, - {file = "click-8.1.7.tar.gz", hash = "sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "platform_system == \"Windows\""} - -[[package]] -name = "colorama" -version = "0.4.6" -description = "Cross-platform colored terminal text." -optional = false -python-versions = "!=3.0.*,!=3.1.*,!=3.2.*,!=3.3.*,!=3.4.*,!=3.5.*,!=3.6.*,>=2.7" -files = [ - {file = "colorama-0.4.6-py2.py3-none-any.whl", hash = "sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6"}, - {file = "colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44"}, -] - -[[package]] -name = "decorator" -version = "5.1.1" -description = "Decorators for Humans" -optional = false -python-versions = ">=3.5" -files = [ - {file = "decorator-5.1.1-py3-none-any.whl", hash = "sha256:b8c3f85900b9dc423225913c5aace94729fe1fa9763b38939a95226f02d37186"}, - {file = "decorator-5.1.1.tar.gz", hash = "sha256:637996211036b6385ef91435e4fae22989472f9d571faba8927ba8253acbc330"}, -] - -[[package]] -name = "dill" -version = "0.3.7" -description = "serialize all of Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "dill-0.3.7-py3-none-any.whl", hash = "sha256:76b122c08ef4ce2eedcd4d1abd8e641114bfc6c2867f49f3c41facf65bf19f5e"}, - {file = "dill-0.3.7.tar.gz", hash = "sha256:cc1c8b182eb3013e24bd475ff2e9295af86c1a38eb1aff128dac8962a9ce3c03"}, -] - -[package.extras] -graph = ["objgraph (>=1.7.2)"] - -[[package]] -name = "distro" -version = "1.9.0" 
-description = "Distro - an OS platform information API" -optional = false -python-versions = ">=3.6" -files = [ - {file = "distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2"}, - {file = "distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed"}, -] - -[[package]] -name = "exceptiongroup" -version = "1.2.0" -description = "Backport of PEP 654 (exception groups)" -optional = false -python-versions = ">=3.7" -files = [ - {file = "exceptiongroup-1.2.0-py3-none-any.whl", hash = "sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14"}, - {file = "exceptiongroup-1.2.0.tar.gz", hash = "sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68"}, -] - -[package.extras] -test = ["pytest (>=6)"] - -[[package]] -name = "html5lib" -version = "1.1" -description = "HTML parser based on the WHATWG HTML specification" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*" -files = [ - {file = "html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d"}, - {file = "html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f"}, -] - -[package.dependencies] -six = ">=1.9" -webencodings = "*" - -[package.extras] -all = ["chardet (>=2.2)", "genshi", "lxml"] -chardet = ["chardet (>=2.2)"] -genshi = ["genshi"] -lxml = ["lxml"] - -[[package]] -name = "idna" -version = "3.6" -description = "Internationalized Domain Names in Applications (IDNA)" -optional = false -python-versions = ">=3.5" -files = [ - {file = "idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f"}, - {file = "idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca"}, -] - -[[package]] -name = "importlib-metadata" -version = "7.0.1" -description = "Read 
metadata from Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "importlib_metadata-7.0.1-py3-none-any.whl", hash = "sha256:4805911c3a4ec7c3966410053e9ec6a1fecd629117df5adee56dfc9432a1081e"}, - {file = "importlib_metadata-7.0.1.tar.gz", hash = "sha256:f238736bb06590ae52ac1fab06a3a9ef1d8dce2b7a35b5ab329371d6c8f5d2cc"}, -] - -[package.dependencies] -zipp = ">=0.5" - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -perf = ["ipython"] -testing = ["flufl.flake8", "importlib-resources (>=1.3)", "packaging", "pyfakefs", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf (>=0.9.2)", "pytest-ruff"] - -[[package]] -name = "iniconfig" -version = "2.0.0" -description = "brain-dead simple config-ini parsing" -optional = false -python-versions = ">=3.7" -files = [ - {file = "iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374"}, - {file = "iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3"}, -] - -[[package]] -name = "isort" -version = "5.13.2" -description = "A Python utility / library to sort Python imports." -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "isort-5.13.2-py3-none-any.whl", hash = "sha256:8ca5e72a8d85860d5a3fa69b8745237f2939afe12dbf656afbcb47fe72d947a6"}, - {file = "isort-5.13.2.tar.gz", hash = "sha256:48fdfcb9face5d58a4f6dde2e72a1fb8dcaf8ab26f95ab49fab84c2ddefb0109"}, -] - -[package.extras] -colors = ["colorama (>=0.4.6)"] - -[[package]] -name = "markdown" -version = "3.5.1" -description = "Python implementation of John Gruber's Markdown." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "Markdown-3.5.1-py3-none-any.whl", hash = "sha256:5874b47d4ee3f0b14d764324d2c94c03ea66bee56f2d929da9f2508d65e722dc"}, - {file = "Markdown-3.5.1.tar.gz", hash = "sha256:b65d7beb248dc22f2e8a31fb706d93798093c308dc1aba295aedeb9d41a813bd"}, -] - -[package.dependencies] -importlib-metadata = {version = ">=4.4", markers = "python_version < \"3.10\""} - -[package.extras] -docs = ["mdx-gh-links (>=0.2)", "mkdocs (>=1.5)", "mkdocs-gen-files", "mkdocs-literate-nav", "mkdocs-nature (>=0.6)", "mkdocs-section-index", "mkdocstrings[python]"] -testing = ["coverage", "pyyaml"] - -[[package]] -name = "markdown-it-py" -version = "3.0.0" -description = "Python port of markdown-it. Markdown parsing, done right!" -optional = false -python-versions = ">=3.8" -files = [ - {file = "markdown-it-py-3.0.0.tar.gz", hash = "sha256:e3f60a94fa066dc52ec76661e37c851cb232d92f9886b15cb560aaada2df8feb"}, - {file = "markdown_it_py-3.0.0-py3-none-any.whl", hash = "sha256:355216845c60bd96232cd8d8c40e8f9765cc86f46880e43a8fd22dc1a1a8cab1"}, -] - -[package.dependencies] -mdurl = ">=0.1,<1.0" - -[package.extras] -benchmarking = ["psutil", "pytest", "pytest-benchmark"] -code-style = ["pre-commit (>=3.0,<4.0)"] -compare = ["commonmark (>=0.9,<1.0)", "markdown (>=3.4,<4.0)", "mistletoe (>=1.0,<2.0)", "mistune (>=2.0,<3.0)", "panflute (>=2.3,<3.0)"] -linkify = ["linkify-it-py (>=1,<3)"] -plugins = ["mdit-py-plugins"] -profiling = ["gprof2dot"] -rtd = ["jupyter_sphinx", "mdit-py-plugins", "myst-parser", "pyyaml", "sphinx", "sphinx-copybutton", "sphinx-design", "sphinx_book_theme"] -testing = ["coverage", "pytest", "pytest-cov", "pytest-regressions"] - -[[package]] -name = "markdownify" -version = "0.11.6" -description = "Convert HTML to markdown." 
-optional = false -python-versions = "*" -files = [ - {file = "markdownify-0.11.6-py3-none-any.whl", hash = "sha256:ba35fe289d5e9073bcd7d2cad629278fe25f1a93741fcdc0bfb4f009076d8324"}, - {file = "markdownify-0.11.6.tar.gz", hash = "sha256:009b240e0c9f4c8eaf1d085625dcd4011e12f0f8cec55dedf9ea6f7655e49bfe"}, -] - -[package.dependencies] -beautifulsoup4 = ">=4.9,<5" -six = ">=1.15,<2" - -[[package]] -name = "mccabe" -version = "0.7.0" -description = "McCabe checker, plugin for flake8" -optional = false -python-versions = ">=3.6" -files = [ - {file = "mccabe-0.7.0-py2.py3-none-any.whl", hash = "sha256:6c2d30ab6be0e4a46919781807b4f0d834ebdd6c6e3dca0bda5a15f863427b6e"}, - {file = "mccabe-0.7.0.tar.gz", hash = "sha256:348e0240c33b60bbdf4e523192ef919f28cb2c3d7d5c7794f74009290f236325"}, -] - -[[package]] -name = "mdurl" -version = "0.1.2" -description = "Markdown URL utilities" -optional = false -python-versions = ">=3.7" -files = [ - {file = "mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8"}, - {file = "mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba"}, -] - -[[package]] -name = "mypy" -version = "1.8.0" -description = "Optional static typing for Python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "mypy-1.8.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:485a8942f671120f76afffff70f259e1cd0f0cfe08f81c05d8816d958d4577d3"}, - {file = "mypy-1.8.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:df9824ac11deaf007443e7ed2a4a26bebff98d2bc43c6da21b2b64185da011c4"}, - {file = "mypy-1.8.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:2afecd6354bbfb6e0160f4e4ad9ba6e4e003b767dd80d85516e71f2e955ab50d"}, - {file = "mypy-1.8.0-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8963b83d53ee733a6e4196954502b33567ad07dfd74851f32be18eb932fb1cb9"}, - {file = "mypy-1.8.0-cp310-cp310-win_amd64.whl", hash = 
"sha256:e46f44b54ebddbeedbd3d5b289a893219065ef805d95094d16a0af6630f5d410"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:855fe27b80375e5c5878492f0729540db47b186509c98dae341254c8f45f42ae"}, - {file = "mypy-1.8.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4c886c6cce2d070bd7df4ec4a05a13ee20c0aa60cb587e8d1265b6c03cf91da3"}, - {file = "mypy-1.8.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:d19c413b3c07cbecf1f991e2221746b0d2a9410b59cb3f4fb9557f0365a1a817"}, - {file = "mypy-1.8.0-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:9261ed810972061388918c83c3f5cd46079d875026ba97380f3e3978a72f503d"}, - {file = "mypy-1.8.0-cp311-cp311-win_amd64.whl", hash = "sha256:51720c776d148bad2372ca21ca29256ed483aa9a4cdefefcef49006dff2a6835"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:52825b01f5c4c1c4eb0db253ec09c7aa17e1a7304d247c48b6f3599ef40db8bd"}, - {file = "mypy-1.8.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f5ac9a4eeb1ec0f1ccdc6f326bcdb464de5f80eb07fb38b5ddd7b0de6bc61e55"}, - {file = "mypy-1.8.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:afe3fe972c645b4632c563d3f3eff1cdca2fa058f730df2b93a35e3b0c538218"}, - {file = "mypy-1.8.0-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:42c6680d256ab35637ef88891c6bd02514ccb7e1122133ac96055ff458f93fc3"}, - {file = "mypy-1.8.0-cp312-cp312-win_amd64.whl", hash = "sha256:720a5ca70e136b675af3af63db533c1c8c9181314d207568bbe79051f122669e"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:028cf9f2cae89e202d7b6593cd98db6759379f17a319b5faf4f9978d7084cdc6"}, - {file = "mypy-1.8.0-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:4e6d97288757e1ddba10dd9549ac27982e3e74a49d8d0179fc14d4365c7add66"}, - {file = "mypy-1.8.0-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7f1478736fcebb90f97e40aff11a5f253af890c845ee0c850fe80aa060a267c6"}, - {file = 
"mypy-1.8.0-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:42419861b43e6962a649068a61f4a4839205a3ef525b858377a960b9e2de6e0d"}, - {file = "mypy-1.8.0-cp38-cp38-win_amd64.whl", hash = "sha256:2b5b6c721bd4aabaadead3a5e6fa85c11c6c795e0c81a7215776ef8afc66de02"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:5c1538c38584029352878a0466f03a8ee7547d7bd9f641f57a0f3017a7c905b8"}, - {file = "mypy-1.8.0-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:4ef4be7baf08a203170f29e89d79064463b7fc7a0908b9d0d5114e8009c3a259"}, - {file = "mypy-1.8.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7178def594014aa6c35a8ff411cf37d682f428b3b5617ca79029d8ae72f5402b"}, - {file = "mypy-1.8.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:ab3c84fa13c04aeeeabb2a7f67a25ef5d77ac9d6486ff33ded762ef353aa5592"}, - {file = "mypy-1.8.0-cp39-cp39-win_amd64.whl", hash = "sha256:99b00bc72855812a60d253420d8a2eae839b0afa4938f09f4d2aa9bb4654263a"}, - {file = "mypy-1.8.0-py3-none-any.whl", hash = "sha256:538fd81bb5e430cc1381a443971c0475582ff9f434c16cd46d2c66763ce85d9d"}, - {file = "mypy-1.8.0.tar.gz", hash = "sha256:6ff8b244d7085a0b425b56d327b480c3b29cafbd2eff27316a004f9a7391ae07"}, -] - -[package.dependencies] -mypy-extensions = ">=1.0.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -typing-extensions = ">=4.1.0" - -[package.extras] -dmypy = ["psutil (>=4.0)"] -install-types = ["pip"] -mypyc = ["setuptools (>=50)"] -reports = ["lxml"] - -[[package]] -name = "mypy-extensions" -version = "1.0.0" -description = "Type system extensions for programs checked with the mypy type checker." 
-optional = false -python-versions = ">=3.5" -files = [ - {file = "mypy_extensions-1.0.0-py3-none-any.whl", hash = "sha256:4392f6c0eb8a5668a69e23d168ffa70f0be9ccfd32b5cc2d26a34ae5b844552d"}, - {file = "mypy_extensions-1.0.0.tar.gz", hash = "sha256:75dbf8955dc00442a438fc4d0666508a9a97b6bd41aa2f0ffe9d2f2725af0782"}, -] - -[[package]] -name = "orjson" -version = "3.9.15" -description = "Fast, correct Python JSON library supporting dataclasses, datetimes, and numpy" -optional = false -python-versions = ">=3.8" -files = [ - {file = "orjson-3.9.15-cp310-cp310-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:d61f7ce4727a9fa7680cd6f3986b0e2c732639f46a5e0156e550e35258aa313a"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4feeb41882e8aa17634b589533baafdceb387e01e117b1ec65534ec724023d04"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:fbbeb3c9b2edb5fd044b2a070f127a0ac456ffd079cb82746fc84af01ef021a4"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:b66bcc5670e8a6b78f0313bcb74774c8291f6f8aeef10fe70e910b8040f3ab75"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2973474811db7b35c30248d1129c64fd2bdf40d57d84beed2a9a379a6f57d0ab"}, - {file = "orjson-3.9.15-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9fe41b6f72f52d3da4db524c8653e46243c8c92df826ab5ffaece2dba9cccd58"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:4228aace81781cc9d05a3ec3a6d2673a1ad0d8725b4e915f1089803e9efd2b99"}, - {file = "orjson-3.9.15-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:6f7b65bfaf69493c73423ce9db66cfe9138b2f9ef62897486417a8fcb0a92bfe"}, - {file = "orjson-3.9.15-cp310-none-win32.whl", hash = "sha256:2d99e3c4c13a7b0fb3792cc04c2829c9db07838fb6973e578b85c1745e7d0ce7"}, - {file = 
"orjson-3.9.15-cp310-none-win_amd64.whl", hash = "sha256:b725da33e6e58e4a5d27958568484aa766e825e93aa20c26c91168be58e08cbb"}, - {file = "orjson-3.9.15-cp311-cp311-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:c8e8fe01e435005d4421f183038fc70ca85d2c1e490f51fb972db92af6e047c2"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:87f1097acb569dde17f246faa268759a71a2cb8c96dd392cd25c668b104cad2f"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:ff0f9913d82e1d1fadbd976424c316fbc4d9c525c81d047bbdd16bd27dd98cfc"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:8055ec598605b0077e29652ccfe9372247474375e0e3f5775c91d9434e12d6b1"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d6768a327ea1ba44c9114dba5fdda4a214bdb70129065cd0807eb5f010bfcbb5"}, - {file = "orjson-3.9.15-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:12365576039b1a5a47df01aadb353b68223da413e2e7f98c02403061aad34bde"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:71c6b009d431b3839d7c14c3af86788b3cfac41e969e3e1c22f8a6ea13139404"}, - {file = "orjson-3.9.15-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e18668f1bd39e69b7fed19fa7cd1cd110a121ec25439328b5c89934e6d30d357"}, - {file = "orjson-3.9.15-cp311-none-win32.whl", hash = "sha256:62482873e0289cf7313461009bf62ac8b2e54bc6f00c6fabcde785709231a5d7"}, - {file = "orjson-3.9.15-cp311-none-win_amd64.whl", hash = "sha256:b3d336ed75d17c7b1af233a6561cf421dee41d9204aa3cfcc6c9c65cd5bb69a8"}, - {file = "orjson-3.9.15-cp312-cp312-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:82425dd5c7bd3adfe4e94c78e27e2fa02971750c2b7ffba648b0f5d5cc016a73"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", 
hash = "sha256:2c51378d4a8255b2e7c1e5cc430644f0939539deddfa77f6fac7b56a9784160a"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:6ae4e06be04dc00618247c4ae3f7c3e561d5bc19ab6941427f6d3722a0875ef7"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:bcef128f970bb63ecf9a65f7beafd9b55e3aaf0efc271a4154050fc15cdb386e"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:b72758f3ffc36ca566ba98a8e7f4f373b6c17c646ff8ad9b21ad10c29186f00d"}, - {file = "orjson-3.9.15-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:10c57bc7b946cf2efa67ac55766e41764b66d40cbd9489041e637c1304400494"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:946c3a1ef25338e78107fba746f299f926db408d34553b4754e90a7de1d44068"}, - {file = "orjson-3.9.15-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:2f256d03957075fcb5923410058982aea85455d035607486ccb847f095442bda"}, - {file = "orjson-3.9.15-cp312-none-win_amd64.whl", hash = "sha256:5bb399e1b49db120653a31463b4a7b27cf2fbfe60469546baf681d1b39f4edf2"}, - {file = "orjson-3.9.15-cp38-cp38-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:b17f0f14a9c0ba55ff6279a922d1932e24b13fc218a3e968ecdbf791b3682b25"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:7f6cbd8e6e446fb7e4ed5bac4661a29e43f38aeecbf60c4b900b825a353276a1"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:76bc6356d07c1d9f4b782813094d0caf1703b729d876ab6a676f3aaa9a47e37c"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fdfa97090e2d6f73dced247a2f2d8004ac6449df6568f30e7fa1a045767c69a6"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = 
"sha256:7413070a3e927e4207d00bd65f42d1b780fb0d32d7b1d951f6dc6ade318e1b5a"}, - {file = "orjson-3.9.15-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cf1596680ac1f01839dba32d496136bdd5d8ffb858c280fa82bbfeb173bdd40"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_aarch64.whl", hash = "sha256:809d653c155e2cc4fd39ad69c08fdff7f4016c355ae4b88905219d3579e31eb7"}, - {file = "orjson-3.9.15-cp38-cp38-musllinux_1_2_x86_64.whl", hash = "sha256:920fa5a0c5175ab14b9c78f6f820b75804fb4984423ee4c4f1e6d748f8b22bc1"}, - {file = "orjson-3.9.15-cp38-none-win32.whl", hash = "sha256:2b5c0f532905e60cf22a511120e3719b85d9c25d0e1c2a8abb20c4dede3b05a5"}, - {file = "orjson-3.9.15-cp38-none-win_amd64.whl", hash = "sha256:67384f588f7f8daf040114337d34a5188346e3fae6c38b6a19a2fe8c663a2f9b"}, - {file = "orjson-3.9.15-cp39-cp39-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:6fc2fe4647927070df3d93f561d7e588a38865ea0040027662e3e541d592811e"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:34cbcd216e7af5270f2ffa63a963346845eb71e174ea530867b7443892d77180"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:f541587f5c558abd93cb0de491ce99a9ef8d1ae29dd6ab4dbb5a13281ae04cbd"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:92255879280ef9c3c0bcb327c5a1b8ed694c290d61a6a532458264f887f052cb"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:05a1f57fb601c426635fcae9ddbe90dfc1ed42245eb4c75e4960440cac667262"}, - {file = "orjson-3.9.15-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ede0bde16cc6e9b96633df1631fbcd66491d1063667f260a4f2386a098393790"}, - {file = "orjson-3.9.15-cp39-cp39-musllinux_1_2_aarch64.whl", hash = "sha256:e88b97ef13910e5f87bcbc4dd7979a7de9ba8702b54d3204ac587e83639c0c2b"}, - {file = 
"orjson-3.9.15-cp39-cp39-musllinux_1_2_x86_64.whl", hash = "sha256:57d5d8cf9c27f7ef6bc56a5925c7fbc76b61288ab674eb352c26ac780caa5b10"}, - {file = "orjson-3.9.15-cp39-none-win32.whl", hash = "sha256:001f4eb0ecd8e9ebd295722d0cbedf0748680fb9998d3993abaed2f40587257a"}, - {file = "orjson-3.9.15-cp39-none-win_amd64.whl", hash = "sha256:ea0b183a5fe6b2b45f3b854b0d19c4e932d6f5934ae1f723b07cf9560edd4ec7"}, - {file = "orjson-3.9.15.tar.gz", hash = "sha256:95cae920959d772f30ab36d3b25f83bb0f3be671e986c72ce22f8fa700dae061"}, -] - -[[package]] -name = "packaging" -version = "23.2" -description = "Core utilities for Python packages" -optional = false -python-versions = ">=3.7" -files = [ - {file = "packaging-23.2-py3-none-any.whl", hash = "sha256:8c491190033a9af7e1d931d0b5dacc2ef47509b34dd0de67ed209b5203fc88c7"}, - {file = "packaging-23.2.tar.gz", hash = "sha256:048fb0e9405036518eaaf48a55953c750c11e1a1b68e0dd1a9d62ed0c092cfc5"}, -] - -[[package]] -name = "pathspec" -version = "0.12.1" -description = "Utility library for gitignore style pattern matching of file paths." -optional = false -python-versions = ">=3.8" -files = [ - {file = "pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08"}, - {file = "pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712"}, -] - -[[package]] -name = "platformdirs" -version = "4.1.0" -description = "A small Python package for determining appropriate platform-specific dirs, e.g. a \"user data dir\"." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "platformdirs-4.1.0-py3-none-any.whl", hash = "sha256:11c8f37bcca40db96d8144522d925583bdb7a31f7b0e37e3ed4318400a8e2380"}, - {file = "platformdirs-4.1.0.tar.gz", hash = "sha256:906d548203468492d432bcb294d4bc2fff751bf84971fbb2c10918cc206ee420"}, -] - -[package.extras] -docs = ["furo (>=2023.7.26)", "proselint (>=0.13)", "sphinx (>=7.1.1)", "sphinx-autodoc-typehints (>=1.24)"] -test = ["appdirs (==1.4.4)", "covdefaults (>=2.3)", "pytest (>=7.4)", "pytest-cov (>=4.1)", "pytest-mock (>=3.11.1)"] - -[[package]] -name = "pluggy" -version = "1.3.0" -description = "plugin and hook calling mechanisms for python" -optional = false -python-versions = ">=3.8" -files = [ - {file = "pluggy-1.3.0-py3-none-any.whl", hash = "sha256:d89c696a773f8bd377d18e5ecda92b7a3793cbe66c87060a6fb58c7b6e1061f7"}, - {file = "pluggy-1.3.0.tar.gz", hash = "sha256:cf61ae8f126ac6f7c451172cf30e3e43d3ca77615509771b3a984a0730651e12"}, -] - -[package.extras] -dev = ["pre-commit", "tox"] -testing = ["pytest", "pytest-benchmark"] - -[[package]] -name = "protobuf" -version = "4.25.1" -description = "" -optional = false -python-versions = ">=3.8" -files = [ - {file = "protobuf-4.25.1-cp310-abi3-win32.whl", hash = "sha256:193f50a6ab78a970c9b4f148e7c750cfde64f59815e86f686c22e26b4fe01ce7"}, - {file = "protobuf-4.25.1-cp310-abi3-win_amd64.whl", hash = "sha256:3497c1af9f2526962f09329fd61a36566305e6c72da2590ae0d7d1322818843b"}, - {file = "protobuf-4.25.1-cp37-abi3-macosx_10_9_universal2.whl", hash = "sha256:0bf384e75b92c42830c0a679b0cd4d6e2b36ae0cf3dbb1e1dfdda48a244f4bcd"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_aarch64.whl", hash = "sha256:0f881b589ff449bf0b931a711926e9ddaad3b35089cc039ce1af50b21a4ae8cb"}, - {file = "protobuf-4.25.1-cp37-abi3-manylinux2014_x86_64.whl", hash = "sha256:ca37bf6a6d0046272c152eea90d2e4ef34593aaa32e8873fc14c16440f22d4b7"}, - {file = "protobuf-4.25.1-cp38-cp38-win32.whl", hash = 
"sha256:abc0525ae2689a8000837729eef7883b9391cd6aa7950249dcf5a4ede230d5dd"}, - {file = "protobuf-4.25.1-cp38-cp38-win_amd64.whl", hash = "sha256:1484f9e692091450e7edf418c939e15bfc8fc68856e36ce399aed6889dae8bb0"}, - {file = "protobuf-4.25.1-cp39-cp39-win32.whl", hash = "sha256:8bdbeaddaac52d15c6dce38c71b03038ef7772b977847eb6d374fc86636fa510"}, - {file = "protobuf-4.25.1-cp39-cp39-win_amd64.whl", hash = "sha256:becc576b7e6b553d22cbdf418686ee4daa443d7217999125c045ad56322dda10"}, - {file = "protobuf-4.25.1-py3-none-any.whl", hash = "sha256:a19731d5e83ae4737bb2a089605e636077ac001d18781b3cf489b9546c7c80d6"}, - {file = "protobuf-4.25.1.tar.gz", hash = "sha256:57d65074b4f5baa4ab5da1605c02be90ac20c8b40fb137d6a8df9f416b0d0ce2"}, -] - -[[package]] -name = "psutil" -version = "5.9.7" -description = "Cross-platform lib for process and system monitoring in Python." -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*, !=3.4.*, !=3.5.*" -files = [ - {file = "psutil-5.9.7-cp27-cp27m-macosx_10_9_x86_64.whl", hash = "sha256:0bd41bf2d1463dfa535942b2a8f0e958acf6607ac0be52265ab31f7923bcd5e6"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_i686.whl", hash = "sha256:5794944462509e49d4d458f4dbfb92c47539e7d8d15c796f141f474010084056"}, - {file = "psutil-5.9.7-cp27-cp27m-manylinux2010_x86_64.whl", hash = "sha256:fe361f743cb3389b8efda21980d93eb55c1f1e3898269bc9a2a1d0bb7b1f6508"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_i686.whl", hash = "sha256:e469990e28f1ad738f65a42dcfc17adaed9d0f325d55047593cb9033a0ab63df"}, - {file = "psutil-5.9.7-cp27-cp27mu-manylinux2010_x86_64.whl", hash = "sha256:3c4747a3e2ead1589e647e64aad601981f01b68f9398ddf94d01e3dc0d1e57c7"}, - {file = "psutil-5.9.7-cp27-none-win32.whl", hash = "sha256:1d4bc4a0148fdd7fd8f38e0498639ae128e64538faa507df25a20f8f7fb2341c"}, - {file = "psutil-5.9.7-cp27-none-win_amd64.whl", hash = "sha256:4c03362e280d06bbbfcd52f29acd79c733e0af33d707c54255d21029b8b32ba6"}, - {file = 
"psutil-5.9.7-cp36-abi3-macosx_10_9_x86_64.whl", hash = "sha256:ea36cc62e69a13ec52b2f625c27527f6e4479bca2b340b7a452af55b34fcbe2e"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_i686.manylinux2010_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1132704b876e58d277168cd729d64750633d5ff0183acf5b3c986b8466cd0284"}, - {file = "psutil-5.9.7-cp36-abi3-manylinux_2_12_x86_64.manylinux2010_x86_64.manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fe8b7f07948f1304497ce4f4684881250cd859b16d06a1dc4d7941eeb6233bfe"}, - {file = "psutil-5.9.7-cp36-cp36m-win32.whl", hash = "sha256:b27f8fdb190c8c03914f908a4555159327d7481dac2f01008d483137ef3311a9"}, - {file = "psutil-5.9.7-cp36-cp36m-win_amd64.whl", hash = "sha256:44969859757f4d8f2a9bd5b76eba8c3099a2c8cf3992ff62144061e39ba8568e"}, - {file = "psutil-5.9.7-cp37-abi3-win32.whl", hash = "sha256:c727ca5a9b2dd5193b8644b9f0c883d54f1248310023b5ad3e92036c5e2ada68"}, - {file = "psutil-5.9.7-cp37-abi3-win_amd64.whl", hash = "sha256:f37f87e4d73b79e6c5e749440c3113b81d1ee7d26f21c19c47371ddea834f414"}, - {file = "psutil-5.9.7-cp38-abi3-macosx_11_0_arm64.whl", hash = "sha256:032f4f2c909818c86cea4fe2cc407f1c0f0cde8e6c6d702b28b8ce0c0d143340"}, - {file = "psutil-5.9.7.tar.gz", hash = "sha256:3f02134e82cfb5d089fddf20bb2e03fd5cd52395321d1c8458a9e58500ff417c"}, -] - -[package.extras] -test = ["enum34", "ipaddress", "mock", "pywin32", "wmi"] - -[[package]] -name = "pygments" -version = "2.17.2" -description = "Pygments is a syntax highlighting package written in Python." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "pygments-2.17.2-py3-none-any.whl", hash = "sha256:b27c2826c47d0f3219f29554824c30c5e8945175d888647acd804ddd04af846c"}, - {file = "pygments-2.17.2.tar.gz", hash = "sha256:da46cec9fd2de5be3a8a784f434e4c4ab670b4ff54d605c4c2717e9d49c4c367"}, -] - -[package.extras] -plugins = ["importlib-metadata"] -windows-terminal = ["colorama (>=0.4.6)"] - -[[package]] -name = "pylint" -version = "3.0.3" -description = "python code static checker" -optional = false -python-versions = ">=3.8.0" -files = [ - {file = "pylint-3.0.3-py3-none-any.whl", hash = "sha256:7a1585285aefc5165db81083c3e06363a27448f6b467b3b0f30dbd0ac1f73810"}, - {file = "pylint-3.0.3.tar.gz", hash = "sha256:58c2398b0301e049609a8429789ec6edf3aabe9b6c5fec916acd18639c16de8b"}, -] - -[package.dependencies] -astroid = ">=3.0.1,<=3.1.0-dev0" -colorama = {version = ">=0.4.5", markers = "sys_platform == \"win32\""} -dill = [ - {version = ">=0.2", markers = "python_version < \"3.11\""}, - {version = ">=0.3.7", markers = "python_version >= \"3.12\""}, - {version = ">=0.3.6", markers = "python_version >= \"3.11\" and python_version < \"3.12\""}, -] -isort = ">=4.2.5,<5.13.0 || >5.13.0,<6" -mccabe = ">=0.6,<0.8" -platformdirs = ">=2.2.0" -tomli = {version = ">=1.1.0", markers = "python_version < \"3.11\""} -tomlkit = ">=0.10.1" -typing-extensions = {version = ">=3.10.0", markers = "python_version < \"3.10\""} - -[package.extras] -spelling = ["pyenchant (>=3.2,<4.0)"] -testutils = ["gitpython (>3)"] - -[[package]] -name = "pysocks" -version = "1.7.1" -description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." 
-optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" -files = [ - {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, - {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, - {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, -] - -[[package]] -name = "pytest" -version = "7.4.3" -description = "pytest: simple powerful testing with Python" -optional = false -python-versions = ">=3.7" -files = [ - {file = "pytest-7.4.3-py3-none-any.whl", hash = "sha256:0d009c083ea859a71b76adf7c1d502e4bc170b80a8ef002da5806527b9591fac"}, - {file = "pytest-7.4.3.tar.gz", hash = "sha256:d989d136982de4e3b29dabcc838ad581c64e8ed52c11fbe86ddebd9da0818cd5"}, -] - -[package.dependencies] -colorama = {version = "*", markers = "sys_platform == \"win32\""} -exceptiongroup = {version = ">=1.0.0rc8", markers = "python_version < \"3.11\""} -iniconfig = "*" -packaging = "*" -pluggy = ">=0.12,<2.0" -tomli = {version = ">=1.0.0", markers = "python_version < \"3.11\""} - -[package.extras] -testing = ["argcomplete", "attrs (>=19.2.0)", "hypothesis (>=3.56)", "mock", "nose", "pygments (>=2.7.2)", "requests", "setuptools", "xmlschema"] - -[[package]] -name = "readchar" -version = "4.0.5" -description = "Library to easily read single chars and key strokes" -optional = false -python-versions = ">=3.7" -files = [ - {file = "readchar-4.0.5-py3-none-any.whl", hash = "sha256:76ec784a5dd2afac3b7da8003329834cdd9824294c260027f8c8d2e4d0a78f43"}, - {file = "readchar-4.0.5.tar.gz", hash = "sha256:08a456c2d7c1888cde3f4688b542621b676eb38cd6cfed7eb6cb2e2905ddc826"}, -] - -[package.dependencies] -setuptools = ">=41.0" - -[[package]] -name = "requests" -version = "2.31.0" -description = "Python HTTP for Humans." 
-optional = false -python-versions = ">=3.7" -files = [ - {file = "requests-2.31.0-py3-none-any.whl", hash = "sha256:58cd2187c01e70e6e26505bca751777aa9f2ee0b7f4300988b709f44e013003f"}, - {file = "requests-2.31.0.tar.gz", hash = "sha256:942c5a758f98d790eaed1a29cb6eefc7ffb0d1cf7af05c3d2791656dbd6ad1e1"}, -] - -[package.dependencies] -certifi = ">=2017.4.17" -charset-normalizer = ">=2,<4" -idna = ">=2.5,<4" -PySocks = {version = ">=1.5.6,<1.5.7 || >1.5.7", optional = true, markers = "extra == \"socks\""} -urllib3 = ">=1.21.1,<3" - -[package.extras] -socks = ["PySocks (>=1.5.6,!=1.5.7)"] -use-chardet-on-py3 = ["chardet (>=3.0.2,<6)"] - -[[package]] -name = "rich" -version = "13.7.1" -description = "Render rich text, tables, progress bars, syntax highlighting, markdown and more to the terminal" -optional = false -python-versions = ">=3.7.0" -files = [ - {file = "rich-13.7.1-py3-none-any.whl", hash = "sha256:4edbae314f59eb482f54e9e30bf00d33350aaa94f4bfcd4e9e3110e64d0d7222"}, - {file = "rich-13.7.1.tar.gz", hash = "sha256:9be308cb1fe2f1f57d67ce99e95af38a1e2bc71ad9813b0e247cf7ffbcc3a432"}, -] - -[package.dependencies] -markdown-it-py = ">=2.2.0" -pygments = ">=2.13.0,<3.0.0" - -[package.extras] -jupyter = ["ipywidgets (>=7.5.1,<9)"] - -[[package]] -name = "setuptools" -version = "69.0.3" -description = "Easily download, build, install, upgrade, and uninstall Python packages" -optional = false -python-versions = ">=3.8" -files = [ - {file = "setuptools-69.0.3-py3-none-any.whl", hash = "sha256:385eb4edd9c9d5c17540511303e39a147ce2fc04bc55289c322b9e5904fe2c05"}, - {file = "setuptools-69.0.3.tar.gz", hash = "sha256:be1af57fc409f93647f2e8e4573a142ed38724b8cdd389706a867bb4efcf1e78"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "pygments-github-lexers (==0.0.5)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-favicon", "sphinx-inline-tabs", "sphinx-lint", "sphinx-notfound-page (>=1,<2)", "sphinx-reredirects", 
"sphinxcontrib-towncrier"] -testing = ["build[virtualenv]", "filelock (>=3.4.0)", "flake8-2020", "ini2toml[lite] (>=0.9)", "jaraco.develop (>=7.21)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "pip (>=19.1)", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-mypy (>=0.9.1)", "pytest-perf", "pytest-ruff", "pytest-timeout", "pytest-xdist", "tomli-w (>=1.0.0)", "virtualenv (>=13.0.0)", "wheel"] -testing-integration = ["build[virtualenv] (>=1.0.3)", "filelock (>=3.4.0)", "jaraco.envs (>=2.2)", "jaraco.path (>=3.2.0)", "packaging (>=23.1)", "pytest", "pytest-enabler", "pytest-xdist", "tomli", "virtualenv (>=13.0.0)", "wheel"] - -[[package]] -name = "six" -version = "1.16.0" -description = "Python 2 and 3 compatibility utilities" -optional = false -python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*" -files = [ - {file = "six-1.16.0-py2.py3-none-any.whl", hash = "sha256:8abb2f1d86890a2dfb989f9a77cfcfd3e47c2a354b01111771326f8aa26e0254"}, - {file = "six-1.16.0.tar.gz", hash = "sha256:1e61c37477a1626458e36f7b1d82aa5c9b094fa4802892072e49de9c60c4c926"}, -] - -[[package]] -name = "soupsieve" -version = "2.5" -description = "A modern CSS selector implementation for Beautiful Soup." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "soupsieve-2.5-py3-none-any.whl", hash = "sha256:eaa337ff55a1579b6549dc679565eac1e3d000563bcb1c8ab0d0fefbc0c2cdc7"}, - {file = "soupsieve-2.5.tar.gz", hash = "sha256:5663d5a7b3bfaeee0bc4372e7fc48f9cff4940b3eec54a6451cc5299f1097690"}, -] - -[[package]] -name = "tomli" -version = "2.0.1" -description = "A lil' TOML parser" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomli-2.0.1-py3-none-any.whl", hash = "sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc"}, - {file = "tomli-2.0.1.tar.gz", hash = "sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f"}, -] - -[[package]] -name = "tomlkit" -version = "0.12.3" -description = "Style preserving TOML library" -optional = false -python-versions = ">=3.7" -files = [ - {file = "tomlkit-0.12.3-py3-none-any.whl", hash = "sha256:b0a645a9156dc7cb5d3a1f0d4bab66db287fcb8e0430bdd4664a095ea16414ba"}, - {file = "tomlkit-0.12.3.tar.gz", hash = "sha256:75baf5012d06501f07bee5bf8e801b9f343e7aac5a92581f20f80ce632e6b5a4"}, -] - -[[package]] -name = "types-beautifulsoup4" -version = "4.12.0.7" -description = "Typing stubs for beautifulsoup4" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-beautifulsoup4-4.12.0.7.tar.gz", hash = "sha256:59980028d29bf55d0db359efa305b75bacf0cb92e3f3f6b3fd408f2531df274c"}, - {file = "types_beautifulsoup4-4.12.0.7-py3-none-any.whl", hash = "sha256:8b03b054cb2e62abf82bbbeda57a07257026f4ed9010ef17d8f8eff43bb1f9b7"}, -] - -[package.dependencies] -types-html5lib = "*" - -[[package]] -name = "types-html5lib" -version = "1.1.11.15" -description = "Typing stubs for html5lib" -optional = false -python-versions = "*" -files = [ - {file = "types-html5lib-1.1.11.15.tar.gz", hash = "sha256:80e1a2062d22a3affe5c28d97da30bffbf3a076d393c80fc6f1671216c1bd492"}, - {file = "types_html5lib-1.1.11.15-py3-none-any.whl", hash = 
"sha256:16fe936d99b9f7fc210e2e21a2aed1b6bbbc554ad8242a6ef75f6f2bddb27e58"}, -] - -[[package]] -name = "types-markdown" -version = "3.5.0.3" -description = "Typing stubs for Markdown" -optional = false -python-versions = ">=3.7" -files = [ - {file = "types-Markdown-3.5.0.3.tar.gz", hash = "sha256:9afd38a8f53e19d43de3f8d89742b3674b5736767806ed9356d64ccb09f76439"}, - {file = "types_Markdown-3.5.0.3-py3-none-any.whl", hash = "sha256:2299b9086c695f408a3ebabf820f1fba3b239f1b3bfdbb32bf42d530b42cdd83"}, -] - -[[package]] -name = "typing-extensions" -version = "4.9.0" -description = "Backported and Experimental Type Hints for Python 3.8+" -optional = false -python-versions = ">=3.8" -files = [ - {file = "typing_extensions-4.9.0-py3-none-any.whl", hash = "sha256:af72aea155e91adfc61c3ae9e0e342dbc0cba726d6cba4b6c72c1f34e47291cd"}, - {file = "typing_extensions-4.9.0.tar.gz", hash = "sha256:23478f88c37f27d76ac8aee6c905017a143b0b1b886c3c9f66bc2fd94f9f5783"}, -] - -[[package]] -name = "urllib3" -version = "2.1.0" -description = "HTTP library with thread-safe connection pooling, file post, and more." 
-optional = false -python-versions = ">=3.8" -files = [ - {file = "urllib3-2.1.0-py3-none-any.whl", hash = "sha256:55901e917a5896a349ff771be919f8bd99aff50b79fe58fec595eb37bbc56bb3"}, - {file = "urllib3-2.1.0.tar.gz", hash = "sha256:df7aa8afb0148fa78488e7899b2c59b5f4ffcfa82e6c54ccb9dd37c1d7b52d54"}, -] - -[package.extras] -brotli = ["brotli (>=1.0.9)", "brotlicffi (>=0.8.0)"] -socks = ["pysocks (>=1.5.6,!=1.5.7,<2.0)"] -zstd = ["zstandard (>=0.18.0)"] - -[[package]] -name = "webencodings" -version = "0.5.1" -description = "Character encoding aliases for legacy web content" -optional = false -python-versions = "*" -files = [ - {file = "webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78"}, - {file = "webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923"}, -] - -[[package]] -name = "zipp" -version = "3.17.0" -description = "Backport of pathlib-compatible object wrapper for zip files" -optional = false -python-versions = ">=3.8" -files = [ - {file = "zipp-3.17.0-py3-none-any.whl", hash = "sha256:0e923e726174922dce09c53c59ad483ff7bbb8e572e00c7f7c46b88556409f31"}, - {file = "zipp-3.17.0.tar.gz", hash = "sha256:84e64a1c28cf7e91ed2078bb8cc8c259cb19b76942096c8d7b84947690cabaf0"}, -] - -[package.extras] -docs = ["furo", "jaraco.packaging (>=9.3)", "jaraco.tidelift (>=1.4)", "rst.linker (>=1.9)", "sphinx (<7.2.5)", "sphinx (>=3.5)", "sphinx-lint"] -testing = ["big-O", "jaraco.functools", "jaraco.itertools", "more-itertools", "pytest (>=6)", "pytest-black (>=0.3.7)", "pytest-checkdocs (>=2.4)", "pytest-cov", "pytest-enabler (>=2.2)", "pytest-ignore-flaky", "pytest-mypy (>=0.9.1)", "pytest-ruff"] - -[metadata] -lock-version = "2.0" -python-versions = "^3.9" -content-hash = "5ea1413caacec1f790fb745888b50cc4e674682863ab6435aed0f296f7923cc3" diff --git a/pyproject.toml b/pyproject.toml index 6830888..41560b3 100644 --- a/pyproject.toml +++ b/pyproject.toml 
@@ -1,63 +1,61 @@ -[tool.poetry] -name = "apy" -version = "0.12.3" +[project] +name = "apyanki" +version = "0.19.4" description = "CLI script for interacting with local Anki collection" -authors = ["Karl Yngve Lervåg "] +authors = [{ name = "Karl Yngve Lervåg", email = "karl.yngve@lervag.net" }] +requires-python = ">=3.14" readme = "README.md" +classifiers = [ + "Programming Language :: Python :: 3", + "License :: OSI Approved :: MIT License", + "Operating System :: OS Independent", + "Environment :: Console", +] +dependencies = [ + "beautifulsoup4", + "click", + "markdown", + "readchar", + "markdownify", + "anki>=25.9", + "html5lib", + "rich", +] -[tool.poetry.dependencies] -python = "^3.9" -beautifulsoup4 = "^4.12.2" -click = "^8.1.3" -markdown = "^3.4.3" -readchar = "^4.0.5" -markdownify = "^0.11.6" -anki = "^23.10" -html5lib = "^1.1" -rich = "^13.7.1" - -[tool.poetry.scripts] -apy = "apy.cli:main" - -[tool.poetry.group.dev.dependencies] -pytest = "^7.3.1" -pylint = "^3.0.2" -bump2version = "^1.0.1" -black = "^24.3.0" -mypy = "^1.3.0" -types-beautifulsoup4 = "^4.12.0.5" -types-markdown = "^3.4.2.9" - -[tool.pytest] -filterwarnings = ["ignore::DeprecationWarning:html5lib.*:"] - -[tool.pylint.master] -init-hook = 'import sys; sys.path.append("/usr/share/anki")' - -[tool.pylint.reports] -output-format = "colorized" +[project.urls] +Homepage = "https://github.com/lervag/apy" +"Bug Tracker" = "https://github.com/lervag/apy/issues" -[tool.pylint.format] -max-line-length = "88" +[project.scripts] +apy = "apyanki.cli:main" -[tool.pylint."messages control"] -max-statements = 75 -disable = [ - "invalid-name", - "too-many-branches", - "too-many-instance-attributes", +[dependency-groups] +dev = [ + "pytest", + "mypy", + "types-beautifulsoup4", + "types-markdown", + "ruff", ] [build-system] -requires = ["poetry-core"] -build-backend = "poetry.core.masonry.api" +requires = ["hatchling"] +build-backend = "hatchling.build" + +[tool.pytest] +filterwarnings = 
["ignore::DeprecationWarning:html5lib.*:"] [tool.mypy] -exclude = ".venv" +exclude = ["^tests/.*\\.py$"] strict = true [[tool.mypy.overrides]] -module = [ - "markdownify", -] +module = ["markdownify"] ignore_missing_imports = true + +[tool.basedpyright] +ignore = ["tests/*.py"] +reportImportCycles = false +reportExplicitAny = false +reportAny = false +reportImplicitStringConcatenation = false diff --git a/src/apy/cli.py b/src/apy/cli.py deleted file mode 100644 index 3a2dfe8..0000000 --- a/src/apy/cli.py +++ /dev/null @@ -1,480 +0,0 @@ -"""A script to interact with the Anki database""" - -import os -from pathlib import Path -import sys -from typing import Any, Optional - -import click - -from apy import __version__ -from apy.anki import Anki -from apy.config import cfg, cfg_file -from apy.console import console -from apy.note import Note - -CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]} - - -@click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) -@click.option("-b", "--base-path", help="Set Anki base directory") -@click.option("-p", "--profile-name", help="Specify name of Anki profile to use") -@click.option("-V", "--version", is_flag=True, help="Show apy version") -@click.pass_context -def main(ctx: Any, base_path: str, profile_name: str, version: bool) -> None: - """A script to interact with the Anki database. - - The base_path directory may be specified with the -b / --base-path option. For - convenience, it may also be specified in the config file `~/.config/apy/apy.json` - or with the environment variable APY_BASE or ANKI_BASE. This should point to the - base directory where Anki stores its database and related files. See the Anki - documentation for information about where this is located on different systems - (https://docs.ankiweb.net/files.html#file-locations). - - A few sub commands will open an editor for input. Vim is used by default. - The input is parsed when one saves and quits. 
To abort, one should exit the - editor with a non-zero exit code. In Vim, one can do this with the `:cquit` - command. - - One may specify a different editor with the EDITOR environment variable. - For example, to use emacs one can add this to one's `~/.bashrc` (or similar) - file: - - export EDITOR=emacs - - Note: Use `apy subcmd --help` to get detailed help for a given subcommand. - """ - if version: - console.print(f"apy {__version__}") - sys.exit() - - if base_path: - cfg["base_path"] = os.path.abspath(os.path.expanduser(base_path)) - - if profile_name: - cfg["profile_name"] = profile_name - - if ctx.invoked_subcommand is None: - ctx.invoke(info) - - -@main.command("add-single") -@click.argument("fields", nargs=-1) -@click.option("-p", "--parse-markdown", is_flag=True, help="Parse input as Markdown.") -@click.option("-s", "--preset", default="default", help="Specify a preset.") -@click.option("-t", "--tags", help="Specify default tags for new cards.") -@click.option( - "-m", "--model", "model_name", help="Specify default model for new cards." -) -@click.option("-d", "--deck", help="Specify default deck for new cards.") -def add_single( - fields: list[str], - parse_markdown: bool, - tags: Optional[str] = None, - preset: Optional[str] = None, - model_name: Optional[str] = None, - deck: Optional[str] = None, -) -> None: - """Add a single note from command line arguments. 
- - Examples: - - \b - # Add a note to the default deck - apy add-single myfront myback - - \b - # Add a cloze deletion note to the default deck - apy add-single -m Cloze "cloze {{c1::deletion}}" "extra text" - - \b - # Add a note to deck "MyDeck" with tags 'my-tag' and 'new-tag' - apy add-single -t "my-tag new-tag" -d MyDeck myfront myback - """ - with Anki(**cfg) as a: - tags_preset = " ".join(cfg["presets"][preset]["tags"]) - if not tags: - tags = tags_preset - else: - tags += " " + tags_preset - - if not model_name: - model_name = cfg["presets"][preset]["model"] - - a.add_notes_single(fields, parse_markdown, tags, model_name, deck) - - -@main.command() -@click.option("-t", "--tags", default="", help="Specify default tags for new cards.") -@click.option( - "-m", - "--model", - "model_name", - default="Basic", - help=("Specify default model for new cards."), -) -@click.option("-d", "--deck", help="Specify default deck for new cards.") -def add(tags: str, model_name: str, deck: str) -> None: - """Add notes interactively from terminal. - - Examples: - - \b - # Add notes to deck "MyDeck" with tags 'my-tag' and 'new-tag' - apy add -t "my-tag new-tag" -d MyDeck - - \b - # Ask for the model and the deck for each new card - apy add -m ASK -d ask - """ - with Anki(**cfg) as a: - notes = a.add_notes_with_editor(tags, model_name, deck) - _added_notes_postprocessing(a, notes) - - -@main.command("add-from-file") -@click.argument("file", type=click.Path(exists=True, dir_okay=False)) -@click.option("-t", "--tags", default="", help="Specify default tags for new cards.") -@click.option("-d", "--deck", help="Specify default deck for new cards.") -def add_from_file(file: Path, tags: str, deck: str) -> None: - """Add notes from Markdown file. - - The example below should adequately specify the syntax. Any initial "key: value" - pairs specify default values for all the following notes. 
The following keys are - accepted: - - * model: The note model (required) - * tags: The note model (optional) - * deck: Which deck the note should be added to (optional) - * markdown: Set to "false" or "no" if apy should not use a markdown converter while - converting the input note to an Anki note. (optional) - - Here is the example Markdown input: - - // example.md - model: Basic - tags: marked - - # Note 1 - ## Front - Question? - - ## Back - Answer. - - # Note 2 - tag: silly-tag - - ## Front - Question? - - ## Back - Answer - - # Note 3 - model: NewModel - markdown: false (default is true) - - ## NewFront - FieldOne - - ## NewBack - FieldTwo - - ## FieldThree - FieldThree - """ - with Anki(**cfg) as a: - notes = a.add_notes_from_file(str(file), tags, deck) - _added_notes_postprocessing(a, notes) - - -def _added_notes_postprocessing(a: Anki, notes: list[Note]) -> None: - """Common postprocessing after 'apy add[-from-file]'.""" - n_notes = len(notes) - if n_notes == 0: - console.print("No notes added") - return - - decks = [a.col.decks.name(c.did) for n in notes for c in n.n.cards()] - n_decks = len(decks) - if n_decks == 0: - console.print("No notes added") - return - - if a.n_decks > 1: - if n_notes == 1: - console.print(f"Added note to deck: {decks[0]}") - elif n_decks > 1: - console.print(f"Added {n_notes} notes to {n_decks} different decks") - else: - console.print(f"Added {n_notes} notes to deck: {decks[0]}") - else: - console.print(f"Added {n_notes} notes") - - for note in notes: - cards = note.n.cards() - console.print(f"* nid: {note.n.id} (with {len(cards)} cards)") - for card in note.n.cards(): - console.print(f" * cid: {card.id}") - - -@main.command("check-media") -def check_media() -> None: - """Check media.""" - with Anki(**cfg) as a: - a.check_media() - - -@main.command() -def info() -> None: - """Print some basic statistics.""" - if cfg_file.exists(): - for key in cfg.keys(): - console.print(f"Config loaded: {key}") - console.print(f"Config file: 
{cfg_file}") - else: - console.print("Config file: Not found") - - with Anki(**cfg) as a: - console.print(f"Collection path: {a.col.path}") - console.print(f"Scheduler version: {a.col.sched_ver()}") - - if a.col.decks.count() > 1: - console.print("Decks:") - for name in sorted(a.deck_names): - console.print(f" - {name}") - - sum_notes = a.col.note_count() - sum_cards = a.col.card_count() - sum_due = len(a.col.find_notes("is:due")) - sum_marked = len(a.col.find_notes("tag:marked")) - sum_flagged = len(a.col.find_notes("-flag:0")) - sum_new = len(a.col.find_notes("is:new")) - sum_susp = len(a.col.find_notes("is:suspended")) - - console.print( - f"\n{'Model':24s} {'notes':>7s} {'cards':>7s} " - f"{'due':>7s} {'new':>7s} {'susp.':>7s} " - f"{'marked':>7s} {'flagged':>7s}" - ) - console.rule() - models = sorted(a.model_names) - for m in models: - nnotes = len(set(a.col.find_notes(f'"note:{m}"'))) - if nnotes == 0: - continue - ncards = len(a.find_cards(f'"note:{m}"')) - ndue = len(a.find_cards(f'"note:{m}" is:due')) - nmarked = len(a.find_cards(f'"note:{m}" tag:marked')) - nflagged = len(a.find_cards(f'"note:{m}" -flag:0')) - nnew = len(a.find_cards(f'"note:{m}" is:new')) - nsusp = len(a.find_cards(f'"note:{m}" is:suspended')) - name = m[:24] - console.print( - f"{name:24s} {nnotes:7d} {ncards:7d} " - f"{ndue:7d} {nnew:7d} {nsusp:7d} " - f"{nmarked:7d} {nflagged:7d}" - ) - console.rule() - console.print( - f"{'Sum':24s} {sum_notes:7d} {sum_cards:7d} " - f"{sum_due:7d} {sum_new:7d} {sum_susp:7d} " - f"{sum_marked:7d} {sum_flagged:7d}" - ) - console.rule() - - -@main.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) -def model() -> None: - """Interact with Anki models.""" - - -@model.command("edit-css") -@click.option( - "-m", - "--model-name", - default="Basic", - help="Specify for which model to edit CSS template.", -) -@click.option("-s", "--sync-after", is_flag=True, help="Perform sync after any change.") -def edit_css(model_name: str, sync_after: 
bool) -> None: - """Edit the CSS template for the specified model.""" - with Anki(**cfg) as a: - a.edit_model_css(model_name) - - if a.modified and sync_after: - a.sync() - a.modified = False - - -@model.command() -@click.argument("old-name") -@click.argument("new-name") -def rename(old_name: str, new_name: str) -> None: - """Rename model from old_name to new_name.""" - with Anki(**cfg) as a: - a.rename_model(old_name, new_name) - - -@main.command("list") -@click.argument("query", required=False, nargs=-1) -@click.option("-v", "--verbose", is_flag=True, help="Be verbose, show more info") -def list_cards(query: str, verbose: bool) -> None: - """List cards that match QUERY. - - The default QUERY is "tag:marked OR -flag:0". This default can be - customized in the config file `~/.config/apy/apy.json`, e.g. with - - \b - { - "query": "tag:marked OR tag:leech" - } - """ - if query: - query = " ".join(query) - else: - query = cfg["query"] - - with Anki(**cfg) as a: - a.list_cards(query, verbose) - - -@main.command() -@click.argument("query", required=False, nargs=-1) -@click.option( - "-m", - "--check-markdown-consistency", - is_flag=True, - help="Check for Markdown consistency", -) -@click.option( - "-n", - "--cmc-range", - default=7, - type=int, - help="Number of days backwards to check consistency", -) -def review(query: str, check_markdown_consistency: bool, cmc_range: int) -> None: - """Review/Edit notes that match QUERY. - - The default QUERY is "tag:marked OR -flag:0". This default can be - customized in the config file `~/.config/apy/apy.json`, e.g. 
with - - \b - { - "query": "tag:marked OR tag:leech" - } - """ - if query: - query = " ".join(query) - else: - query = cfg["query"] - - with Anki(**cfg) as a: - notes = list(a.find_notes(query)) - - # Add inconsistent notes - if check_markdown_consistency: - notes += [ - n - for n in a.find_notes(f"rated:{cmc_range}") - if not n.has_consistent_markdown() - ] - - number_of_notes = len(notes) - for i, note in enumerate(notes): - if not note.review(i, number_of_notes): - break - - -@main.command() -def sync() -> None: - """Synchronize collection with AnkiWeb.""" - with Anki(**cfg) as a: - a.sync() - - -@main.command() -@click.argument("query", required=False, nargs=-1) -@click.option("-a", "--add-tags", help="Add specified tags to matched notes.") -@click.option("-r", "--remove-tags", help="Remove specified tags from matched notes.") -def tag(query: str, add_tags: str, remove_tags: str) -> None: - """Add/Remove tags to/from notes that match QUERY. - - The default QUERY is "tag:marked OR -flag:0". This default can be - customized in the config file `~/.config/apy/apy.json`, e.g. with - - \b - { - "query": "tag:marked OR tag:leech" - } - - If neither of the options --add-tags or --remove-tags are supplied, then - this command simply lists all tags. 
- """ - if query: - query = " ".join(query) - else: - query = cfg["query"] - - with Anki(**cfg) as a: - if add_tags is None and remove_tags is None: - a.list_tags() - return - - n_notes = len(list(a.find_notes(query))) - if n_notes == 0: - console.print("No matching notes!") - raise click.Abort() - - console.print(f"The operation will be applied to {n_notes} matched notes:") - a.list_notes(query) - console.print("") - - if add_tags is not None: - console.print(f"Add tags: [green]{add_tags}") - if remove_tags is not None: - console.print(f"Remove tags: [red]{remove_tags}") - - if not console.confirm("Continue?"): - raise click.Abort() - - if add_tags is not None: - a.change_tags(query, add_tags) - - if remove_tags is not None: - a.change_tags(query, remove_tags, add=False) - - -@main.command() -@click.argument("position", type=int, required=True, nargs=1) -@click.argument("query", required=True, nargs=-1) -def reposition(position: int, query: str) -> None: - """Reposition cards that match QUERY. - - Sets the new position to POSITION and shifts other cards. - - Note that repositioning only works with new cards! 
- """ - query = " ".join(query) - - with Anki(**cfg) as a: - cids = list(a.find_cards(query)) - if not cids: - console.print(f"No matching cards for query: {query}!") - raise click.Abort() - - for cid in cids: - card = a.col.get_card(cid) - if card.type != 0: - console.print("Can only reposition new cards!") - raise click.Abort() - - a.col.sched.reposition_new_cards(cids, position, 1, False, True) - a.modified = True - - -if __name__ == "__main__": - # pylint: disable=no-value-for-parameter - main() diff --git a/src/apy/utilities.py b/src/apy/utilities.py deleted file mode 100644 index 1d02947..0000000 --- a/src/apy/utilities.py +++ /dev/null @@ -1,87 +0,0 @@ -"""Simple utility functions.""" - -from contextlib import contextmanager, redirect_stdout -from io import TextIOWrapper -import os -from subprocess import call -from tempfile import NamedTemporaryFile -from types import TracebackType -from typing import Any, Generator, Optional, TypeVar - -import readchar - -from apy.console import console - - -class cd: - """Context manager for changing the current working directory""" - - def __init__(self, newPath: str) -> None: - self.newPath = os.path.expanduser(newPath) - self.savedPath = "" - - def __enter__(self) -> None: - self.savedPath = os.getcwd() - os.chdir(self.newPath) - - def __exit__( - self, - exc_type: Optional[type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], - ) -> None: - os.chdir(self.savedPath) - - -def editor(filepath: str) -> int: - """Use EDITOR to edit file at given path""" - return call([os.environ.get("EDITOR", "vim"), filepath]) - - -def edit_text(input_text: str, prefix: str = "") -> str: - """Use EDITOR to edit text (from a temporary file)""" - if prefix: - prefix = prefix + "_" - - with NamedTemporaryFile(mode="w+", prefix=prefix, suffix=".md") as tf: - tf.write(input_text) - tf.flush() - editor(tf.name) - tf.seek(0) - edited_message = tf.read().strip() - - return edited_message - - -chooseType = 
TypeVar("chooseType") - - -def choose(items: list[chooseType], text: str = "Choose from list:") -> chooseType: - """Choose from list of items""" - console.print(text) - for i, element in enumerate(items): - console.print(f"{i+1}: {element}") - console.print("> ", end="") - - while True: - choice = readchar.readchar() - - try: - index = int(choice) - except ValueError: - continue - - try: - reply = items[index - 1] - console.print(index) - return reply - except IndexError: - continue - - -@contextmanager -def suppress_stdout() -> Generator[TextIOWrapper, Any, Any]: - """A context manager that redirects stdout to devnull""" - with open(os.devnull, "w", encoding="utf8") as fnull: - with redirect_stdout(fnull) as out: - yield out diff --git a/src/apy/__init__.py b/src/apyanki/__init__.py similarity index 90% rename from src/apy/__init__.py rename to src/apyanki/__init__.py index 9fa220e..47db5ec 100644 --- a/src/apy/__init__.py +++ b/src/apyanki/__init__.py @@ -3,7 +3,7 @@ import os from importlib.metadata import version -__version__ = version("apy") +__version__ = version("apyanki") # Reduce rust verbosity, unless already explicitly increased. 
Anki by default diff --git a/src/apy/anki.py b/src/apyanki/anki.py similarity index 53% rename from src/apy/anki.py rename to src/apyanki/anki.py index dbff3be..692f85e 100644 --- a/src/apy/anki.py +++ b/src/apyanki/anki.py @@ -1,29 +1,34 @@ """An Anki collection wrapper class.""" from __future__ import annotations + import os -from pathlib import Path import pickle +import re import sqlite3 import tempfile import time +from collections.abc import Generator, KeysView +from pathlib import Path from types import TracebackType -from typing import Any, Generator, Optional, Sequence, TYPE_CHECKING, Type +from typing import TYPE_CHECKING, Any from click import Abort +from rich.markdown import Markdown from rich.progress import Progress, SpinnerColumn, TextColumn +from rich.table import Table from rich.text import Text -from apy.config import cfg -from apy.console import console -from apy.fields import prepare_field_for_cli_oneline -from apy.note import Note, NoteData, markdown_file_to_notes -from apy.utilities import cd, choose, editor, suppress_stdout +from apyanki import cards +from apyanki.config import cfg +from apyanki.console import console +from apyanki.note import Note, NoteData, markdown_file_to_notes +from apyanki.utilities import cd, choose, edit_file, suppress_stdout if TYPE_CHECKING: - from anki.notes import NoteId + from anki.collection import OpChangesWithCount from anki.models import NotetypeDict - from anki.cards import CardId + from anki.notes import NoteId class Anki: @@ -31,35 +36,38 @@ class Anki: def __init__( self, - base_path: Optional[str] = None, - collection_db_path: Optional[str] = None, - profile_name: Optional[str] = None, + base_path: str | None = None, + collection_db_path: str | None = None, + profile_name: str | None = None, **_kwargs: dict[str, Any], ): - self.modified = False + self.modified: bool = False - self._meta = None - self._collection_db_path = "" - self._profile_name = profile_name - self._profile = None + self._meta: Any = 
None + self._collection_db_path: str = "" + self._profile_name: str = profile_name or "" + self._profile: dict[Any, Any] | None = None self._init_load_profile(base_path, collection_db_path) self._init_load_collection() self._init_load_config() - self.today: int = self.col.sched.today + with suppress_stdout(): + self.today: int = self.col.sched.today self.model_name_to_id: dict[str, int] = { m["name"]: m["id"] for m in self.col.models.all() } - self.model_names = list(self.model_name_to_id.keys()) + self.model_names: list[str] = list(self.model_name_to_id.keys()) - self.deck_name_to_id = {d["name"]: d["id"] for d in self.col.decks.all()} - self.deck_names = self.deck_name_to_id.keys() + self.deck_name_to_id: dict[str, int] = { + d["name"]: d["id"] for d in self.col.decks.all() + } + self.deck_names: KeysView[str] = self.deck_name_to_id.keys() self.n_decks: int = len(self.deck_names) def _init_load_profile( - self, base_path_str: Optional[str], collection_db_path: Optional[str] + self, base_path_str: str | None, collection_db_path: str | None ) -> None: """Load the Anki profile from database""" if base_path_str is None: @@ -94,7 +102,7 @@ def _init_load_profile( profiles_dict = {name: pickle.loads(data) for name, data in profiles} - if self._profile_name is None: + if not self._profile_name: self._profile_name = self._meta.get( "last_loaded_profile_name", profiles[0][0] ) @@ -106,7 +114,6 @@ def _init_load_profile( def _init_load_collection(self) -> None: """Load the Anki collection""" - # pylint: disable=import-outside-toplevel from anki.collection import Collection from anki.errors import DBError @@ -114,8 +121,7 @@ def _init_load_collection(self) -> None: save_cwd = os.getcwd() try: - with suppress_stdout(): - self.col = Collection(self._collection_db_path) + self.col: Collection = Collection(self._collection_db_path) except AssertionError as error: console.print("Path to database is not valid!") console.print(f"path = {self._collection_db_path}") @@ -130,7 +136,6 
@@ def _init_load_collection(self) -> None: @staticmethod def _init_load_config() -> None: """Load custom configuration""" - # pylint: disable=import-outside-toplevel from anki import latex # Update LaTeX commands @@ -145,20 +150,22 @@ def __enter__(self) -> Anki: def __exit__( self, - exc_type: Optional[Type[BaseException]], - exc_val: Optional[BaseException], - exc_tb: Optional[TracebackType], + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, ) -> None: if self.modified: - console.print("Database was modified.") - if self._profile is not None and self._profile["syncKey"]: - console.print("[blue]Remember to sync!") + if cfg["auto_sync"]: + self.sync() + else: + console.print("Database was modified.") + if self._profile is not None and self._profile["syncKey"]: + console.print("[blue]Remember to sync!") self.col.close() def sync(self) -> None: """Sync collection to AnkiWeb""" - # pylint: disable=import-outside-toplevel from anki.sync import SyncAuth if self._profile is None: @@ -176,9 +183,6 @@ def sync(self) -> None: io_timeout_secs=self._profile.get("networkTimeout") or 30, ) - if auth is None: - return - with Progress( TextColumn( "Syncing {task.fields[name]} [green]…[/green] {task.description}" @@ -191,7 +195,7 @@ def sync(self) -> None: # Perform main sync with suppress_stdout(): - self.col.sync_collection(auth, True) + _ = self.col.sync_collection(auth, True) progress.update(t1, total=1, completed=1, description="[green]done!") # Perform media sync @@ -228,7 +232,6 @@ def sync(self) -> None: def check_media(self) -> None: """Check media (will rebuild missing LaTeX files)""" - # pylint: disable=import-outside-toplevel from anki.notes import NoteId with cd(self.col.media.dir()): @@ -261,7 +264,7 @@ def check_media(self) -> None: if console.confirm("Review note?"): note = Note(self, self.col.get_note(nid)) - note.review() + _ = note.review() for file in output.unused: console.print(f"[red]Unused: {file}") @@ 
-271,10 +274,6 @@ def check_media(self) -> None: if os.path.isfile(file): os.remove(file) - def find_cards(self, query: str) -> Sequence[CardId]: - """Find card ids in Collection that match query""" - return self.col.find_cards(query) - def find_notes(self, query: str) -> Generator[Note, None, None]: """Find notes in Collection and return Note objects""" return ( @@ -286,12 +285,11 @@ def delete_notes(self, ids: NoteId | list[NoteId]) -> None: if not isinstance(ids, list): ids = [ids] - self.col.remove_notes(ids) + _ = self.col.remove_notes(ids) self.modified = True - def get_model(self, model_name: str) -> Optional[NotetypeDict]: + def get_model(self, model_name: str) -> NotetypeDict | None: """Get model from model name""" - # pylint: disable=import-outside-toplevel from anki.models import NotetypeId model_id = self.model_name_to_id.get(model_name) @@ -330,30 +328,49 @@ def rename_model(self, old_model_name: str, new_model_name: str) -> None: self.model_names = list(self.model_name_to_id.keys()) # Save changes - self.col.models.update_dict(model) + _ = self.col.models.update_dict(model) self.modified = True - def list_tags(self) -> None: + def list_tags(self, sort_by_count: bool = False, simple: bool = False) -> None: """List all tags""" + if sort_by_count: + + def sorter(x: tuple[str, int]) -> str | int: + return x[1] + else: + + def sorter(x: tuple[str, int]) -> str | int: + return x[0] + tags = [(t, len(self.col.find_notes(f"tag:{t}"))) for t in self.col.tags.all()] - width = len(max(tags, key=lambda x: len(x[0]))[0]) + 2 - filler = " " * (cfg["width"] - 2 * width - 8) - for (t1, n1), (t2, n2) in zip( - sorted(tags, key=lambda x: x[0]), sorted(tags, key=lambda x: x[1]) - ): - console.print(f"{t1:{width}s}{n1:4d}{filler}{t2:{width}s}{n2:4d}") + if simple: + for t, _ in tags: + console.print(t) + else: + table = Table(show_edge=False, box=None, header_style="bold white") + table.add_column("tag", style="cyan") + table.add_column("notes", style="magenta", 
justify="right") + + for tag, n in sorted(tags, key=sorter): + table.add_row(tag, str(n)) + + console.print(table) def change_tags(self, query: str, tags: str, add: bool = True) -> None: """Add/Remove tags from notes that match query""" note_ids = self.col.find_notes(query) if add: - self.col.tags.bulk_add(note_ids, tags) + _ = self.col.tags.bulk_add(note_ids, tags) else: - self.col.tags.bulk_remove(note_ids, tags) + _ = self.col.tags.bulk_remove(note_ids, tags) self.modified = True + def purge_unused_tags(self) -> OpChangesWithCount: + """Purge all unused tags""" + return self.col.tags.clear_unused_tags() + def edit_model_css(self, model_name: str) -> None: """Edit the CSS part of a given model.""" model = self.get_model(model_name) @@ -364,10 +381,10 @@ def edit_model_css(self, model_name: str) -> None: with tempfile.NamedTemporaryFile( mode="w+", prefix="_apy_edit_", suffix=".css", delete=False ) as tf: - tf.write(model["css"]) + _ = tf.write(model["css"]) tf.flush() - retcode = editor(tf.name) + retcode = edit_file(tf.name) if retcode != 0: console.print(f"Editor return with exit code {retcode}!") return @@ -380,59 +397,106 @@ def edit_model_css(self, model_name: str) -> None: self.col.models.save(model, templates=True) self.modified = True - def list_notes(self, query: str, verbose: bool = False) -> None: - """List notes that match a query""" + def list_note_questions(self, query: str) -> None: + """List first card questions for notes that match a query""" for note in self.find_notes(query): - if note.suspended: - style = "red" - elif "marked" in note.n.tags: - style = "yellow" - else: - style = "white" - - first_field = prepare_field_for_cli_oneline(note.n.values()[0]) - - out = Text("Q: ", style=style) - out.append(first_field) + cards.print_question(note.n.cards()[0]) - console.print(out.fit(console.width)) - if verbose: - console.print(f"model: {note.model_name}\n") + def list_notes( + self, query: str, show_cards: bool, show_raw_fields: bool, verbose: 
bool + ) -> None: + """List notes that match a query""" + for note in self.find_notes(query): + note.pprint( + print_raw=show_raw_fields, list_cards=show_cards, verbose=verbose + ) - def list_cards(self, query: str, verbose: bool = False) -> None: + def list_cards(self, query: str, verbose: bool) -> None: """List cards that match a query""" + for cid in self.col.find_cards(query): + cards.card_pprint(self.col.get_card(cid), verbose) + + def list_cards_as_table(self, query: str, opts_display: dict[str, bool]) -> None: + """List cards that match a query in tabular format""" + width = console.width - 1 + if opts_display.get("show_cid", False): + width -= 15 + if opts_display.get("show_due", False): + width -= 6 + if opts_display.get("show_type", False): + width -= 9 + if opts_display.get("show_ease", False): + width -= 5 + if opts_display.get("show_lapses", False): + width -= 5 + if opts_display.get("show_model", False): + width -= 25 + if opts_display.get("show_answer", False): + width //= 2 + width -= 1 + if opts_display.get("show_deck", False): + width -= 25 + + table = Table(box=None, header_style="bold white") + table.add_column("question") + if opts_display.get("show_answer", False): + table.add_column("answer") + if opts_display.get("show_cid", False): + table.add_column("cid", min_width=13) + if opts_display.get("show_due", False): + table.add_column("due", min_width=4) + if opts_display.get("show_type", False): + table.add_column("type", min_width=8) + if opts_display.get("show_ease", False): + table.add_column("ease", min_width=3) + if opts_display.get("show_lapses", False): + table.add_column("lapses", min_width=3) + if opts_display.get("show_model", False): + table.add_column("model", min_width=10) + if opts_display.get("show_deck", False): + table.add_column("deck", min_width=10) + + for cid in self.col.find_cards(query): + card = self.col.get_card(cid) + question, answer = cards.card_fields_as_md( + card, one_line=True, max_width=width + ) - def 
_styled(key: str, value: Any) -> Text: - """Simple convenience printer.""" - return Text(f"[yellow]{key}:[/yellow] {value}") - - for cid in self.find_cards(query): - c = self.col.get_card(cid) - question = prepare_field_for_cli_oneline(c.question()) - answer = prepare_field_for_cli_oneline(c.answer()) - - console.print(_styled("Q", question).fit(console.width)) - if verbose: - console.print(_styled("A", answer).fit(console.width)) - - cardtype = int(c.type) - card_type = ["new", "learning", "review", "relearning"][cardtype] - - console.print( - f"{_styled('model', c.note_type()['name'])} " - f"{_styled('type', card_type)} " - f"{_styled('ease', c.factor/10)}% " - f"{_styled('lapses', c.lapses)}\n" - f"{_styled('cid', cid)} " - f"{_styled('due', c.due)}\n" - ) + row: list[str | Text | Markdown] = [Markdown(question)] + if opts_display.get("show_answer", False): + row += [Markdown(answer)] + if opts_display.get("show_cid", False): + row += [str(card.id)] + if opts_display.get("show_due", False): + row += [str(card.due)] + if opts_display.get("show_type", False): + card_type = ["new", "learning", "review", "relearning"][int(card.type)] + row += [card_type] + if opts_display.get("show_ease", False): + row += [str(int(card.factor / 10))] + if opts_display.get("show_lapses", False): + row += [str(card.lapses)] + if opts_display.get("show_model", False): + row += [card.note_type()["name"]] + if opts_display.get("show_deck", False): + deck_id = card.current_deck_id() + row += [self.col.decks.name(deck_id)] + table.add_row(*row) + + console.print(table) + + def list_models(self) -> None: + """List available models""" + for name in self.model_names: + console.print(name) def add_notes_with_editor( self, tags: str = "", - model_name: Optional[str] = None, - deck_name: Optional[str] = None, - template: Optional[Note] = None, + model_name: str | None = None, + deck_name: str | None = None, + template: Note | None = None, + respect_note_ids: bool = True, ) -> list[Note]: """Add 
new notes to collection with editor""" if template: @@ -461,7 +525,7 @@ def add_notes_with_editor( model = self.set_model(model_name) input_strings += [ x - for y in [[f'## {field["name"]}', ""] for field in model["flds"]] + for y in [[f"## {field['name']}", ""] for field in model["flds"]] for x in y ] @@ -470,31 +534,135 @@ def add_notes_with_editor( with tempfile.NamedTemporaryFile( mode="w+", prefix="apy_note_", suffix=".md", delete=False ) as tf: - tf.write(input_string) + _ = tf.write(input_string) tf.flush() - retcode = editor(tf.name) + retcode = edit_file(tf.name) if retcode != 0: console.print(f"Editor return with exit code {retcode}!") return [] - return self.add_notes_from_file(tf.name) + return self.add_notes_from_file(tf.name, respect_note_ids=respect_note_ids) def add_notes_from_file( - self, filename: str, tags: str = "", deck: Optional[str] = None + self, + filename: str, + tags: str = "", + deck: str | None = None, + update_origin_file: bool = False, + respect_note_ids: bool = True, ) -> list[Note]: - """Add new notes to collection from Markdown file""" - notes = markdown_file_to_notes(filename) - return self.add_notes_from_list(notes, tags, deck) + """Add notes from Markdown file + + Args: + filename: Path to the markdown file containing notes + tags: Additional tags to add to the notes + deck: Default deck for notes without a deck specified + update_origin_file: If True, update the original file with note IDs + respect_note_ids: If True, then this function looks for nid: or cid: headers + in the file to determine if a note should be updated + rather than added. 
+ + Returns: + List of notes that were updated or added + """ + with open(filename, "r", encoding="utf-8") as f: + original_content = f.read() + + has_missing_nids: bool = False + notes: list[Note] = [] + + for note_data in markdown_file_to_notes(filename): + if tags: + note_data.tags = f"{tags} {note_data.tags}" + + if deck and not note_data.deck: + note_data.deck = deck + + has_missing_nids |= note_data.nid is None + + if respect_note_ids: + note = note_data.update_or_add_to_collection(self) + else: + note = note_data.add_to_collection(self) + + notes.append(note) + + # Update the original file with note IDs if requested + if update_origin_file and has_missing_nids: + self._update_file_with_note_ids(filename, original_content, notes) + + return notes + + def _update_file_with_note_ids( + self, filename: str, content: str, notes: list[Note] + ) -> None: + """Update the original markdown file with note IDs + + This function adds nid: headers to notes in the file that don't have them. + + Args: + filename: Path to the markdown file + content: Original content of the file + notes: List of notes that were added/updated + """ + # Find all '# Note' or similar headers in the file + note_headers = re.finditer(r"^# .*$", content, re.MULTILINE) + note_positions = [match.start() for match in note_headers] + + if not note_positions: + return # No notes found in file + + # Add an extra position at the end to simplify boundary handling + note_positions.append(len(content)) + + # Extract each note's section and check if it needs to be updated + # Keep content before first '# Note' + updated_content: list[str] = [content[0 : note_positions[0]]] + for i in range(len(note_positions) - 1): + start = note_positions[i] + end = note_positions[i + 1] + + # Get the section for this note + section = content[start:end] + + # Check if this section already has an nid + if re.search(r"^nid:", section, re.MULTILINE): + # Already has an ID, keep as is + updated_content.append(section) + else: + 
# No ID, add the note ID from our updated notes + # We need to find where to insert the ID line (after model, tags, etc.) + lines = section.split("\n") + + # Find a good position to insert the ID (after model, tags, deck) + insert_pos = 1 # Default: after the first line (the title) + for j, line in enumerate(lines[1:], 1): + # Look for model:, tags:, deck: lines + if re.match(r"^(model|tag[s]?|deck|markdown|md):", line): + insert_pos = j + 1 # Insert after this line + + # If we have a note ID for this position, insert it + if i < len(notes): + note_id = notes[i].n.id + lines.insert(insert_pos, f"nid: {note_id}") + updated_content.append("\n".join(lines)) + else: + # Couldn't match this section to a note, keep unchanged + updated_content.append(section) + + # Write back the updated content + with open(filename, "w", encoding="utf-8") as f: + _ = f.write("".join(updated_content)) def add_notes_from_list( self, parsed_notes: list[NoteData], tags: str = "", - deck: Optional[str] = None, + deck: str | None = None, ) -> list[Note]: """Add new notes to collection from note list (from parsed file)""" - notes = [] + notes: list[Note] = [] for note in parsed_notes: if note.deck is None: note.deck = deck @@ -508,8 +676,8 @@ def add_notes_single( field_values: list[str], markdown: bool, tags: str = "", - model_name_in: Optional[str] = None, - deck: Optional[str] = None, + model_name_in: str | None = None, + deck: str | None = None, ) -> Note: """Add new note to collection from args""" model_name: str diff --git a/src/apyanki/cards.py b/src/apyanki/cards.py new file mode 100644 index 0000000..687ba1f --- /dev/null +++ b/src/apyanki/cards.py @@ -0,0 +1,120 @@ +"""Utility functions for working with Anki cards""" + +from __future__ import annotations + +import re +from typing import TYPE_CHECKING + +from rich.markdown import Markdown +from rich.text import Text + +from markdownify import markdownify as to_md + +from apyanki.console import console, consolePlain + +if TYPE_CHECKING: 
+ from anki.cards import Card + + +def card_pprint(card: Card, verbose: bool = True) -> None: + """Pretty print a card.""" + flag = get_flag(card) + consolePlain.print(f"[green]# Card (cid: {card.id})[/green]{flag}\n") + + if verbose: + card_type = ["new", "learning", "review", "relearning"][int(card.type)] + details = [ + f"[yellow]nid:[/yellow] {card.nid}", + f"[yellow]model:[/yellow] {card.note_type()['name']}", + f"[yellow]type:[/yellow] {card_type}", + f"[yellow]due:[/yellow] {card.due} days", + f"[yellow]interval:[/yellow] {card.ivl} days", + f"[yellow]repetitions:[/yellow] {card.reps}", + f"[yellow]lapses:[/yellow] {card.lapses}", + f"[yellow]ease:[/yellow] {int(card.factor / 10)} %", + "", + ] + for detail in details: + consolePlain.print(detail) + + question, answer = card_fields_as_md(card) + for title, field in [ + ["Front", question], + ["Back", answer], + ]: + console.print(f"[blue]## {title}[/blue]\n") + console.print(Markdown(field)) + console.print() + + +def card_fields_as_md( + card: Card, one_line: bool = False, max_width: int = 0 +) -> tuple[str, str]: + rendered = card.render_output() + + return ( + _field_to_md(rendered.question_text, one_line, max_width), + _field_to_md(rendered.answer_text, one_line, max_width), + ) + + +def _field_to_md(field: str, one_line: bool = False, max_width: int = 0) -> str: + prepared_field: str = to_md(field) + + if one_line: + prepared_field = prepared_field.replace("\n", " ") + prepared_field = re.sub(r"\s\s+", " ", prepared_field) + + if max_width > 0: + prepared_field = prepared_field[0:max_width] + + return prepared_field + + +def print_question(card: Card) -> None: + """Print the card question""" + question, _ = card_fields_as_md(card) + + output = Text("Q: ") + output.stylize("yellow", 0, 2) + _ = output.append_text(Text.from_markup(question)) + console.print(output.fit(console.width)) + + +def print_answer(card: Card) -> None: + """Print the card answer""" + _, answer = card_fields_as_md(card) + + output = 
Text("Q: ") + output.stylize("yellow", 0, 2) + _ = output.append_text(Text.from_markup(answer)) + console.print(output.fit(console.width)) + + +def get_flag(card: Card, text: str = "  ") -> str: + """Get rich formatted flag of card""" + style = { + 1: "red", + 2: "orange", + 3: "green", + 4: "blue", + 5: "pink1", + 6: "medium_turquoise", + 7: "purple", + }.get(card.flags) + + if style: + return f"[{style}]{text}[/{style}]" + + return "" + + +def get_due_days(card: Card, today: int) -> str: + """Get number of days until card is due""" + if card.type < 2: + return "0" + + if card.type == 2: + return str(card.due - today) + + return "?" diff --git a/src/apyanki/cli.py b/src/apyanki/cli.py new file mode 100644 index 0000000..ccfab4c --- /dev/null +++ b/src/apyanki/cli.py @@ -0,0 +1,800 @@ +"""A script to interact with the Anki database""" + +import os +import sys +from pathlib import Path +from typing import Any + +import click + +from apyanki import __version__ +from apyanki.anki import Anki +from apyanki.config import cfg, cfg_file +from apyanki.console import console +from apyanki.note import Note +from apyanki.utilities import suppress_stdout + +CONTEXT_SETTINGS = {"help_option_names": ["-h", "--help"]} + + +@click.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) +@click.option("-b", "--base-path", help="Set Anki base directory") +@click.option("-p", "--profile-name", help="Specify name of Anki profile to use") +@click.option("-V", "--version", is_flag=True, help="Show apy version") +@click.pass_context +def main(ctx: Any, base_path: str, profile_name: str, version: bool) -> None: + """A script to interact with the Anki database. + + The base_path directory may be specified with the -b / --base-path option. For + convenience, it may also be specified in the config file `~/.config/apy/apy.json` + or with the environment variable APY_BASE or ANKI_BASE. This should point to the + base directory where Anki stores its database and related files. 
See the Anki + documentation for information about where this is located on different systems + (https://docs.ankiweb.net/files.html#file-locations). + + A few sub commands will open an editor for input. Vim is used by default. + The input is parsed when one saves and quits. To abort, one should exit the + editor with a non-zero exit code. In Vim, one can do this with the `:cquit` + command. + + One may specify a different editor with the VISUAL or EDITOR environment variable. + For example, to use emacs one can add this to one's `~/.bashrc` (or similar) + file: + + export VISUAL=emacs + + Note: Use `apy subcmd --help` to get detailed help for a given subcommand. + """ + if version: + console.print(f"apy {__version__}") + sys.exit() + + if base_path: + cfg["base_path"] = os.path.abspath(os.path.expanduser(base_path)) + + if profile_name: + cfg["profile_name"] = profile_name + + if ctx.invoked_subcommand is None: + ctx.invoke(info) + + +@main.command("add-single") +@click.argument("fields", nargs=-1) +@click.option("-p", "--parse-markdown", is_flag=True, help="Parse input as Markdown.") +@click.option("-s", "--preset", default="default", help="Specify a preset.") +@click.option("-t", "--tags", help="Specify default tags for new cards.") +@click.option( + "-m", "--model", "model_name", help="Specify default model for new cards." +) +@click.option("-d", "--deck", help="Specify default deck for new cards.") +def add_single( + fields: list[str], + parse_markdown: bool, + tags: str | None = None, + preset: str | None = None, + model_name: str | None = None, + deck: str | None = None, +) -> None: + """Add a single note from command line arguments. 
+ + Examples: + + \b + # Add a note to the default deck + apy add-single myfront myback + + \b + # Add a cloze deletion note to the default deck + apy add-single -m Cloze "cloze {{c1::deletion}}" "extra text" + + \b + # Add a note to deck "MyDeck" with tags 'my-tag' and 'new-tag' + apy add-single -t "my-tag new-tag" -d MyDeck myfront myback + """ + with Anki(**cfg) as a: + tags_preset = " ".join(cfg["presets"][preset]["tags"]) + if not tags: + tags = tags_preset + else: + tags += " " + tags_preset + + if not model_name: + model_name = cfg["presets"][preset]["model"] + + _ = a.add_notes_single(fields, parse_markdown, tags, model_name, deck) + + +@main.command() +@click.option("-t", "--tags", default="", help="Specify default tags for new cards.") +@click.option( + "-m", + "--model", + "model_name", + default="Basic", + help=("Specify default model for new cards."), +) +@click.option("-d", "--deck", help="Specify default deck for new cards.") +def add(tags: str, model_name: str, deck: str) -> None: + """Add notes interactively from terminal. + + Examples: + + \b + # Add notes to deck "MyDeck" with tags 'my-tag' and 'new-tag' + apy add -t "my-tag new-tag" -d MyDeck + + \b + # Ask for the model and the deck for each new card + apy add -m ASK -d ask + """ + with Anki(**cfg) as a: + notes = a.add_notes_with_editor(tags, model_name, deck) + _added_notes_postprocessing(a, notes) + + +@main.command("update-from-file") +@click.argument("file", type=click.Path(exists=True, dir_okay=False)) +@click.option("-t", "--tags", default="", help="Specify default tags for cards.") +@click.option("-d", "--deck", help="Specify default deck for cards.") +@click.option( + "-u", "--update-file", is_flag=True, help="Update original file with note IDs." +) +def update_from_file(file: Path, tags: str, deck: str, update_file: bool) -> None: + """Update existing notes or add new notes from Markdown file. 
+ + This command will update existing notes if a note ID (nid) or card ID (cid) + is provided in the file header, otherwise it will add new notes. + + With the --update-file option, the original file will be updated to include + note IDs for any new notes added. + + The syntax is similar to add-from-file, but with two additional keys: + + \b + * nid: The note ID to update (optional) + * cid: The card ID to update (optional, used if nid is not provided) + + If neither nid nor cid is provided, a new note will be created. + + Here is an example Markdown input for updating: + + // example.md + model: Basic + tags: marked + nid: 1619153168151 + + # Note 1 + ## Front + Updated question? + + ## Back + Updated answer. + + # Note 2 + cid: 1619153168152 + + ## Front + Another updated question? + + ## Back + Another updated answer. + + # Note 3 + model: Basic + + ## Front + This will be a new note (no ID provided) + + ## Back + New note content + """ + with Anki(**cfg) as a: + notes = a.add_notes_from_file(str(file), tags, deck, update_file) + _added_notes_postprocessing(a, notes) + + +# Create an alias for backward compatibility +@main.command("add-from-file") +@click.argument("file", type=click.Path(exists=True, dir_okay=False)) +@click.option("-t", "--tags", default="", help="Specify default tags for new cards.") +@click.option("-d", "--deck", help="Specify default deck for new cards.") +@click.option( + "-u", "--update-file", is_flag=True, help="Update original file with note IDs." +) +def add_from_file(file: Path, tags: str, deck: str, update_file: bool) -> None: + """Add notes from Markdown file. + + With the --update-file option, the original file will be updated to include + note IDs for any new notes added. + + This command is an alias for update-from-file, which can both add new notes + and update existing ones. 
+ """ + with Anki(**cfg) as a: + notes = a.add_notes_from_file(str(file), tags, deck, update_file) + _added_notes_postprocessing(a, notes) + + +def _added_notes_postprocessing(a: Anki, notes: list[Note]) -> None: + """Common postprocessing after 'apy add[-from-file]' or 'apy update-from-file'.""" + n_notes = len(notes) + if n_notes == 0: + console.print("No notes added or updated") + return + + decks = [a.col.decks.name(c.did) for n in notes for c in n.n.cards()] + n_decks = len(set(decks)) + if n_decks == 0: + console.print("No notes added or updated") + return + + # Check if the command is update or add (based on caller function name) + import inspect + + caller_frame = inspect.currentframe() + if caller_frame is not None and caller_frame.f_back is not None: + caller_function = caller_frame.f_back.f_code.co_name + else: + caller_function = "" + is_update = "update" in caller_function.lower() + + action_word = "Updated/added" if is_update else "Added" + + if a.n_decks > 1: + if n_notes == 1: + console.print(f"{action_word} note to deck: {decks[0]}") + elif n_decks > 1: + console.print(f"{action_word} {n_notes} notes to {n_decks} different decks") + else: + console.print(f"{action_word} {n_notes} notes to deck: {decks[0]}") + else: + console.print(f"{action_word} {n_notes} notes") + + for note in notes: + cards = note.n.cards() + console.print(f"* nid: {note.n.id} (with {len(cards)} cards)") + for card in note.n.cards(): + console.print(f" * cid: {card.id}") + + +@main.command("check-media") +def check_media() -> None: + """Check media.""" + with Anki(**cfg) as a: + a.check_media() + + +@main.command() +def info() -> None: + """Print some basic statistics.""" + if cfg_file.exists(): + for key in cfg.keys(): + console.print(f"Config loaded: {key}") + console.print(f"Config file: {cfg_file}") + else: + console.print("Config file: Not found") + + with Anki(**cfg) as a: + scheduler = 3 if a.col.v3_scheduler() else a.col.sched_ver() + console.print(f"Collection path: 
{a.col.path}") + console.print(f"Scheduler version: {scheduler}") + + if a.col.decks.count() > 1: + console.print("Decks:") + for name in sorted(a.deck_names): + console.print(f" - {name}") + + sum_notes = a.col.note_count() + sum_marked = len(a.col.find_notes("tag:marked")) + sum_cards = a.col.card_count() + sum_due = len(a.col.find_cards("is:due")) + sum_new = len(a.col.find_cards("is:new")) + sum_flagged = len(a.col.find_cards("-flag:0")) + sum_susp = len(a.col.find_cards("is:suspended")) + + console.print( + "\n" + f"{'Model':24s} " + f"{'notes':>7s} " + f"{'marked':>7s} " + f"{'cards':>7s} " + f"{'due':>7s} " + f"{'new':>7s} " + f"{'flagged':>7s}" + f"{'susp.':>7s} " + ) + console.rule() + models = sorted(a.model_names) + for m in models: + nnotes = len(set(a.col.find_notes(f'"note:{m}"'))) + if nnotes == 0: + continue + nmarked = len(a.col.find_notes(f'"note:{m}" tag:marked')) + ncards = len(a.col.find_cards(f'"note:{m}"')) + ndue = len(a.col.find_cards(f'"note:{m}" is:due')) + nnew = len(a.col.find_cards(f'"note:{m}" is:new')) + nflagged = len(a.col.find_cards(f'"note:{m}" -flag:0')) + nsusp = len(a.col.find_cards(f'"note:{m}" is:suspended')) + + name = m[:24] + console.print( + f"{name:24s} " + f"{nnotes:7d} " + f"{nmarked:7d} " + f"{ncards:7d} " + f"{ndue:7d} " + f"{nnew:7d} " + f"{nflagged:7d}" + f"{nsusp:7d} " + ) + console.rule() + console.print( + f"{'Sum':24s} " + f"{sum_notes:7d} " + f"{sum_marked:7d} " + f"{sum_cards:7d} " + f"{sum_due:7d} " + f"{sum_new:7d} " + f"{sum_flagged:7d}" + f"{sum_susp:7d} " + ) + console.rule() + + +@main.group(context_settings=CONTEXT_SETTINGS, invoke_without_command=True) +def model() -> None: + """Interact with Anki models.""" + + +@model.command("edit-css") +@click.option( + "-m", + "--model-name", + default="Basic", + help="Specify for which model to edit CSS template.", +) +@click.option("-s", "--sync-after", is_flag=True, help="Perform sync after any change.") +def edit_css(model_name: str, sync_after: bool) -> 
None: + """Edit the CSS template for the specified model.""" + with Anki(**cfg) as a: + a.edit_model_css(model_name) + + if a.modified and sync_after: + a.sync() + a.modified = False + + +@model.command() +@click.argument("old-name") +@click.argument("new-name") +def rename(old_name: str, new_name: str) -> None: + """Rename model from old_name to new_name.""" + with Anki(**cfg) as a: + a.rename_model(old_name, new_name) + + +@main.command("list-cards") +@click.argument("query", required=False, nargs=-1) +@click.option("-v", "--verbose", is_flag=True, help="Print details for each card") +def list_cards(query: str, verbose: bool) -> None: + """List cards that match QUERY. + + The default QUERY is "tag:marked OR -flag:0". This default can be + customized in the config file `~/.config/apy/apy.json`, e.g. with + + \b + { + "query": "tag:marked OR tag:leech" + } + """ + if query: + query = " ".join(query) + else: + query = cfg["query"] + + with Anki(**cfg) as a: + a.list_cards(query, verbose) + + +@main.command("list-cards-table") +@click.argument("query", required=False, nargs=-1) +@click.option("-a", "--show-answer", is_flag=True, help="Display answer") +@click.option("-m", "--show-model", is_flag=True, help="Display model") +@click.option("-c", "--show-cid", is_flag=True, help="Display card ids") +@click.option("-d", "--show-due", is_flag=True, help="Display card due time in days") +@click.option("-t", "--show-type", is_flag=True, help="Display card type") +@click.option("-e", "--show-ease", is_flag=True, help="Display card ease") +@click.option("-l", "--show-lapses", is_flag=True, help="Display card number of lapses") +@click.option("-D", "--show-deck", is_flag=True, help="Display deck") +def list_cards_table( + query: str, + show_answer: bool, + show_model: bool, + show_due: bool, + show_type: bool, + show_ease: bool, + show_lapses: bool, + show_cid: bool, + show_deck: bool, +) -> None: + """List cards that match QUERY in a tabular format. 
+ + The default QUERY is "tag:marked OR -flag:0". This default can be + customized in the config file `~/.config/apy/apy.json`, e.g. with + + \b + { + "query": "tag:marked OR tag:leech" + } + """ + if query: + query = " ".join(query) + else: + query = cfg["query"] + + with Anki(**cfg) as a: + a.list_cards_as_table( + query, + { + "show_answer": show_answer, + "show_model": show_model, + "show_cid": show_cid, + "show_due": show_due, + "show_type": show_type, + "show_ease": show_ease, + "show_lapses": show_lapses, + "show_deck": show_deck, + }, + ) + + +@main.command("list-models") +def list_models() -> None: + """List available models.""" + with Anki(**cfg) as a: + a.list_models() + + +@main.command("list-notes") +@click.argument("query", required=False, nargs=-1) +@click.option("-c", "--show-cards", is_flag=True, help="Print card specs") +@click.option("-r", "--show-raw-fields", is_flag=True, help="Print raw field data") +@click.option("-v", "--verbose", is_flag=True, help="Print note details") +def list_notes( + query: str, show_cards: bool, show_raw_fields: bool, verbose: bool +) -> None: + """List notes that match QUERY. + + The default QUERY is "tag:marked OR -flag:0". This default can be + customized in the config file `~/.config/apy/apy.json`, e.g. with + + \b + { + "query": "tag:marked OR tag:leech" + } + """ + if query: + query = " ".join(query) + else: + query = cfg["query"] + + with Anki(**cfg) as a: + a.list_notes(query, show_cards, show_raw_fields, verbose) + + +@main.command() +@click.argument("query", required=False, nargs=-1) +@click.option( + "-m", + "--check-markdown-consistency", + is_flag=True, + help="Check for Markdown consistency", +) +@click.option( + "-n", + "--cmc-range", + default=7, + type=int, + help="Number of days backwards to check consistency", +) +def review(query: str, check_markdown_consistency: bool, cmc_range: int) -> None: + """Review/Edit notes that match QUERY. + + The default QUERY is "tag:marked OR -flag:0". 
This default can be + customized in the config file `~/.config/apy/apy.json`, e.g. with + + \b + { + "query": "tag:marked OR tag:leech" + } + """ + if query: + query = " ".join(query) + else: + query = cfg["query"] + + with Anki(**cfg) as a: + notes = list(a.find_notes(query)) + + # Add inconsistent notes + if check_markdown_consistency: + notes += [ + n + for n in a.find_notes(f"rated:{cmc_range}") + if not n.has_consistent_markdown() + ] + + i = 0 + number_of_notes = len(notes) + while i < number_of_notes: + note = notes[i] + status = note.review(i, number_of_notes) + + if status == "stop": + break + + if status == "rewind": + i = max(i - 1, 0) + else: + i += 1 + + +@main.command() +@click.argument("query", nargs=-1, required=True) +@click.option( + "--force-multiple", + "-f", + is_flag=True, + help="Allow editing multiple notes (will edit them one by one)", +) +def edit(query: str, force_multiple: bool) -> None: + """Edit notes that match QUERY directly. + + This command allows direct editing of notes matching the provided query + without navigating through the interactive review interface. + + If the query matches multiple notes, you'll be prompted to confirm + unless --force-multiple is specified. + + Examples: + + \b + # Edit a note by its card ID + apy edit cid:1740342619916 + + \b + # Edit a note by its note ID + apy edit nid:1234567890123 + + \b + # Edit a note containing specific text + apy edit "front:error" + """ + query = " ".join(query) + + with Anki(**cfg) as a: + notes = list(a.find_notes(query)) + + # Handle no matches + if not notes: + console.print(f"No notes found matching query: {query}") + return + + # Handle multiple matches + if len(notes) > 1 and not force_multiple: + console.print(f"Query matched {len(notes)} notes. The first five:\n") + + # Show preview of the first 5 matching notes + for i, note in enumerate(notes[:5]): + preview_text = note.n.fields[0][:50].replace("\n", " ") + if len(preview_text) == 50: + preview_text += "..." 
+ console.print(f"{i + 1}. nid:{note.n.id} - {preview_text}") + + console.print( + "\nHints:\n" + "* Use 'apy edit --force-multiple' to edit all matches or refine your query so it only matches a single note.\n" + "* Use 'apy list QUERY' to view all matches." + ) + return + + # Edit each note + edited_count = 0 + for i, note in enumerate(notes): + if len(notes) > 1: + console.print( + f"\nEditing note {i + 1} of {len(notes)} (nid: {note.n.id})" + ) + + # Show a brief preview of the note + preview_text = note.n.fields[0][:50].replace("\n", " ") + if len(preview_text) == 50: + preview_text += "..." + console.print(f"Content preview: {preview_text}") + console.print(f"Tags: {', '.join(note.n.tags)}") + + if not console.confirm("Edit this note?"): + console.print("Skipping...") + continue + + # Use the direct edit method (bypassing the review interface) + note.edit() + edited_count += 1 + + # Summary message + if edited_count > 0: + console.print( + f"\n[green]Successfully edited {edited_count} note(s)[/green]" + ) + else: + console.print("\n[yellow]No notes were edited[/yellow]") + + +@main.command() +def sync() -> None: + """Synchronize collection with AnkiWeb.""" + with Anki(**cfg) as a: + a.sync() + + +@main.command() +@click.argument("query", required=False, nargs=-1) +@click.option("-a", "--add-tags", help="Add specified tags to matched notes.") +@click.option("-r", "--remove-tags", help="Remove specified tags from matched notes.") +@click.option( + "-c", "--sort-by-count", is_flag=True, help="When listing tags, sort by note count" +) +@click.option("-s", "--simple", is_flag=True, help="Only list available tags") +@click.option( + "-p", + "--purge", + is_flag=True, + help="If specified, then the command will remove all unused tags", +) +def tag( + query: str, + add_tags: str | None, + remove_tags: str | None, + simple: bool, + sort_by_count: bool, + purge: bool, +) -> None: + """List all tags or add/remove tags from notes that match the query. 
+ + The default query is "tag:marked OR -flag:0". This default can be + customized in the config file `~/.config/apy/apy.json`, e.g. with + + \b + { + "query": "tag:marked OR tag:leech" + } + + If none of the options --add-tags, --remove-tags, or --purge are supplied, then the + command simply lists all tags used in the collection. + + Examples: + + \b + # List all tags + apy tag + + \b + # List all tags but sort by the note count + apy tag -c + + \b + # Remove tag "bar" from all notes that match "foo" + apy tag "foo" --remove-tags bar + + \b + # Remove all unused tags + apy tag --purge + """ + if query: + query = " ".join(query) + else: + query = cfg["query"] + + with Anki(**cfg) as a: + if purge: + changes = a.purge_unused_tags() + if changes.count > 0: + console.print(f"[yellow]Purged {changes.count} unused tags.") + else: + console.print("No unused tags found.") + + return + + if (add_tags is None or add_tags == "") and ( + remove_tags is None or remove_tags == "" + ): + a.list_tags(sort_by_count, simple) + return + + n_notes = len(list(a.find_notes(query))) + if n_notes == 0: + console.print("No matching notes!") + raise click.Abort() + + console.print(f"The operation will be applied to {n_notes} matched notes:") + a.list_note_questions(query) + console.print("") + + if add_tags is not None: + console.print(f"Add tags: [green]{add_tags}") + if remove_tags is not None: + console.print(f"Remove tags: [red]{remove_tags}") + + if not console.confirm("Continue?"): + raise click.Abort() + + if add_tags is not None: + a.change_tags(query, add_tags) + + if remove_tags is not None: + a.change_tags(query, remove_tags, add=False) + + +@main.command() +@click.argument("position", type=int, required=True, nargs=1) +@click.argument("query", required=True, nargs=-1) +def reposition(position: int, query: str) -> None: + """Reposition cards that match QUERY. + + Sets the new position to POSITION and shifts other cards. + + Note that repositioning only works with new cards! 
+ """ + query = " ".join(query) + + with Anki(**cfg) as a: + cids = list(a.col.find_cards(query)) + if not cids: + console.print(f"No matching cards for query: {query}!") + raise click.Abort() + + for cid in cids: + card = a.col.get_card(cid) + if card.type != 0: + console.print("Can only reposition new cards!") + raise click.Abort() + + _ = a.col.sched.reposition_new_cards(cids, position, 1, False, True) + a.modified = True + + +@main.command() +@click.argument( + "target-file", type=click.Path(exists=False, resolve_path=True, path_type=Path) +) +@click.option( + "-m", "--include-media", is_flag=True, help="Include media files in backup." +) +@click.option( + "-l", + "--legacy", + is_flag=True, + help="Support older Anki versions (slower/larger files)", +) +def backup(target_file: Path, include_media: bool, legacy: bool) -> None: + """Backup Anki database to specified target file.""" + with Anki(**cfg) as a: + target_filename = str(target_file) + + if not target_filename.endswith(".colpkg"): + console.print("[yellow]Warning: Target should have .colpkg extension!") + raise click.Abort() + + if target_file.exists(): + console.print("[yellow]Warning: Target file already exists!") + console.print(f"[yellow] {target_file}") + if not console.confirm("Do you want to overwrite it?"): + raise click.Abort() + + with suppress_stdout(): + a.col.export_collection_package(target_filename, include_media, legacy) + + +if __name__ == "__main__": + main() diff --git a/src/apy/config.py b/src/apyanki/config.py similarity index 89% rename from src/apy/config.py rename to src/apyanki/config.py index b8d8125..2fadd36 100644 --- a/src/apy/config.py +++ b/src/apyanki/config.py @@ -3,10 +3,10 @@ import json import os from pathlib import Path -from typing import Any, Optional +from typing import Any -def get_base_path() -> Optional[str]: +def get_base_path() -> str | None: """Get base path on current system""" # If base_path not defined: Look in environment variables if path_as_str := 
os.environ.get("APY_BASE"): @@ -48,9 +48,14 @@ def get_base_path() -> Optional[str]: }, "img_viewers_default": ["feh"], "markdown_models": ["Basic"], + "markdown_pygments_style": "friendly", + "markdown_latex_mode": "mathjax", "presets": {}, "profile_name": None, "query": "tag:marked OR -flag:0", + "review_show_cards": False, + "review_verbose": False, + "auto_sync": False, } # Ensure that cfg has required keys diff --git a/src/apy/console.py b/src/apyanki/console.py similarity index 86% rename from src/apy/console.py rename to src/apyanki/console.py index b5f5fd1..33cab97 100644 --- a/src/apy/console.py +++ b/src/apyanki/console.py @@ -1,6 +1,6 @@ """Global console instance for CLI output""" -from typing import Any, Optional +from typing import Any import click import readchar @@ -16,16 +16,14 @@ def wait_for_keypress(self) -> None: console.print( "[white]Press [italic]any key[/italic] to continue ... [/white]", end="" ) - readchar.readchar() + _ = readchar.readchar() def prompt(self, prompt: str, **kwargs: Any) -> str: """Prompt for string.""" p = Prompt(console=self) return p.ask(prompt, **kwargs) - def prompt_int( - self, prompt: str, suffix: Optional[str] = None, **kwargs: Any - ) -> int: + def prompt_int(self, prompt: str, suffix: str | None = None, **kwargs: Any) -> int: """Prompt for integer.""" result: int diff --git a/src/apy/fields.py b/src/apyanki/fields.py similarity index 65% rename from src/apy/fields.py rename to src/apyanki/fields.py index 14feb92..91549a5 100644 --- a/src/apy/fields.py +++ b/src/apyanki/fields.py @@ -1,14 +1,15 @@ """Functions for manipulating note fields""" from __future__ import annotations + import base64 -from pathlib import Path import re -from typing import Optional, TYPE_CHECKING import warnings +from pathlib import Path +from typing import TYPE_CHECKING -from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning, Tag import markdown +from bs4 import BeautifulSoup, MarkupResemblesLocatorWarning, Tag from 
markdown.extensions.abbr import AbbrExtension from markdown.extensions.codehilite import CodeHiliteExtension from markdown.extensions.def_list import DefListExtension @@ -16,22 +17,36 @@ from markdown.extensions.footnotes import FootnoteExtension from markdownify import markdownify as to_md +from apyanki.config import cfg +from apyanki.markdown_math import MathProtectExtension + if TYPE_CHECKING: from anki.models import NotetypeDict - from apy.anki import Anki + + from apyanki.anki import Anki warnings.filterwarnings("ignore", category=MarkupResemblesLocatorWarning) -def prepare_field_for_cli(field: str, use_markdown: bool = False) -> str: +def prepare_field_for_cli( + field: str, use_markdown: bool = False, check_consistency: bool = True +) -> str: """Prepare field html for printing to screen""" - text = convert_field_to_text(field) + text = convert_field_to_text(field, check_consistency) regex_replaces = [ - [r"\[latex\]\s*(.*)\[/latex\]", r"```tex\n\1\n```"], - [r"\\s*(.*)\s*\", r"\n\1"], + [r"\[latex\]\s*(.*?)\[/latex\]", r"```tex\n\1\n```"], + [r"\\s*(.*?)\s*\", r"\n\1"], ] + if not use_markdown: + regex_replaces += [ + [r"(.*?)", r"[bold]\1[/bold]"], + [r"(.*?)", r"[italic]\1[/italic]"], + [r"\*\*(.*?)\*\*", r"[bold]\1[/bold]"], + [r"_(.*?)_", r"[italic]\1[/italic]"], + [r"`(.*?)`", r"[magenta]\1[/magenta]"], + ] literal_replaces: list[list[str]] if use_markdown: @@ -62,7 +77,6 @@ def prepare_field_for_cli_raw(field: str) -> str: and (second := first.next) and (third := second.next) and isinstance(third, Tag) - and isinstance(third.contents, list) ): content = [ e.prettify() if isinstance(e, Tag) else str(e) for e in third.contents @@ -72,21 +86,12 @@ def prepare_field_for_cli_raw(field: str) -> str: return f"Could not parse!\n{field}" -def prepare_field_for_cli_oneline(field: str) -> str: - """Prepare field html for printing to screen on one line""" - text = prepare_field_for_cli(field) - text = text.replace("\n", " ") - text = re.sub(r"\s\s\s+", " ", text) 
- return text - - -def convert_field_to_text(field: str) -> str: +def convert_field_to_text(field: str, check_consistency: bool = True) -> str: """Extract text from field HTML""" if check_if_generated_from_markdown(field): - return _convert_field_to_markdown(field, check_consistency=True) + return _convert_field_to_markdown(field, check_consistency) text = _clean_html(field) - text = re.sub(r"\.*\<\/style\>", "", field, flags=re.S) for source, target in [ ["
", "\n"], ["
", "\n"], @@ -117,13 +122,9 @@ def toggle_field_to_markdown(field_or_text: str) -> str: def check_if_generated_from_markdown(field: str) -> bool: """Check if text is a generated HTML""" - tag = _get_first_tag(BeautifulSoup(field, "html.parser")) + tag = BeautifulSoup(field, "html.parser").find() - return ( - tag is not None - and tag.attrs is not None - and "data-original-markdown" in tag.attrs - ) + return isinstance(tag, Tag) and "data-original-markdown" in tag.attrs def check_if_inconsistent_markdown(field: str) -> bool: @@ -149,11 +150,9 @@ def img_paths_from_field_latex(html: str, ntd: NotetypeDict, anki: Anki) -> list Note: The returned paths are relative to the Anki media directory. """ - # pylint: disable=import-outside-toplevel from anki import latex - # pylint: disable=protected-access - proto = anki.col._backend.extract_latex( + proto = anki.col._backend.extract_latex( # pyright: ignore [reportPrivateUsage] text=html, svg=ntd.get("latexsvg", False), expand_clozes=False ) out = latex.ExtractedLatexOutput.from_proto(proto) @@ -167,45 +166,42 @@ def img_paths_from_field_latex(html: str, ntd: NotetypeDict, anki: Anki) -> list def _convert_field_to_markdown(field: str, check_consistency: bool = False) -> str: """Extract generated markdown text from field HTML""" - tag = _get_first_tag(BeautifulSoup(field, "html.parser")) - if not tag: + tag = BeautifulSoup(field, "html.parser").find() + if not isinstance(tag, Tag): return field original_markdown = tag["data-original-markdown"] if isinstance(original_markdown, list): original_markdown = "\n".join(original_markdown) - text = ( - base64.b64decode(original_markdown.encode()) - .decode("utf-8") - .replace("
", "\n") - ) + text = base64.b64decode(original_markdown.encode()).decode().replace("
", "\n") if check_consistency and field != _convert_markdown_to_field(text): html_clean = re.sub(r' data-original-markdown="[^"]*"', "", field) - text += f"\n\n### Current HTML → Markdown\n{to_md(html_clean)}" - text += f"\n### Current HTML\n```html\n{html_clean}\n```" + consistency_text = f"\n\n### Current HTML → Markdown\n{to_md(html_clean)}" + consistency_text += f"\n### Current HTML\n```html\n{html_clean}\n```" + else: + consistency_text = "" - # For convenience: Fix mathjax escaping - # converted = converted.replace(r"\[", r"[") - # converted = converted.replace(r"\]", r"]") - # converted = converted.replace(r"\(", r"(") - # converted = converted.replace(r"\)", r")") + # Apply latex translation based on specified latex mode + if cfg["markdown_latex_mode"] == "latex": + text = _latex_to_mdlatex(text) + else: + text = _mathjax_to_mdlatex(text) - return text + return text + consistency_text def _convert_markdown_to_field(text: str) -> str: """Convert Markdown to field HTML""" - # Don't convert if md text is really plain - if re.match(r"[a-zA-Z0-9æøåÆØÅ ,.?+-]*$", text): + + # Return input text if it only contains allowed characters + if re.fullmatch(r"[a-zA-Z0-9æøåÆØÅ ,.?+-]*", text): return text # Prepare original markdown for restoring # Note: convert newlines to
to make text readable in the Anki viewer - original_encoded = base64.b64encode( - text.replace("\n", "
").encode("utf-8") - ).decode() + original_encoded = base64.b64encode(text.replace("\n", "
").encode()).decode() # For convenience: Escape some common LaTeX constructs text = text.replace(r"\\", r"\\\\") @@ -230,32 +226,52 @@ def _convert_markdown_to_field(text: str) -> str: CodeHiliteExtension( noclasses=True, linenums=False, - pygments_style="friendly", + pygments_style=cfg["markdown_pygments_style"], guess_lang=False, ), DefListExtension(), FencedCodeExtension(), FootnoteExtension(), + MathProtectExtension(cfg["markdown_latex_mode"]), ], output_format="html", ) - html_tree = BeautifulSoup(html, "html.parser") + # Parse HTML and attach original markdown + soup = BeautifulSoup(html or "
 
", "html.parser") + root = soup.find() + if isinstance(root, Tag): + root["data-original-markdown"] = original_encoded - # Find html tree root tag - tag = _get_first_tag(html_tree) - if not tag: - if not html: - # Add space to prevent input field from shrinking in UI - html = " " - html_tree = BeautifulSoup(f"
{html}
", "html.parser") - tag = _get_first_tag(html_tree) + return str(soup) + + +def _latex_to_mdlatex(text: str) -> str: + """Replace [$$]…[/$$] and [$]…[/$] with $$…$$ and $…$""" + pattern = re.compile( + r""" + (\[\$\$\])(.*?)\[/\$\$\] # match [$$]…[/$$] + | # or + (\[\$\])(.*?)\[/\$\] # match [$]…[/$] + """, + re.DOTALL | re.VERBOSE, + ) - if tag: - # Store original_encoded as data-attribute on tree root - tag["data-original-markdown"] = original_encoded + def replacer(match: re.Match[str]) -> str: + if match.group(1): + return f"$${match.group(2)}$$" + elif match.group(3): + return f"${match.group(4)}$" + return match.group(0) - return str(html_tree) + return pattern.sub(replacer, text) + + +def _mathjax_to_mdlatex(text: str) -> str: + """Replace \\[…\\] and \\(…\\) with $$…$$ and $…$""" + text = re.sub(r"\\\[(.*?)\\\]", r"$$\1$$", text, flags=re.DOTALL) + text = re.sub(r"\\\((.*?)\\\)", r"$\1$", text, flags=re.DOTALL) + return text def _clean_html(text: str) -> str: @@ -268,12 +284,3 @@ def _clean_html(text: str) -> str: text = re.sub(r"\\s*\<\/i\>", "", text) text = re.sub(r"\\s*\<\/div\>", "", text) return text.strip() - - -def _get_first_tag(tree: BeautifulSoup) -> Optional[Tag]: - """Get first tag among children of tree""" - for child in tree.children: - if isinstance(child, Tag): - return child - - return None diff --git a/src/apyanki/markdown_math.py b/src/apyanki/markdown_math.py new file mode 100644 index 0000000..f598de8 --- /dev/null +++ b/src/apyanki/markdown_math.py @@ -0,0 +1,66 @@ +"""Extension to avoid converting markdown within math blocks""" + +from __future__ import annotations + +import re + +from markdown import Markdown +from markdown.extensions import Extension +from markdown.postprocessors import Postprocessor +from markdown.preprocessors import Preprocessor + + +class MathProtectExtension(Extension): + def __init__(self, markdown_latex_mode: str) -> None: + super().__init__() + self.markdown_latex_mode: str = markdown_latex_mode + + def 
extendMarkdown(self, md: Markdown) -> None: # pyright: ignore[reportImplicitOverride] + math_preprocessor = MathPreprocessor(md, self.markdown_latex_mode) + math_postprocessor = MathPostprocessor(md, math_preprocessor.placeholders) + + md.preprocessors.register(math_preprocessor, "math_block_processor", 25) + md.postprocessors.register(math_postprocessor, "math_block_restorer", 25) + + +class MathPreprocessor(Preprocessor): + def __init__(self, md: Markdown, markdown_latex_mode: str) -> None: + super().__init__(md) + self.counter: int = 0 + self.placeholders: dict[str, str] = {} + + # Apply latex translation based on specified latex mode + if markdown_latex_mode == "latex": + self.fmt_display: str = "[$$]{math}[/$$]" + self.fmt_inline: str = "[$]{math}[/$]" + else: + self.fmt_display = r"\[{math}\]" + self.fmt_inline = r"\({math}\)" + + def run(self, lines: list[str]) -> list[str]: # pyright: ignore[reportImplicitOverride] + def replacer(match: re.Match[str]) -> str: + placeholder = f"MATH-PLACEHOLDER-{self.counter}" + self.counter += 1 + + if matched := match.group(1): + self.placeholders[placeholder] = self.fmt_display.format(math=matched) + elif matched := match.group(2): + self.placeholders[placeholder] = self.fmt_inline.format(math=matched) + + return placeholder + + pattern = re.compile(r"\$\$(.*?)\$\$|\$(.*?)\$", re.DOTALL) + lines_joined = "\n".join(lines) + lines_processed = pattern.sub(replacer, lines_joined) + return lines_processed.split("\n") + + +class MathPostprocessor(Postprocessor): + def __init__(self, md: Markdown, placeholders: dict[str, str]) -> None: + super().__init__(md) + self.placeholders: dict[str, str] = placeholders + + def run(self, text: str) -> str: # pyright: ignore[reportImplicitOverride] + for placeholder, math in self.placeholders.items(): + text = text.replace(placeholder, math) + return text diff --git a/src/apy/note.py b/src/apyanki/note.py similarity index 55% rename from src/apy/note.py rename to src/apyanki/note.py index 
01e4cfb..9161d04 100644 --- a/src/apy/note.py +++ b/src/apyanki/note.py @@ -1,24 +1,28 @@ """Classes and functions for interacting with and creating notes""" from __future__ import annotations -from dataclasses import dataclass + import os -from pathlib import Path import re -from subprocess import DEVNULL, Popen import tempfile +from dataclasses import dataclass +from pathlib import Path +from subprocess import DEVNULL, Popen from time import localtime, strftime -from typing import Any, Optional, TYPE_CHECKING +from typing import TYPE_CHECKING, Any, Literal -from click import Abort import readchar +from click import Abort from rich.columns import Columns -from rich.text import Text from rich.markdown import Markdown +from rich.table import Table +from rich.text import Text +from typing_extensions import override -from apy.config import cfg -from apy.console import console, consolePlain -from apy.fields import ( +from apyanki import cards +from apyanki.config import cfg +from apyanki.console import console, consolePlain +from apyanki.fields import ( check_if_generated_from_markdown, check_if_inconsistent_markdown, convert_field_to_text, @@ -29,33 +33,36 @@ prepare_field_for_cli_raw, toggle_field_to_markdown, ) -from apy.utilities import cd, choose, editor +from apyanki.utilities import cd, choose, edit_file if TYPE_CHECKING: - from apy.anki import Anki from anki.notes import Note as ANote + from apyanki.anki import Anki + class Note: """A Note wrapper class""" def __init__(self, anki: Anki, note: ANote) -> None: - self.a = anki - self.n = note + self.a: Anki = anki + self.n: ANote = note note_type = note.note_type() if note_type: - self.model_name = note_type["name"] + self.model_name: str = note_type["name"] else: self.model_name = "__invalid-note__" - self.field_names = list(self.n.keys()) - self.suspended = any(c.queue == -1 for c in self.n.cards()) + self.field_names: list[str] = list(self.n.keys()) + self.suspended: bool = any(c.queue == -1 for c in 
self.n.cards()) + @override def __repr__(self) -> str: """Convert note to Markdown format""" lines = [ "# Note", f"model: {self.model_name}", f"tags: {self.get_tag_string()}", + f"nid: {self.n.id}", ] if self.a.n_decks > 1: @@ -73,44 +80,53 @@ def __repr__(self) -> str: return "\n".join(lines) - def pprint(self, print_raw: bool = False) -> None: + def pprint( + self, print_raw: bool = False, list_cards: bool = False, verbose: bool = False + ) -> None: """Print to screen""" - # pylint: disable=import-outside-toplevel from anki import latex - created = strftime("%F %H:%M", localtime(self.n.id / 1000)) - modified = strftime("%F %H:%M", localtime(self.n.mod)) - next_due = min(c.due for c in self.n.cards()) - self.a.today - types = ", ".join( - { - ["new", "learning", "review", "relearning"][c.type] - for c in self.n.cards() - } - ) - header = f"[green]# Note (nid: {self.n.id})[/green]" if self.suspended: - header += " [red](suspended)[/red]" - - columned = [ - f"[yellow]model:[/yellow] {self.model_name} ({len(self.n.cards())} cards)", - f"[yellow]card type:[/yellow] {types}", - f"[yellow]created:[/yellow] {created}", - f"[yellow]modified:[/yellow] {modified}", - f"[yellow]next due:[/yellow] {next_due} days", - f"[yellow]tags:[/yellow] {self.get_tag_string()}", - ] - if self.a.n_decks > 1: - columned += ["[yellow]deck:[/yellow] " + self.get_deck()] - - flags = [str(c.template()["name"]) for c in self.n.cards() if c.flags > 0] - if flags: - flag_str = ", ".join(["[magenta]x[/magenta]" for x in flags]) - columned += [f"[yellow]flagged:[/yellow] {flag_str}"] - - consolePlain.print(header) - consolePlain.print(Columns(columned, width=37)) - console.print() + header += " | [red]suspended[/red]" + + tags = self.get_tag_string() + if "marked" in tags: + header += " | [blue]marked[/blue]" + if "leech" in tags: + header += " | [yellow]leech[/yellow]" + + consolePlain.print(header + "\n") + + if verbose: + created = strftime("%F %H:%M", localtime(self.n.id / 1000)) + modified = 
strftime("%F %H:%M", localtime(self.n.mod)) + details = [ + f"[yellow]model:[/yellow] {self.model_name} ({len(self.n.cards())} cards)", + f"[yellow]tags:[/yellow] {tags}", + f"[yellow]created:[/yellow] {created}", + f"[yellow]modified:[/yellow] {modified}", + ] + if self.a.n_decks > 1: + details += ["[yellow]deck:[/yellow] " + self.get_deck()] + + if not list_cards: + flagged = [ + cards.get_flag(c, str(c.template()["name"])) + for c in self.n.cards() + if c.flags > 0 + ] + if flagged: + details += [f"[yellow]flagged:[/yellow] {', '.join(flagged)}"] + + for detail in details: + consolePlain.print(detail) + + if list_cards: + self.print_cards() + + if verbose or list_cards: + console.print() imgs: list[Path] = [] for name, field in self.n.items(): @@ -132,7 +148,7 @@ def pprint(self, print_raw: bool = False) -> None: # Render LaTeX if necessary and fill list of LaTeX images note_type = self.n.note_type() if note_type: - latex.render_latex(field, note_type, self.a.col) + _ = latex.render_latex(field, note_type, self.a.col) imgs += img_paths_from_field_latex(field, note_type, self.a) if imgs: @@ -141,6 +157,32 @@ def pprint(self, print_raw: bool = False) -> None: console.print("- " + str(line)) console.print("") + def print_cards(self) -> None: + """Print list of cards to screen""" + table = Table( + show_edge=False, + padding=(0, 3, 0, 0), + highlight=True, + box=None, + header_style=None, + ) + table.add_column("Card name", header_style="yellow", no_wrap=True) + table.add_column("Due", justify="right", header_style="white") + table.add_column("Interval", justify="right", header_style="white") + table.add_column("Reps", justify="right", header_style="white") + table.add_column("Lapses", justify="right", header_style="white") + table.add_column("Ease", justify="right", header_style="white") + for card in sorted(self.n.cards(), key=lambda x: x.factor): + table.add_row( + "- " + str(card.template()["name"]) + cards.get_flag(card), + cards.get_due_days(card, 
self.a.today), + str(card.ivl), + str(card.reps), + str(card.lapses), + str(card.factor / 10.0), + ) + console.print(table) + def show_images(self) -> None: """Show in the fields""" note_type = self.n.note_type() @@ -157,17 +199,18 @@ def show_images(self) -> None: view_cmd = cfg["img_viewers"].get( file.suffix[1:], cfg["img_viewers_default"] ) - Popen(view_cmd + [file], stdout=DEVNULL, stderr=DEVNULL) + _ = Popen(view_cmd + [file], stdout=DEVNULL, stderr=DEVNULL) def edit(self) -> None: """Edit tags and fields of current note""" with tempfile.NamedTemporaryFile( mode="w+", dir=os.getcwd(), prefix="edit_note_", suffix=".md" ) as tf: - tf.write(str(self)) + # Write the note content (includes note ID from __repr__) + _ = tf.write(str(self)) tf.flush() - retcode = editor(tf.name) + retcode = edit_file(tf.name) if retcode != 0: console.print(f"[red]Editor return with exit code {retcode}![/red]") return @@ -178,29 +221,43 @@ def edit(self) -> None: console.print("[red]Something went wrong when editing note![/red]") return + # Handle additional notes created during editing if len(notes) > 1: added_notes = self.a.add_notes_from_list(notes[1:]) - console.print( - f"[green]Added {len(added_notes)} new notes while editing.[/green]" - ) - console.wait_for_keypress() + if added_notes: + console.print( + f"[green]Added {len(added_notes)} new notes while editing.[/green]" + ) + for added_note in added_notes: + cards = added_note.n.cards() + console.print(f"* nid: {added_note.n.id} (with {len(cards)} cards)") + console.wait_for_keypress() + # Update the current note from the first note in the file note = notes[0] + # Update tags if changed new_tags = note.tags.split() - if new_tags != self.n.tags: + if sorted(new_tags) != sorted(self.n.tags): self.n.tags = new_tags + # Update deck if changed if note.deck is not None and note.deck != self.get_deck(): self.set_deck(note.deck) + # Update fields if changed for i, text in enumerate(note.fields.values()): - self.n.fields[i] = 
convert_text_to_field(text, use_markdown=note.markdown) + new_field = convert_text_to_field(text, note.markdown) + if new_field != self.n.fields[i]: + self.n.fields[i] = new_field - self.n.flush() + # Save changes + _ = self.a.col.update_note(self.n) self.a.modified = True + + # Check for duplication issues if self.n.dupeOrEmpty(): - console.print("The updated note is now a dupe!") + console.print("[red]Warning: The updated note is now a dupe![/red]") console.wait_for_keypress() def delete(self) -> None: @@ -211,7 +268,7 @@ def has_consistent_markdown(self) -> bool: """Check if markdown fields are consistent with html values""" return any(check_if_inconsistent_markdown(f) for f in self.n.values()) - def change_model(self) -> bool: + def change_model(self) -> Note | None: """Change the note type""" console.clear() console.print("[red]Warning![/red]") @@ -221,18 +278,18 @@ def change_model(self) -> bool: "progress is lost!" ) if not console.confirm("\nContinue?"): - return False + return None - models = sorted(self.a.model_names) # type: ignore[has-type] + models = sorted(self.a.model_names) while True: console.clear() console.print("Please choose new model:") for n, m in enumerate(models): - console.print(f" {n+1}: {m}") - index: int = console.prompt_int(">>> ", prompt_suffix="") - 1 + console.print(f" {n + 1}: {m}") + index: int = console.prompt_int(">>> ", suffix="") - 1 try: new_model = models[index] - self.a.set_model(new_model) + _ = self.a.set_model(new_model) model = self.a.get_model(new_model) if not model: continue @@ -242,25 +299,31 @@ def change_model(self) -> bool: break fields: dict[str, str] = {} - first_field: str = model["flds"][0] - for field_name in model["flds"]: - fields[field_name] = "" + first_field: str = model["flds"][0]["name"] + for field in model["flds"]: + fields[field["name"]] = "" + fields[first_field] = f"Created from Note {self.n.id}\n" for old_field_name, old_field in self.n.items(): - fields[first_field] += f"### 
{old_field_name}\n{old_field}\n" + text = convert_field_to_text(old_field) + fields[first_field] += f"\n### {old_field_name}\n{text}\n" + + if model["name"] == "Cloze": + fields[first_field] += "\nCloze card needs clozes: {{c1::content}}" note_data = NoteData( model["name"], " ".join(self.n.tags), fields, any(check_if_generated_from_markdown(f) for f in self.n.values()), + deck=self.get_deck(), ) new_note = note_data.add_to_collection(self.a) new_note.edit() self.a.delete_notes(self.n.id) - return True + return new_note def toggle_marked(self) -> None: """Toggle marked tag for note""" @@ -271,14 +334,23 @@ def toggle_marked(self) -> None: self.n.flush() self.a.modified = True + def toggle_leech(self) -> None: + """Toggle leech tag for note""" + if "leech" in self.n.tags: + self.n.remove_tag("leech") + else: + self.n.add_tag("leech") + self.n.flush() + self.a.modified = True + def toggle_suspend(self) -> None: """Toggle suspend for note""" cids = [c.id for c in self.n.cards()] if self.suspended: - self.a.col.sched.unsuspendCards(cids) + _ = self.a.col.sched.unsuspendCards(cids) else: - self.a.col.sched.suspendCards(cids) + _ = self.a.col.sched.suspendCards(cids) self.suspended = not self.suspended self.a.modified = True @@ -301,15 +373,25 @@ def clear_flags(self) -> None: c.flush() self.a.modified = True - def show_cards(self) -> None: - """Show cards for note""" - for i, c in enumerate(self.n.cards()): - number = f'{str(i) + ".":>3s}' - name = c.template()["name"] - if c.flags > 0: - name = f"[red]{name}[/red]" - console.print(f" [white]{number}[/white] {name}") + def reset_progress(self) -> None: + """Reset progress for a card""" + card_list = {c.template()["name"]: c for c in self.n.cards()} + if len(card_list) <= 1: + card_name = next(iter(card_list)) + else: + card_name = choose(list(card_list.keys()), "Select card to reset:") + card = card_list[card_name] + console.print("\n[magenta]Resetting progress for card:") + cards.card_pprint(card) + if not 
console.confirm("[red bold]Are you sure?"): + return + + _ = self.a.col.sched.schedule_cards_as_new( + [card.id], restore_position=True, reset_counts=True + ) + self.a.modified = True + console.print("[magenta]The progress was reset.") console.wait_for_keypress() def get_deck(self) -> str: @@ -322,7 +404,7 @@ def set_deck(self, deck: str) -> None: cids = [c.id for c in self.n.cards()] if cids and newdid: - self.a.col.set_deck(cids, newdid) + _ = self.a.col.set_deck(cids, newdid) self.a.modified = True def set_deck_interactive(self) -> None: @@ -346,10 +428,10 @@ def get_tag_string(self) -> str: def review( self, - i: Optional[int] = None, - number_of_notes: Optional[int] = None, - remove_actions: Optional[list[str]] = None, - ) -> bool: + i: int | None = None, + number_of_notes: int | None = None, + remove_actions: list[str] | None = None, + ) -> Literal["stop", "continue", "rewind"]: """Interactive review of the note This method is used by the review command. @@ -363,21 +445,25 @@ def review( actions = { "c": "Continue", + "p": "Go back", + "s": "Save and stop", + "x": "Save and stop", "e": "Edit", "a": "Add new", "d": "Delete", "m": "Toggle markdown", "*": "Toggle marked", + "l": "Toggle leech", "z": "Toggle suspend", - "p": "Toggle pprint", + "P": "Toggle pprint", "F": "Clear flags", - "C": "Show card names", + "R": "Reset progress", "f": "Show images", "E": "Edit CSS", "D": "Change deck", "N": "Change model", - "s": "Save and stop", - "x": "Abort", + "v": "Show cards", + "V": "Show details", } if remove_actions: @@ -388,12 +474,16 @@ def review( note_number_string = "" if i is not None: if number_of_notes: - note_number_string = f" {i+1} of {number_of_notes}" + note_number_string = f" {i + 1} of {number_of_notes}" else: - note_number_string = f" {i+1}" + note_number_string = f" {i + 1}" menu = Columns( - [f"[blue]{key}[/blue]: {value}" for key, value in actions.items()], + [ + f"[blue]{key}[/blue]: {value}" + for key, value in actions.items() + if key != "x" + 
], padding=(0, 2), title=Text( f"Reviewing note{note_number_string}", @@ -403,20 +493,25 @@ def review( ) print_raw_fields = False + verbose = cfg["review_verbose"] + show_cards = cfg["review_show_cards"] refresh = True while True: if refresh: console.clear() console.print(menu) console.print("") - self.pprint(print_raw_fields) + self.pprint(print_raw_fields, list_cards=show_cards, verbose=verbose) refresh = True choice = readchar.readchar() action = actions.get(choice) if action == "Continue": - return True + return "continue" + + if action == "Go back": + return "rewind" if action == "Edit": self.edit() @@ -427,6 +522,7 @@ def review( tags=self.get_tag_string(), model_name=self.model_name, template=self, + respect_note_ids=False, ) console.print(f"Added {len(notes)} notes") @@ -437,7 +533,7 @@ def review( "Are you sure you want to delete the note?" ): self.delete() - return True + return "continue" if action == "Toggle markdown": self.toggle_markdown() @@ -447,6 +543,10 @@ def review( self.toggle_marked() continue + if action == "Toggle leech": + self.toggle_leech() + continue + if action == "Toggle suspend": self.toggle_suspend() continue @@ -459,8 +559,8 @@ def review( self.clear_flags() continue - if action == "Show card names": - self.show_cards() + if action == "Reset progress": + self.reset_progress() continue if action == "Show images": @@ -477,18 +577,24 @@ def review( continue if action == "Change model": - if self.change_model(): - return True + new_note = self.change_model() + if new_note is not None: + return new_note.review(i, number_of_notes) continue if action == "Save and stop": console.print("Stopped") - return False + return "stop" - if action == "Abort": - console.print("[red]Abort now implies [bold]Save and stop!") - console.print("This is because Anki always saves database changes!") - raise Abort() + if action == "Show cards": + show_cards = not show_cards + cfg["review_show_cards"] = show_cards + continue + + if action == "Show details": + 
verbose = not verbose + cfg["review_verbose"] = verbose + continue @dataclass @@ -499,7 +605,9 @@ class NoteData: tags: str fields: dict[str, str] markdown: bool = True - deck: Optional[str] = None + deck: str | None = None + nid: str | None = None + cid: str | None = None def add_to_collection(self, anki: Anki) -> Note: """Add note to collection @@ -516,25 +624,24 @@ def add_to_collection(self, anki: Anki) -> Note: field_names = [x.replace(" (markdown)", "") for x in self.fields.keys()] for x, y in zip(model_field_names, field_names): if x != y: - console.print("Warning: Inconsistent field names " f"({x} != {y})") + console.print(f"Warning: Inconsistent field names ({x} != {y})") notetype = anki.col.models.current(for_deck=False) new_note = anki.col.new_note(notetype) note_type = new_note.note_type() if self.deck is not None and note_type is not None: - note_type["did"] = anki.deck_name_to_id[self.deck] # type: ignore[has-type] + note_type["did"] = anki.deck_name_to_id[self.deck] new_note.fields = [ - convert_text_to_field(f, use_markdown=self.markdown) - for f in self.fields.values() + convert_text_to_field(f, self.markdown) for f in self.fields.values() ] for tag in self.tags.strip().split(): new_note.add_tag(tag) - if not new_note.dupeOrEmpty(): - anki.col.addNote(new_note) + if not new_note.duplicate_or_empty(): + _ = anki.col.addNote(new_note) anki.modified = True else: field_name, field_value = list(self.fields.items())[0] @@ -544,6 +651,112 @@ def add_to_collection(self, anki: Anki) -> Note: return Note(anki, new_note) + def update_or_add_to_collection(self, anki: Anki) -> Note: + """Update existing note in collection if ID is provided, otherwise add as new + + Returns: The updated or new note + """ + # First try to find the note by nid or cid + existing_note = None + + if self.nid: + # Try to find the note by its note ID + try: + # Import NoteId here to avoid circular imports at module level + from anki.notes import NoteId + + note_id = 
NoteId(int(self.nid)) + existing_note = anki.col.get_note(note_id) + return self._update_note(anki, existing_note) + except (ValueError, TypeError): + console.print( + f"[yellow]Invalid note ID format: {self.nid}. Will create a new note.[/yellow]" + ) + except Exception as e: + console.print( + f"[yellow]Note with ID {self.nid} not found: {e}. Will create a new note.[/yellow]" + ) + + if not existing_note and self.cid: + # Try to find the note by card ID + try: + # Import CardId here to avoid circular imports at module level + from anki.cards import CardId + + card_id = CardId(int(self.cid)) + card = anki.col.get_card(card_id) + if card: + existing_note = card.note() + return self._update_note(anki, existing_note) + except (ValueError, TypeError): + console.print( + f"[yellow]Invalid card ID format: {self.cid}. Will create a new note.[/yellow]" + ) + except Exception as e: + console.print( + f"[yellow]Card with ID {self.cid} not found: {e}. Will create a new note.[/yellow]" + ) + + # If no existing note found or ID not provided, add as new + return self.add_to_collection(anki) + + def _update_note(self, anki: Anki, existing_note: Any) -> Note: + """Update an existing note with new field values + + Returns: The updated note + """ + # Verify model match + note_type = existing_note.note_type() + if note_type and note_type["name"] != self.model: + console.print( + f"[yellow]Warning: Model mismatch. File specifies '{self.model}', note has '{note_type['name']}'.[/yellow]" + ) + if not console.confirm("Continue with update anyway?"): + console.print( + "[yellow]Update canceled. 
Adding as new note instead.[/yellow]" + ) + return self.add_to_collection(anki) + + # Update tags + existing_note.tags = self.tags.strip().split() + + # Update deck if specified + if self.deck is not None: + try: + # Get first card and update its deck + cards = existing_note.cards() + if cards: + # Explicitly cast to int to satisfy mypy + deck_id = anki.deck_name_to_id.get(self.deck, None) + if deck_id is not None: # Make sure deck_id exists and is not None + card_ids = [c.id for c in cards] + _ = anki.col.set_deck(card_ids, deck_id) + except Exception as e: + console.print(f"[yellow]Failed to update deck: {e}[/yellow]") + + # Update fields + field_names = list(existing_note.keys()) + for i, field_name in enumerate(field_names): + # Match field names from the file to the existing note + matching_field = None + for file_field_name, content in self.fields.items(): + clean_name = file_field_name.replace(" (markdown)", "") + if clean_name.lower() == field_name.lower(): + matching_field = content + break + + if matching_field is not None: + existing_note.fields[i] = convert_text_to_field( + matching_field, + self.markdown, + ) + + # Save the updated note + _ = anki.col.update_note(existing_note) + anki.modified = True + + return Note(anki, existing_note) + def markdown_file_to_notes(filename: str) -> list[NoteData]: """Parse note data from a Markdown file""" @@ -555,6 +768,8 @@ def markdown_file_to_notes(filename: str) -> list[NoteData]: fields=x["fields"], markdown=x["markdown"], deck=x["deck"], + nid=x["nid"], + cid=x["cid"], ) for x in _parse_markdown_file(filename) ] @@ -576,6 +791,8 @@ def _parse_markdown_file(filename: str) -> list[dict[str, Any]]: "markdown": True, "tags": "", "deck": None, + "nid": None, + "cid": None, } with open(filename, "r", encoding="utf8") as f: for line in f: @@ -592,12 +809,16 @@ def _parse_markdown_file(filename: str) -> list[dict[str, Any]]: defaults["tags"] = v.replace(",", "") elif k in ("markdown", "md"): defaults["markdown"] = v in 
("true", "yes") + elif k == "nid": + defaults["nid"] = v + elif k == "cid": + defaults["cid"] = v else: defaults[k] = v notes: list[dict[str, Any]] = [] current_note: dict[str, Any] = {} - current_field: Optional[str] = None + current_field: str | None = None is_in_codeblock = False with open(filename, "r", encoding="utf8") as f: for line in f: @@ -623,7 +844,14 @@ def _parse_markdown_file(filename: str) -> list[dict[str, Any]]: k = k.lower() v = v.strip() if k in ("tag", "tags"): - current_note["tags"] = v.replace(",", "") + # Merge global tags with note-specific tags + current_tags = current_note.get("tags", "").strip() + if current_tags: + current_note["tags"] = ( + f"{current_tags} {v.replace(',', '')}" + ) + else: + current_note["tags"] = v.replace(",", "") elif k in ("markdown", "md"): current_note["markdown"] = v in ("true", "yes") else: diff --git a/src/apyanki/utilities.py b/src/apyanki/utilities.py new file mode 100644 index 0000000..c95dbfd --- /dev/null +++ b/src/apyanki/utilities.py @@ -0,0 +1,151 @@ +"""Simple utility functions.""" + +import os +import shutil +from collections.abc import Generator +from contextlib import contextmanager, redirect_stdout +from io import TextIOWrapper +from subprocess import PIPE, Popen, call +from tempfile import NamedTemporaryFile +from types import TracebackType +from typing import Any, TypeVar + +import readchar +from click import Abort + +from apyanki.console import console + + +class cd: + """Context manager for changing the current working directory""" + + def __init__(self, newPath: str) -> None: + self.newPath: str = os.path.expanduser(newPath) + self.savedPath: str = "" + + def __enter__(self) -> None: + self.savedPath = os.getcwd() + os.chdir(self.newPath) + + def __exit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + os.chdir(self.savedPath) + + +def edit_file(filepath: str) -> int: + """Use $VISUAL or $EDITOR to edit file at 
given path""" + editor = os.environ.get("VISUAL", os.environ.get("EDITOR", "vim")) + return call([editor, filepath]) + + +def edit_text(input_text: str, prefix: str = "") -> str: + """Use EDITOR to edit text (from a temporary file)""" + if prefix: + prefix = prefix + "_" + + with NamedTemporaryFile(mode="w+", prefix=prefix, suffix=".md") as tf: + _ = tf.write(input_text) + tf.flush() + _ = edit_file(tf.name) + _ = tf.seek(0) + edited_message = tf.read().strip() + + return edited_message + + +chooseType = TypeVar("chooseType") + + +def choose(items: list[chooseType], text: str = "Choose from list:") -> chooseType: + """Choose from list of items""" + if shutil.which("fzf"): + return choose_with_fzf(items, text) + return choose_from_list(items, text) + + +def choose_with_fzf( + items: list[chooseType], text: str = "Choose from list:" +) -> chooseType: + """Choose from list of items with fzf""" + fzf_input = "\n".join(map(str, items)).encode("utf-8") + + fzf_process = Popen( + ["fzf", "--prompt", f"{text}> "], + stdin=PIPE, + stdout=PIPE, + stderr=PIPE, + ) + stdout, _ = fzf_process.communicate(input=fzf_input) + + if fzf_process.returncode != 0: + raise Abort() + + selected_item_str = stdout.decode("utf-8").strip() + + # Find the selected item in the original list to preserve its type + for item in items: + if str(item) == selected_item_str: + return item + + # This should not be reached if fzf returns a valid selection + raise Abort() + + +def choose_from_list( + items: list[chooseType], text: str = "Choose from list:" +) -> chooseType: + """Choose from list of items""" + console.print(text) + for i, element in enumerate(items): + console.print(f"{i + 1}: {element}") + + index = _read_number_between(1, len(items)) - 1 + return items[index] + + +@contextmanager +def suppress_stdout() -> Generator[TextIOWrapper, Any, Any]: + """A context manager that redirects stdout to devnull""" + with open(os.devnull, "w", encoding="utf8") as fnull: + with redirect_stdout(fnull) as 
out: + yield out + + +def _read_number_between(first: int, last: int) -> int: + """Read number from user input between first and last (inclusive)""" + console.print("> ", end="") + while True: + choice_str = "" + choice_int = 0 + choice_digits = 0 + max_digits = len(str(last)) + + while choice_digits < max_digits: + if choice_digits > 0 and int(choice_str + "0") > last: + break + + char = readchar.readchar() + if char == "\n": + break + + try: + _ = int(char) + except ValueError: + continue + + next_int = int(choice_str + char) + if next_int > 0: + console.print(char, end="") + choice_str += char + choice_int = next_int + choice_digits += 1 + + if first <= choice_int <= last: + console.print("") + return choice_int + + console.print(f"\nPlease type number between {first} and {last}!\n> ", end="") diff --git a/tests/common.py b/tests/common.py index d0d546b..cbac543 100644 --- a/tests/common.py +++ b/tests/common.py @@ -1,15 +1,28 @@ """Implement some basic test fixtures""" -# pylint: disable=too-few-public-methods import os import tempfile import shutil +import pytest -from apy.anki import Anki +from apyanki.anki import Anki testDir = os.path.dirname(__file__) +@pytest.fixture +def collection(): + """Create a temporary Anki collection for testing.""" + tmppath = os.path.join(tempfile.gettempdir(), "tempfile_test.anki2") + shutil.copy2(testDir + "/data/test_base/Test/collection.anki2", tmppath) + + yield tmppath + + # Clean up after test + if os.path.exists(tmppath): + os.remove(tmppath) + + class AnkiTest: """Create Anki collection wrapper""" diff --git a/tests/test_batch_edit.py b/tests/test_batch_edit.py index 803c543..c8450f6 100644 --- a/tests/test_batch_edit.py +++ b/tests/test_batch_edit.py @@ -1,8 +1,12 @@ """Test batch editing""" +# ruff: noqa: F401, F811 +import os import pytest +import textwrap -from common import testDir, AnkiSimple +from common import testDir, AnkiSimple, collection +from apyanki.anki import Anki pytestmark = 
pytest.mark.filterwarnings("ignore") @@ -20,3 +24,355 @@ def test_change_tags(): a.change_tags(query, "test", add=False) assert len(list(a.find_notes(query))) == 0 + + +def test_add_from_file(collection): + """Test adding a note from a Markdown file.""" + with open("test.md", "w") as f: + f.write( + textwrap.dedent( + """ + model: Basic + tags: marked + + # Note 1 + ## Front + Question? + + ## Back + Answer. + """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add note + note = a.add_notes_from_file("test.md")[0] + assert note.n is not None + assert note.model_name == "Basic" + assert note.n.tags == ["marked"] + assert "Question?" in note.n.fields[0] + assert "Answer." in note.n.fields[1] + + # Clean up + os.remove("test.md") + + +def test_update_from_file(collection): + """Test updating a note from a Markdown file.""" + # First create a note + with open("test.md", "w") as f: + f.write( + textwrap.dedent( + """\ + model: Basic + tags: marked + + # Note 1 + ## Front + Original question? + + ## Back + Original answer. + """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add initial note + note = a.add_notes_from_file("test.md")[0] + note_id = note.n.id + + # Now create update file with the note ID + with open("test_update.md", "w") as f: + f.write( + textwrap.dedent( + f"""\ + model: Basic + tags: marked updated + nid: {note_id} + + # Note 1 + ## Front + Updated question? + + ## Back + Updated answer. + """ + ) + ) + + # Update the note + updated_note = a.add_notes_from_file("test_update.md")[0] + + # Verify it's the same note but updated + assert updated_note.n.id == note_id + assert updated_note.model_name == "Basic" + assert sorted(updated_note.n.tags) == ["marked", "updated"] + assert "Updated question?" in updated_note.n.fields[0] + assert "Updated answer." 
in updated_note.n.fields[1] + + # Clean up + os.remove("test.md") + os.remove("test_update.md") + + +def test_update_from_file_by_cid(collection): + """Test updating a note from a Markdown file using card ID.""" + # First create a note + with open("test.md", "w") as f: + f.write( + textwrap.dedent( + """\ + model: Basic + tags: marked + + # Note 1 + ## Front + Original question? + + ## Back + Original answer. + """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add initial note + note = a.add_notes_from_file("test.md")[0] + card_id = note.n.cards()[0].id + + # Now create update file with the card ID + with open("test_update_cid.md", "w") as f: + f.write( + textwrap.dedent( + f"""\ + model: Basic + tags: marked card-updated + cid: {card_id} + + # Note 1 + ## Front + Updated by card ID! + + ## Back + Updated answer via card ID. + """ + ) + ) + + # Update the note + updated_note = a.add_notes_from_file("test_update_cid.md")[0] + + # Verify it's the same note but updated + assert updated_note.n.id == note.n.id + assert sorted(updated_note.n.tags) == ["card-updated", "marked"] + assert "Updated by card ID!" in updated_note.n.fields[0] + assert "Updated answer via card ID." in updated_note.n.fields[1] + + # Clean up + os.remove("test.md") + os.remove("test_update_cid.md") + + +def test_update_from_file_new_and_existing(collection): + """Test updating a file with both new and existing notes.""" + # First create a note + with open("test.md", "w") as f: + f.write( + textwrap.dedent( + """\ + model: Basic + tags: marked + + # Note 1 + ## Front + Original question? + + ## Back + Original answer. 
+ """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add initial note + note = a.add_notes_from_file("test.md")[0] + note_id = note.n.id + + # Now create update file with both the existing note and a new note + with open("test_mixed.md", "w") as f: + f.write( + textwrap.dedent( + f"""\ + model: Basic + tags: common-tag + + # Existing Note + nid: {note_id} + tags: existing-updated + + ## Front + Updated existing note. + + ## Back + Updated content. + + # New Note + tags: new-note + + ## Front + This is a new note. + + ## Back + Brand new content. + """ + ) + ) + + # Update the note + updated_notes = a.add_notes_from_file("test_mixed.md") + + # Verify we have two notes + assert len(updated_notes) == 2 + + # Find the existing and new notes + existing_note = next((n for n in updated_notes if n.n.id == note_id), None) + new_note = next((n for n in updated_notes if n.n.id != note_id), None) + + # Verify existing note was updated + assert existing_note is not None + assert sorted(existing_note.n.tags) == ["common-tag", "existing-updated"] + assert "Updated existing note." in existing_note.n.fields[0] + assert "Updated content." in existing_note.n.fields[1] + + # Verify new note was created + assert new_note is not None + assert sorted(new_note.n.tags) == ["common-tag", "new-note"] + assert "This is a new note." in new_note.n.fields[0] + assert "Brand new content." 
in new_note.n.fields[1] + + # Clean up + os.remove("test.md") + os.remove("test_mixed.md") + + +def test_update_file_with_note_ids(collection): + """Test that --update-file option updates the original file with note IDs.""" + # First create a note file without IDs + with open("test_no_ids.md", "w") as f: + f.write( + textwrap.dedent( + """\ + model: Basic + tags: test-update-file + + # Note 1 + ## Front + Test question for auto-update + + ## Back + Test answer for auto-update + + # Note 2 + ## Front + Another test question + + ## Back + Another test answer + """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add notes with update_file=True + notes = a.add_notes_from_file("test_no_ids.md", update_origin_file=True) + + # Verify two notes were added + assert len(notes) == 2 + + # Read the file again to check if IDs were added + with open("test_no_ids.md", "r") as f: + updated_content = f.read() + + # The file should now contain nid: lines + assert f"nid: {notes[0].n.id}" in updated_content + assert f"nid: {notes[1].n.id}" in updated_content + + # Clean up + os.remove("test_no_ids.md") + + +def test_update_file_with_mixed_notes(collection): + """Test that --update-file option updates only new notes in update-from-file.""" + # First create a note to get its ID + with open("test_initial.md", "w") as f: + f.write( + textwrap.dedent( + """\ + model: Basic + tags: initial-note + + # Initial Note + ## Front + Initial question + + ## Back + Initial answer + """ + ) + ) + + with Anki(collection_db_path=collection) as a: + # Add the initial note + initial_note = a.add_notes_from_file("test_initial.md")[0] + note_id = initial_note.n.id + + # Now create a file with the existing note ID and a new note + with open("test_update_mix.md", "w") as f: + f.write( + textwrap.dedent( + f"""\ + model: Basic + tags: common-tag + + # Existing Note + nid: {note_id} + tags: update-note + + ## Front + Updated question text + + ## Back + Updated answer text + + # New Note Without 
ID + tags: new-note-tag + + ## Front + New question without ID + + ## Back + New answer without ID + """ + ) + ) + + # Update notes with update_file=True + notes = a.add_notes_from_file("test_update_mix.md", update_origin_file=True) + + # Verify two notes were affected + assert len(notes) == 2 + + # Read the updated file + with open("test_update_mix.md", "r") as f: + updated_content = f.read() + + # Verify the original ID is preserved and the new note got an ID + new_note = next(n for n in notes if n.n.id != note_id) + assert f"nid: {note_id}" in updated_content # Original ID + assert f"nid: {new_note.n.id}" in updated_content # New ID + + # Clean up + os.remove("test_initial.md") + os.remove("test_update_mix.md") diff --git a/tests/test_cli.py b/tests/test_cli.py index f78170a..82a1e59 100644 --- a/tests/test_cli.py +++ b/tests/test_cli.py @@ -6,7 +6,7 @@ import pytest from click.testing import CliRunner -from apy.cli import main +from apyanki.cli import main test_data_dir = "tests/data/" test_collection_dir = test_data_dir + "test_base/" @@ -37,13 +37,27 @@ def test_cli_base_directory(): @pytest.mark.parametrize( "note_files", [test_data_dir + file for file in note_files_input] ) -def test_cli_add_from_file(note_files): - """Test 'apy add-from-file' for various note file inputs.""" +def test_cli_update_from_file(note_files): + """Test 'apy update-from-file' for various note file inputs.""" runner = CliRunner() with tempfile.TemporaryDirectory() as tmpdirname: shutil.copytree(test_collection_dir, tmpdirname, dirs_exist_ok=True) - result = runner.invoke(main, ["-b", tmpdirname, "add-from-file", note_files]) + result = runner.invoke(main, ["-b", tmpdirname, "update-from-file", note_files]) + + assert result.exit_code == 0 + + +def test_cli_add_from_file_alias(): + """Test that 'apy add-from-file' works as an alias for 'update-from-file'.""" + runner = CliRunner() + + with tempfile.TemporaryDirectory() as tmpdirname: + shutil.copytree(test_collection_dir, tmpdirname, 
dirs_exist_ok=True) + # Should work as an alias to update-from-file + result = runner.invoke( + main, ["-b", tmpdirname, "add-from-file", test_data_dir + "basic.md"] + ) assert result.exit_code == 0 diff --git a/tests/test_errors.py b/tests/test_errors.py index a6ac515..03d9405 100644 --- a/tests/test_errors.py +++ b/tests/test_errors.py @@ -3,13 +3,13 @@ import pytest from click import Abort -from apy.anki import Anki +from apyanki.anki import Anki def test_basepath_is_none(): """Blah""" with pytest.raises(Abort): - Anki(base=None) + Anki(base_path=None) with pytest.raises(Abort): - Anki(base="/non/existing/path") + Anki(base_path="/non/existing/path") diff --git a/tests/test_markdown_latex_mode.py b/tests/test_markdown_latex_mode.py new file mode 100644 index 0000000..cd6b56e --- /dev/null +++ b/tests/test_markdown_latex_mode.py @@ -0,0 +1,85 @@ +from bs4 import BeautifulSoup +from common import AnkiEmpty + +from apyanki import fields +from apyanki.config import cfg + + +def test_mathjax_to_mdlatex() -> None: + for [in_string, expect] in [ + ["\\[block\\]", "$$block$$"], + ["\\(inline\\)", "$inline$"], + ]: + out = fields._mathjax_to_mdlatex(in_string) + assert out == expect + + +def test_latex_to_mdlatex() -> None: + for [in_string, expect] in [ + [r"[$$]block[/$$]", "$$block$$"], + [r"[$]inline[/$]", "$inline$"], + ]: + out = fields._latex_to_mdlatex(in_string) + assert out == expect + + +def test_markdown_to_latex_1() -> None: + cfg["markdown_latex_mode"] = "latex" + + with AnkiEmpty() as a: + note = a.add_notes_single( + ["This is $$block$$ math.", "This is $inline$ math."], + markdown=True, + ) + assert "data-original-markdown" in note.n.fields[0] + assert "[$$]block[/$$]" in note.n.fields[0] + assert "[$]inline[/$]" in note.n.fields[1] + + +def test_markdown_to_latex_2() -> None: + cfg["markdown_latex_mode"] = "latex" + + with AnkiEmpty() as a: + input = r"$\mathbb{R}_+$ and $\mathbb{R}_+$" + expected = r"[$]\mathbb{R}_+[/$] and [$]\mathbb{R}_+[/$]" + + 
note = a.add_notes_single([input, ""], markdown=True) + soup = BeautifulSoup(note.n.fields[0], "html.parser") + assert soup.text == expected + + +def test_markdown_to_latex_3() -> None: + cfg["markdown_latex_mode"] = "latex" + + with AnkiEmpty() as a: + input = r"$$\mathbb{R}_+$$ and $$\mathbb{R}_+$$" + expected = r"[$$]\mathbb{R}_+[/$$] and [$$]\mathbb{R}_+[/$$]" + + note = a.add_notes_single([input, ""], markdown=True) + soup = BeautifulSoup(note.n.fields[0], "html.parser") + assert soup.text == expected + + +def test_markdown_to_mathjax_1() -> None: + cfg["markdown_latex_mode"] = "mathjax" + + with AnkiEmpty() as a: + note = a.add_notes_single( + ["This is $$block$$ math.", "This is $inline$ math."], + markdown=True, + ) + assert "data-original-markdown" in note.n.fields[0] + assert "\\[block\\]" in note.n.fields[0] + assert "\\(inline\\)" in note.n.fields[1] + + +def test_markdown_to_mathjax_2() -> None: + cfg["markdown_latex_mode"] = "mathjax" + + with AnkiEmpty() as a: + input = r"$\mathbb{R}_+$ and $$\mathbb{R}_+$$" + expected = r"\(\mathbb{R}_+\) and \[\mathbb{R}_+\]" + + note = a.add_notes_single([input, ""], markdown=True) + soup = BeautifulSoup(note.n.fields[0], "html.parser") + assert soup.text == expected diff --git a/uv.lock b/uv.lock new file mode 100644 index 0000000..fb8df1b --- /dev/null +++ b/uv.lock @@ -0,0 +1,546 @@ +version = 1 +revision = 3 +requires-python = ">=3.14" + +[[package]] +name = "anki" +version = "25.9.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "decorator" }, + { name = "distro", marker = "sys_platform != 'darwin' and sys_platform != 'win32'" }, + { name = "markdown" }, + { name = "orjson" }, + { name = "protobuf" }, + { name = "requests", extra = ["socks"] }, + { name = "typing-extensions" }, +] +wheels = [ + { url = "https://files.pythonhosted.org/packages/49/ea/4befc79361774c8e1e5421a884aa8090d4b97c5b83087510f4a1c82ee521/anki-25.9.2-cp39-abi3-macosx_12_0_arm64.whl", hash = 
"sha256:76788f41246dfdb383bf318f1a0c141c57bfe693441fd3d82500e17ad7c6ac04", size = 9676135, upload-time = "2025-09-17T07:11:45.028Z" }, + { url = "https://files.pythonhosted.org/packages/b4/ae/147ad74fa47af1c98984ce29567a9a84006e1d5c38ac1ccf597ea2bd1be4/anki-25.9.2-cp39-abi3-macosx_12_0_x86_64.whl", hash = "sha256:245e6cd597743c70df40dd18ea22ca69f8f25e7749bb3433af5c43336ead3747", size = 10155215, upload-time = "2025-09-17T07:11:50.742Z" }, + { url = "https://files.pythonhosted.org/packages/c1/49/484a786ea0e1b3659de9478f2546368c5970da60a1cd403cec1fa2f81d65/anki-25.9.2-cp39-abi3-manylinux_2_36_aarch64.whl", hash = "sha256:a5705f472d8173317ce4fbec9ca26bd6ee6e5ca4f864f716e7b3cbdf8096a276", size = 10905312, upload-time = "2025-09-17T07:11:55.751Z" }, + { url = "https://files.pythonhosted.org/packages/22/1c/37fe0377fd5fbfe27b17db20679d76aeb1cef7be3ddfb22e24c0bb62cf96/anki-25.9.2-cp39-abi3-manylinux_2_36_x86_64.whl", hash = "sha256:8ddf94130e36a85e57955b9c7d083a9ab812319f7f427e75f51e32b734222b26", size = 11400639, upload-time = "2025-09-17T07:12:01.723Z" }, + { url = "https://files.pythonhosted.org/packages/29/4a/ec3f02075e957031c856717f8d524780c450700644f4826e9fcd81e4b6c3/anki-25.9.2-cp39-abi3-win_amd64.whl", hash = "sha256:0a7e70c7cfea844c8d06917025f735d4c6e3c2558eb70b1828993f4bce171219", size = 9989743, upload-time = "2025-09-17T07:12:07.061Z" }, +] + +[[package]] +name = "apyanki" +version = "0.19.4" +source = { editable = "." 
} +dependencies = [ + { name = "anki" }, + { name = "beautifulsoup4" }, + { name = "click" }, + { name = "html5lib" }, + { name = "markdown" }, + { name = "markdownify" }, + { name = "readchar" }, + { name = "rich" }, +] + +[package.dev-dependencies] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "ruff" }, + { name = "types-beautifulsoup4" }, + { name = "types-markdown" }, +] + +[package.metadata] +requires-dist = [ + { name = "anki", specifier = ">=25.9" }, + { name = "beautifulsoup4" }, + { name = "click" }, + { name = "html5lib" }, + { name = "markdown" }, + { name = "markdownify" }, + { name = "readchar" }, + { name = "rich" }, +] + +[package.metadata.requires-dev] +dev = [ + { name = "mypy" }, + { name = "pytest" }, + { name = "ruff" }, + { name = "types-beautifulsoup4" }, + { name = "types-markdown" }, +] + +[[package]] +name = "beautifulsoup4" +version = "4.14.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "soupsieve" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c3/b0/1c6a16426d389813b48d95e26898aff79abbde42ad353958ad95cc8c9b21/beautifulsoup4-4.14.3.tar.gz", hash = "sha256:6292b1c5186d356bba669ef9f7f051757099565ad9ada5dd630bd9de5fa7fb86", size = 627737, upload-time = "2025-11-30T15:08:26.084Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/1a/39/47f9197bdd44df24d67ac8893641e16f386c984a0619ef2ee4c51fbbc019/beautifulsoup4-4.14.3-py3-none-any.whl", hash = "sha256:0918bfe44902e6ad8d57732ba310582e98da931428d231a5ecb9e7c703a735bb", size = 107721, upload-time = "2025-11-30T15:08:24.087Z" }, +] + +[[package]] +name = "certifi" +version = "2026.2.25" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, 
upload-time = "2026-02-25T02:54:17.342Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, +] + +[[package]] +name = "charset-normalizer" +version = "3.4.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/1d/35/02daf95b9cd686320bb622eb148792655c9412dbb9b67abb5694e5910a24/charset_normalizer-3.4.5.tar.gz", hash = "sha256:95adae7b6c42a6c5b5b559b1a99149f090a57128155daeea91732c8d970d8644", size = 134804, upload-time = "2026-03-06T06:03:19.46Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/be/0f0fd9bb4a7fa4fb5067fb7d9ac693d4e928d306f80a0d02bde43a7c4aee/charset_normalizer-3.4.5-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:8197abe5ca1ffb7d91e78360f915eef5addff270f8a71c1fc5be24a56f3e4873", size = 280232, upload-time = "2026-03-06T06:02:01.508Z" }, + { url = "https://files.pythonhosted.org/packages/28/02/983b5445e4bef49cd8c9da73a8e029f0825f39b74a06d201bfaa2e55142a/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a2aecdb364b8a1802afdc7f9327d55dad5366bc97d8502d0f5854e50712dbc5f", size = 189688, upload-time = "2026-03-06T06:02:02.857Z" }, + { url = "https://files.pythonhosted.org/packages/d0/88/152745c5166437687028027dc080e2daed6fe11cfa95a22f4602591c42db/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a66aa5022bf81ab4b1bebfb009db4fd68e0c6d4307a1ce5ef6a26e5878dfc9e4", size = 206833, upload-time = "2026-03-06T06:02:05.127Z" }, + { url = 
"https://files.pythonhosted.org/packages/cb/0f/ebc15c8b02af2f19be9678d6eed115feeeccc45ce1f4b098d986c13e8769/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d77f97e515688bd615c1d1f795d540f32542d514242067adcb8ef532504cb9ee", size = 202879, upload-time = "2026-03-06T06:02:06.446Z" }, + { url = "https://files.pythonhosted.org/packages/38/9c/71336bff6934418dc8d1e8a1644176ac9088068bc571da612767619c97b3/charset_normalizer-3.4.5-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:01a1ed54b953303ca7e310fafe0fe347aab348bd81834a0bcd602eb538f89d66", size = 195764, upload-time = "2026-03-06T06:02:08.763Z" }, + { url = "https://files.pythonhosted.org/packages/b7/95/ce92fde4f98615661871bc282a856cf9b8a15f686ba0af012984660d480b/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:b2d37d78297b39a9eb9eb92c0f6df98c706467282055419df141389b23f93362", size = 183728, upload-time = "2026-03-06T06:02:10.137Z" }, + { url = "https://files.pythonhosted.org/packages/1c/e7/f5b4588d94e747ce45ae680f0f242bc2d98dbd4eccfab73e6160b6893893/charset_normalizer-3.4.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e71bbb595973622b817c042bd943c3f3667e9c9983ce3d205f973f486fec98a7", size = 192937, upload-time = "2026-03-06T06:02:11.663Z" }, + { url = "https://files.pythonhosted.org/packages/f9/29/9d94ed6b929bf9f48bf6ede6e7474576499f07c4c5e878fb186083622716/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4cd966c2559f501c6fd69294d082c2934c8dd4719deb32c22961a5ac6db0df1d", size = 192040, upload-time = "2026-03-06T06:02:13.489Z" }, + { url = "https://files.pythonhosted.org/packages/15/d2/1a093a1cf827957f9445f2fe7298bcc16f8fc5e05c1ed2ad1af0b239035e/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:d5e52d127045d6ae01a1e821acfad2f3a1866c54d0e837828538fabe8d9d1bd6", size = 184107, 
upload-time = "2026-03-06T06:02:14.83Z" }, + { url = "https://files.pythonhosted.org/packages/0f/7d/82068ce16bd36135df7b97f6333c5d808b94e01d4599a682e2337ed5fd14/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:30a2b1a48478c3428d047ed9690d57c23038dac838a87ad624c85c0a78ebeb39", size = 208310, upload-time = "2026-03-06T06:02:16.165Z" }, + { url = "https://files.pythonhosted.org/packages/84/4e/4dfb52307bb6af4a5c9e73e482d171b81d36f522b21ccd28a49656baa680/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:d8ed79b8f6372ca4254955005830fd61c1ccdd8c0fac6603e2c145c61dd95db6", size = 192918, upload-time = "2026-03-06T06:02:18.144Z" }, + { url = "https://files.pythonhosted.org/packages/08/a4/159ff7da662cf7201502ca89980b8f06acf3e887b278956646a8aeb178ab/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:c5af897b45fa606b12464ccbe0014bbf8c09191e0a66aab6aa9d5cf6e77e0c94", size = 204615, upload-time = "2026-03-06T06:02:19.821Z" }, + { url = "https://files.pythonhosted.org/packages/d6/62/0dd6172203cb6b429ffffc9935001fde42e5250d57f07b0c28c6046deb6b/charset_normalizer-3.4.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:1088345bcc93c58d8d8f3d783eca4a6e7a7752bbff26c3eee7e73c597c191c2e", size = 197784, upload-time = "2026-03-06T06:02:21.86Z" }, + { url = "https://files.pythonhosted.org/packages/c7/5e/1aab5cb737039b9c59e63627dc8bbc0d02562a14f831cc450e5f91d84ce1/charset_normalizer-3.4.5-cp314-cp314-win32.whl", hash = "sha256:ee57b926940ba00bca7ba7041e665cc956e55ef482f851b9b65acb20d867e7a2", size = 133009, upload-time = "2026-03-06T06:02:23.289Z" }, + { url = "https://files.pythonhosted.org/packages/40/65/e7c6c77d7aaa4c0d7974f2e403e17f0ed2cb0fc135f77d686b916bf1eead/charset_normalizer-3.4.5-cp314-cp314-win_amd64.whl", hash = "sha256:4481e6da1830c8a1cc0b746b47f603b653dadb690bcd851d039ffaefe70533aa", size = 143511, upload-time = "2026-03-06T06:02:26.195Z" }, + { url = 
"https://files.pythonhosted.org/packages/ba/91/52b0841c71f152f563b8e072896c14e3d83b195c188b338d3cc2e582d1d4/charset_normalizer-3.4.5-cp314-cp314-win_arm64.whl", hash = "sha256:97ab7787092eb9b50fb47fa04f24c75b768a606af1bcba1957f07f128a7219e4", size = 133775, upload-time = "2026-03-06T06:02:27.473Z" }, + { url = "https://files.pythonhosted.org/packages/c5/60/3a621758945513adfd4db86827a5bafcc615f913dbd0b4c2ed64a65731be/charset_normalizer-3.4.5-py3-none-any.whl", hash = "sha256:9db5e3fcdcee89a78c04dffb3fe33c79f77bd741a624946db2591c81b2fc85b0", size = 55455, upload-time = "2026-03-06T06:03:17.827Z" }, +] + +[[package]] +name = "click" +version = "8.3.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, +] + +[[package]] +name = "colorama" +version = "0.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/53/6f443c9a4a8358a93a6792e2acffb9d9d5cb0a5cfd8802644b7b1c9a02e4/colorama-0.4.6.tar.gz", hash = "sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44", size = 27697, upload-time = "2022-10-25T02:36:22.414Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d1/d6/3965ed04c63042e047cb6a3e6ed1a63a35087b6a609aa3a15ed8ac56c221/colorama-0.4.6-py2.py3-none-any.whl", hash = 
"sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6", size = 25335, upload-time = "2022-10-25T02:36:20.889Z" }, +] + +[[package]] +name = "decorator" +version = "5.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/43/fa/6d96a0978d19e17b68d634497769987b16c8f4cd0a7a05048bec693caa6b/decorator-5.2.1.tar.gz", hash = "sha256:65f266143752f734b0a7cc83c46f4618af75b8c5911b00ccb61d0ac9b6da0360", size = 56711, upload-time = "2025-02-24T04:41:34.073Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/4e/8c/f3147f5c4b73e7550fe5f9352eaa956ae838d5c51eb58e7a25b9f3e2643b/decorator-5.2.1-py3-none-any.whl", hash = "sha256:d316bb415a2d9e2d2b3abcc4084c6502fc09240e292cd76a76afc106a1c8e04a", size = 9190, upload-time = "2025-02-24T04:41:32.565Z" }, +] + +[[package]] +name = "distro" +version = "1.9.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fc/f8/98eea607f65de6527f8a2e8885fc8015d3e6f5775df186e443e0964a11c3/distro-1.9.0.tar.gz", hash = "sha256:2fa77c6fd8940f116ee1d6b94a2f90b13b5ea8d019b98bc8bafdcabcdd9bdbed", size = 60722, upload-time = "2023-12-24T09:54:32.31Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/12/b3/231ffd4ab1fc9d679809f356cebee130ac7daa00d6d6f3206dd4fd137e9e/distro-1.9.0-py3-none-any.whl", hash = "sha256:7bffd925d65168f85027d8da9af6bddab658135b840670a223589bc0c8ef02b2", size = 20277, upload-time = "2023-12-24T09:54:30.421Z" }, +] + +[[package]] +name = "html5lib" +version = "1.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "six" }, + { name = "webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/ac/b6/b55c3f49042f1df3dcd422b7f224f939892ee94f22abcf503a9b7339eaf2/html5lib-1.1.tar.gz", hash = "sha256:b2e5b40261e20f354d198eae92afc10d750afb487ed5e50f9c4eaf07c184146f", size = 272215, upload-time = "2020-06-22T23:32:38.834Z" } +wheels = [ 
+ { url = "https://files.pythonhosted.org/packages/6c/dd/a834df6482147d48e225a49515aabc28974ad5a4ca3215c18a882565b028/html5lib-1.1-py2.py3-none-any.whl", hash = "sha256:0d78f8fde1c230e99fe37986a60526d7049ed4bf8a9fadbad5f00e22e58e041d", size = 112173, upload-time = "2020-06-22T23:32:36.781Z" }, +] + +[[package]] +name = "idna" +version = "3.11" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, +] + +[[package]] +name = "iniconfig" +version = "2.3.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, +] + +[[package]] +name = "librt" +version = "0.8.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/56/9c/b4b0c54d84da4a94b37bd44151e46d5e583c9534c7e02250b961b1b6d8a8/librt-0.8.1.tar.gz", hash = 
"sha256:be46a14693955b3bd96014ccbdb8339ee8c9346fbe11c1b78901b55125f14c73", size = 177471, upload-time = "2026-02-17T16:13:06.101Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c9/6a/907ef6800f7bca71b525a05f1839b21f708c09043b1c6aa77b6b827b3996/librt-0.8.1-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:6cfa7fe54fd4d1f47130017351a959fe5804bda7a0bc7e07a2cdbc3fdd28d34f", size = 66081, upload-time = "2026-02-17T16:12:12.766Z" }, + { url = "https://files.pythonhosted.org/packages/1b/18/25e991cd5640c9fb0f8d91b18797b29066b792f17bf8493da183bf5caabe/librt-0.8.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:228c2409c079f8c11fb2e5d7b277077f694cb93443eb760e00b3b83cb8b3176c", size = 68309, upload-time = "2026-02-17T16:12:13.756Z" }, + { url = "https://files.pythonhosted.org/packages/a4/36/46820d03f058cfb5a9de5940640ba03165ed8aded69e0733c417bb04df34/librt-0.8.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:7aae78ab5e3206181780e56912d1b9bb9f90a7249ce12f0e8bf531d0462dd0fc", size = 196804, upload-time = "2026-02-17T16:12:14.818Z" }, + { url = "https://files.pythonhosted.org/packages/59/18/5dd0d3b87b8ff9c061849fbdb347758d1f724b9a82241aa908e0ec54ccd0/librt-0.8.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:172d57ec04346b047ca6af181e1ea4858086c80bdf455f61994c4aa6fc3f866c", size = 206907, upload-time = "2026-02-17T16:12:16.513Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/ef04902aad1424fd7299b62d1890e803e6ab4018c3044dca5922319c4b97/librt-0.8.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6b1977c4ea97ce5eb7755a78fae68d87e4102e4aaf54985e8b56806849cc06a3", size = 221217, upload-time = "2026-02-17T16:12:17.906Z" }, + { url = 
"https://files.pythonhosted.org/packages/6d/ff/7e01f2dda84a8f5d280637a2e5827210a8acca9a567a54507ef1c75b342d/librt-0.8.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:10c42e1f6fd06733ef65ae7bebce2872bcafd8d6e6b0a08fe0a05a23b044fb14", size = 214622, upload-time = "2026-02-17T16:12:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/1e/8c/5b093d08a13946034fed57619742f790faf77058558b14ca36a6e331161e/librt-0.8.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4c8dfa264b9193c4ee19113c985c95f876fae5e51f731494fc4e0cf594990ba7", size = 221987, upload-time = "2026-02-17T16:12:20.331Z" }, + { url = "https://files.pythonhosted.org/packages/d3/cc/86b0b3b151d40920ad45a94ce0171dec1aebba8a9d72bb3fa00c73ab25dd/librt-0.8.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:01170b6729a438f0dedc4a26ed342e3dc4f02d1000b4b19f980e1877f0c297e6", size = 215132, upload-time = "2026-02-17T16:12:21.54Z" }, + { url = "https://files.pythonhosted.org/packages/fc/be/8588164a46edf1e69858d952654e216a9a91174688eeefb9efbb38a9c799/librt-0.8.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:7b02679a0d783bdae30d443025b94465d8c3dc512f32f5b5031f93f57ac32071", size = 215195, upload-time = "2026-02-17T16:12:23.073Z" }, + { url = "https://files.pythonhosted.org/packages/f5/f2/0b9279bea735c734d69344ecfe056c1ba211694a72df10f568745c899c76/librt-0.8.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:190b109bb69592a3401fe1ffdea41a2e73370ace2ffdc4a0e8e2b39cdea81b78", size = 237946, upload-time = "2026-02-17T16:12:24.275Z" }, + { url = "https://files.pythonhosted.org/packages/e9/cc/5f2a34fbc8aeb35314a3641f9956fa9051a947424652fad9882be7a97949/librt-0.8.1-cp314-cp314-win32.whl", hash = "sha256:e70a57ecf89a0f64c24e37f38d3fe217a58169d2fe6ed6d70554964042474023", size = 50689, upload-time = "2026-02-17T16:12:25.766Z" }, + { url = 
"https://files.pythonhosted.org/packages/a0/76/cd4d010ab2147339ca2b93e959c3686e964edc6de66ddacc935c325883d7/librt-0.8.1-cp314-cp314-win_amd64.whl", hash = "sha256:7e2f3edca35664499fbb36e4770650c4bd4a08abc1f4458eab9df4ec56389730", size = 57875, upload-time = "2026-02-17T16:12:27.465Z" }, + { url = "https://files.pythonhosted.org/packages/84/0f/2143cb3c3ca48bd3379dcd11817163ca50781927c4537345d608b5045998/librt-0.8.1-cp314-cp314-win_arm64.whl", hash = "sha256:0d2f82168e55ddefd27c01c654ce52379c0750ddc31ee86b4b266bcf4d65f2a3", size = 48058, upload-time = "2026-02-17T16:12:28.556Z" }, + { url = "https://files.pythonhosted.org/packages/d2/0e/9b23a87e37baf00311c3efe6b48d6b6c168c29902dfc3f04c338372fd7db/librt-0.8.1-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:2c74a2da57a094bd48d03fa5d196da83d2815678385d2978657499063709abe1", size = 68313, upload-time = "2026-02-17T16:12:29.659Z" }, + { url = "https://files.pythonhosted.org/packages/db/9a/859c41e5a4f1c84200a7d2b92f586aa27133c8243b6cac9926f6e54d01b9/librt-0.8.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:a355d99c4c0d8e5b770313b8b247411ed40949ca44e33e46a4789b9293a907ee", size = 70994, upload-time = "2026-02-17T16:12:31.516Z" }, + { url = "https://files.pythonhosted.org/packages/4c/28/10605366ee599ed34223ac2bf66404c6fb59399f47108215d16d5ad751a8/librt-0.8.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:2eb345e8b33fb748227409c9f1233d4df354d6e54091f0e8fc53acdb2ffedeb7", size = 220770, upload-time = "2026-02-17T16:12:33.294Z" }, + { url = "https://files.pythonhosted.org/packages/af/8d/16ed8fd452dafae9c48d17a6bc1ee3e818fd40ef718d149a8eff2c9f4ea2/librt-0.8.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9be2f15e53ce4e83cc08adc29b26fb5978db62ef2a366fbdf716c8a6c8901040", size = 235409, upload-time = "2026-02-17T16:12:35.443Z" }, + { url = 
"https://files.pythonhosted.org/packages/89/1b/7bdf3e49349c134b25db816e4a3db6b94a47ac69d7d46b1e682c2c4949be/librt-0.8.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:785ae29c1f5c6e7c2cde2c7c0e148147f4503da3abc5d44d482068da5322fd9e", size = 246473, upload-time = "2026-02-17T16:12:36.656Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8a/91fab8e4fd2a24930a17188c7af5380eb27b203d72101c9cc000dbdfd95a/librt-0.8.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1d3a7da44baf692f0c6aeb5b2a09c5e6fc7a703bca9ffa337ddd2e2da53f7732", size = 238866, upload-time = "2026-02-17T16:12:37.849Z" }, + { url = "https://files.pythonhosted.org/packages/b9/e0/c45a098843fc7c07e18a7f8a24ca8496aecbf7bdcd54980c6ca1aaa79a8e/librt-0.8.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:5fc48998000cbc39ec0d5311312dda93ecf92b39aaf184c5e817d5d440b29624", size = 250248, upload-time = "2026-02-17T16:12:39.445Z" }, + { url = "https://files.pythonhosted.org/packages/82/30/07627de23036640c952cce0c1fe78972e77d7d2f8fd54fa5ef4554ff4a56/librt-0.8.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:e96baa6820280077a78244b2e06e416480ed859bbd8e5d641cf5742919d8beb4", size = 240629, upload-time = "2026-02-17T16:12:40.889Z" }, + { url = "https://files.pythonhosted.org/packages/fb/c1/55bfe1ee3542eba055616f9098eaf6eddb966efb0ca0f44eaa4aba327307/librt-0.8.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:31362dbfe297b23590530007062c32c6f6176f6099646bb2c95ab1b00a57c382", size = 239615, upload-time = "2026-02-17T16:12:42.446Z" }, + { url = "https://files.pythonhosted.org/packages/2b/39/191d3d28abc26c9099b19852e6c99f7f6d400b82fa5a4e80291bd3803e19/librt-0.8.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cc3656283d11540ab0ea01978378e73e10002145117055e03722417aeab30994", size = 263001, upload-time = "2026-02-17T16:12:43.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/b9/eb/7697f60fbe7042ab4e88f4ee6af496b7f222fffb0a4e3593ef1f29f81652/librt-0.8.1-cp314-cp314t-win32.whl", hash = "sha256:738f08021b3142c2918c03692608baed43bc51144c29e35807682f8070ee2a3a", size = 51328, upload-time = "2026-02-17T16:12:45.148Z" }, + { url = "https://files.pythonhosted.org/packages/7c/72/34bf2eb7a15414a23e5e70ecb9440c1d3179f393d9349338a91e2781c0fb/librt-0.8.1-cp314-cp314t-win_amd64.whl", hash = "sha256:89815a22daf9c51884fb5dbe4f1ef65ee6a146e0b6a8df05f753e2e4a9359bf4", size = 58722, upload-time = "2026-02-17T16:12:46.85Z" }, + { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, +] + +[[package]] +name = "markdown" +version = "3.10.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2b/f4/69fa6ed85ae003c2378ffa8f6d2e3234662abd02c10d216c0ba96081a238/markdown-3.10.2.tar.gz", hash = "sha256:994d51325d25ad8aa7ce4ebaec003febcce822c3f8c911e3b17c52f7f589f950", size = 368805, upload-time = "2026-02-09T14:57:26.942Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/1f/77fa3081e4f66ca3576c896ae5d31c3002ac6607f9747d2e3aa49227e464/markdown-3.10.2-py3-none-any.whl", hash = "sha256:e91464b71ae3ee7afd3017d9f358ef0baf158fd9a298db92f1d4761133824c36", size = 108180, upload-time = "2026-02-09T14:57:25.787Z" }, +] + +[[package]] +name = "markdown-it-py" +version = "4.0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "mdurl" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/5b/f5/4ec618ed16cc4f8fb3b701563655a69816155e79e24a17b651541804721d/markdown_it_py-4.0.0.tar.gz", hash = "sha256:cb0a2b4aa34f932c007117b194e945bd74e0ec24133ceb5bac59009cda1cb9f3", size = 73070, 
upload-time = "2025-08-11T12:57:52.854Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/54/e7d793b573f298e1c9013b8c4dade17d481164aa517d1d7148619c2cedbf/markdown_it_py-4.0.0-py3-none-any.whl", hash = "sha256:87327c59b172c5011896038353a81343b6754500a08cd7a4973bb48c6d578147", size = 87321, upload-time = "2025-08-11T12:57:51.923Z" }, +] + +[[package]] +name = "markdownify" +version = "1.2.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "beautifulsoup4" }, + { name = "six" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/3f/bc/c8c8eea5335341306b0fa7e1cb33c5e1c8d24ef70ddd684da65f41c49c92/markdownify-1.2.2.tar.gz", hash = "sha256:b274f1b5943180b031b699b199cbaeb1e2ac938b75851849a31fd0c3d6603d09", size = 18816, upload-time = "2025-11-16T19:21:18.565Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/43/ce/f1e3e9d959db134cedf06825fae8d5b294bd368aacdd0831a3975b7c4d55/markdownify-1.2.2-py3-none-any.whl", hash = "sha256:3f02d3cc52714084d6e589f70397b6fc9f2f3a8531481bf35e8cc39f975e186a", size = 15724, upload-time = "2025-11-16T19:21:17.622Z" }, +] + +[[package]] +name = "mdurl" +version = "0.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/d6/54/cfe61301667036ec958cb99bd3efefba235e65cdeb9c84d24a8293ba1d90/mdurl-0.1.2.tar.gz", hash = "sha256:bb413d29f5eea38f31dd4754dd7377d4465116fb207585f97bf925588687c1ba", size = 8729, upload-time = "2022-08-14T12:40:10.846Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b3/38/89ba8ad64ae25be8de66a6d463314cf1eb366222074cfda9ee839c56a4b4/mdurl-0.1.2-py3-none-any.whl", hash = "sha256:84008a41e51615a49fc9966191ff91509e3c40b939176e643fd50a5c2196b8f8", size = 9979, upload-time = "2022-08-14T12:40:09.779Z" }, +] + +[[package]] +name = "mypy" +version = "1.19.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "librt", marker = 
"platform_python_implementation != 'PyPy'" }, + { name = "mypy-extensions" }, + { name = "pathspec" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = 
"sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, +] + +[[package]] +name = "mypy-extensions" +version = "1.1.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a2/6e/371856a3fb9d31ca8dac321cda606860fa4548858c0cc45d9d1d4ca2628b/mypy_extensions-1.1.0.tar.gz", hash = "sha256:52e68efc3284861e772bbcd66823fde5ae21fd2fdb51c62a211403730b916558", size = 6343, upload-time = "2025-04-22T14:54:24.164Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/79/7b/2c79738432f5c924bef5071f933bcc9efd0473bac3b4aa584a6f7c1c8df8/mypy_extensions-1.1.0-py3-none-any.whl", hash = "sha256:1be4cccdb0f2482337c4743e60421de3a356cd97508abadd57d47403e94f5505", size = 4963, upload-time = "2025-04-22T14:54:22.983Z" }, +] + +[[package]] +name = "orjson" +version = "3.11.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/53/45/b268004f745ede84e5798b48ee12b05129d19235d0e15267aa57dcdb400b/orjson-3.11.7.tar.gz", hash = "sha256:9b1a67243945819ce55d24a30b59d6a168e86220452d2c96f4d1f093e71c0c49", size = 6144992, upload-time = "2026-02-02T15:38:49.29Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/e9/1e/745565dca749813db9a093c5ebc4bac1a9475c64d54b95654336ac3ed961/orjson-3.11.7-cp314-cp314-macosx_10_15_x86_64.macosx_11_0_arm64.macosx_10_15_universal2.whl", hash = "sha256:de0a37f21d0d364954ad5de1970491d7fbd0fb1ef7417d4d56a36dc01ba0c0a0", size = 228391, upload-time = "2026-02-02T15:38:27.757Z" }, + { url = "https://files.pythonhosted.org/packages/46/19/e40f6225da4d3aa0c8dc6e5219c5e87c2063a560fe0d72a88deb59776794/orjson-3.11.7-cp314-cp314-macosx_15_0_arm64.whl", hash = "sha256:c2428d358d85e8da9d37cba18b8c4047c55222007a84f97156a5b22028dfbfc0", size = 125188, upload-time = "2026-02-02T15:38:29.241Z" }, + { url = "https://files.pythonhosted.org/packages/9d/7e/c4de2babef2c0817fd1f048fd176aa48c37bec8aef53d2fa932983032cce/orjson-3.11.7-cp314-cp314-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3c4bc6c6ac52cdaa267552544c73e486fecbd710b7ac09bc024d5a78555a22f6", size = 128097, upload-time = "2026-02-02T15:38:30.618Z" }, + { url = "https://files.pythonhosted.org/packages/eb/74/233d360632bafd2197f217eee7fb9c9d0229eac0c18128aee5b35b0014fe/orjson-3.11.7-cp314-cp314-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:bd0d68edd7dfca1b2eca9361a44ac9f24b078de3481003159929a0573f21a6bf", size = 123364, upload-time = "2026-02-02T15:38:32.363Z" }, + { url = "https://files.pythonhosted.org/packages/79/51/af79504981dd31efe20a9e360eb49c15f06df2b40e7f25a0a52d9ae888e8/orjson-3.11.7-cp314-cp314-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:623ad1b9548ef63886319c16fa317848e465a21513b31a6ad7b57443c3e0dcf5", size = 129076, upload-time = "2026-02-02T15:38:33.68Z" }, + { url = "https://files.pythonhosted.org/packages/67/e2/da898eb68b72304f8de05ca6715870d09d603ee98d30a27e8a9629abc64b/orjson-3.11.7-cp314-cp314-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6e776b998ac37c0396093d10290e60283f59cfe0fc3fccbd0ccc4bd04dd19892", size = 141705, upload-time = "2026-02-02T15:38:34.989Z" }, + { url = 
"https://files.pythonhosted.org/packages/c5/89/15364d92acb3d903b029e28d834edb8780c2b97404cbf7929aa6b9abdb24/orjson-3.11.7-cp314-cp314-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:652c6c3af76716f4a9c290371ba2e390ede06f6603edb277b481daf37f6f464e", size = 130855, upload-time = "2026-02-02T15:38:36.379Z" }, + { url = "https://files.pythonhosted.org/packages/c2/8b/ecdad52d0b38d4b8f514be603e69ccd5eacf4e7241f972e37e79792212ec/orjson-3.11.7-cp314-cp314-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a56df3239294ea5964adf074c54bcc4f0ccd21636049a2cf3ca9cf03b5d03cf1", size = 133386, upload-time = "2026-02-02T15:38:37.704Z" }, + { url = "https://files.pythonhosted.org/packages/b9/0e/45e1dcf10e17d0924b7c9162f87ec7b4ca79e28a0548acf6a71788d3e108/orjson-3.11.7-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:bda117c4148e81f746655d5a3239ae9bd00cb7bc3ca178b5fc5a5997e9744183", size = 138295, upload-time = "2026-02-02T15:38:39.096Z" }, + { url = "https://files.pythonhosted.org/packages/63/d7/4d2e8b03561257af0450f2845b91fbd111d7e526ccdf737267108075e0ba/orjson-3.11.7-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:23d6c20517a97a9daf1d48b580fcdc6f0516c6f4b5038823426033690b4d2650", size = 408720, upload-time = "2026-02-02T15:38:40.634Z" }, + { url = "https://files.pythonhosted.org/packages/78/cf/d45343518282108b29c12a65892445fc51f9319dc3c552ceb51bb5905ed2/orjson-3.11.7-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:8ff206156006da5b847c9304b6308a01e8cdbc8cce824e2779a5ba71c3def141", size = 144152, upload-time = "2026-02-02T15:38:42.262Z" }, + { url = "https://files.pythonhosted.org/packages/a9/3a/d6001f51a7275aacd342e77b735c71fa04125a3f93c36fee4526bc8c654e/orjson-3.11.7-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:962d046ee1765f74a1da723f4b33e3b228fe3a48bd307acce5021dfefe0e29b2", size = 134814, upload-time = "2026-02-02T15:38:43.627Z" }, + { url = 
"https://files.pythonhosted.org/packages/1d/d3/f19b47ce16820cc2c480f7f1723e17f6d411b3a295c60c8ad3aa9ff1c96a/orjson-3.11.7-cp314-cp314-win32.whl", hash = "sha256:89e13dd3f89f1c38a9c9eba5fbf7cdc2d1feca82f5f290864b4b7a6aac704576", size = 127997, upload-time = "2026-02-02T15:38:45.06Z" }, + { url = "https://files.pythonhosted.org/packages/12/df/172771902943af54bf661a8d102bdf2e7f932127968080632bda6054b62c/orjson-3.11.7-cp314-cp314-win_amd64.whl", hash = "sha256:845c3e0d8ded9c9271cd79596b9b552448b885b97110f628fb687aee2eed11c1", size = 124985, upload-time = "2026-02-02T15:38:46.388Z" }, + { url = "https://files.pythonhosted.org/packages/6f/1c/f2a8d8a1b17514660a614ce5f7aac74b934e69f5abc2700cc7ced882a009/orjson-3.11.7-cp314-cp314-win_arm64.whl", hash = "sha256:4a2e9c5be347b937a2e0203866f12bba36082e89b402ddb9e927d5822e43088d", size = 126038, upload-time = "2026-02-02T15:38:47.703Z" }, +] + +[[package]] +name = "packaging" +version = "26.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, +] + +[[package]] +name = "pathspec" +version = "1.0.4" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/fa/36/e27608899f9b8d4dff0617b2d9ab17ca5608956ca44461ac14ac48b44015/pathspec-1.0.4.tar.gz", hash = "sha256:0210e2ae8a21a9137c0d470578cb0e595af87edaa6ebf12ff176f14a02e0e645", size = 131200, upload-time = "2026-01-27T03:59:46.938Z" } +wheels = [ + { 
url = "https://files.pythonhosted.org/packages/ef/3c/2c197d226f9ea224a9ab8d197933f9da0ae0aac5b6e0f884e2b8d9c8e9f7/pathspec-1.0.4-py3-none-any.whl", hash = "sha256:fb6ae2fd4e7c921a165808a552060e722767cfa526f99ca5156ed2ce45a5c723", size = 55206, upload-time = "2026-01-27T03:59:45.137Z" }, +] + +[[package]] +name = "pluggy" +version = "1.6.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f9/e2/3e91f31a7d2b083fe6ef3fa267035b518369d9511ffab804f839851d2779/pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3", size = 69412, upload-time = "2025-05-15T12:30:07.975Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/20/4d324d65cc6d9205fabedc306948156824eb9f0ee1633355a8f7ec5c66bf/pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746", size = 20538, upload-time = "2025-05-15T12:30:06.134Z" }, +] + +[[package]] +name = "protobuf" +version = "7.34.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/f2/00/04a2ab36b70a52d0356852979e08b44edde0435f2115dc66e25f2100f3ab/protobuf-7.34.0.tar.gz", hash = "sha256:3871a3df67c710aaf7bb8d214cc997342e63ceebd940c8c7fc65c9b3d697591a", size = 454726, upload-time = "2026-02-27T00:30:25.421Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/13/c4/6322ab5c8f279c4c358bc14eb8aefc0550b97222a39f04eb3c1af7a830fa/protobuf-7.34.0-cp310-abi3-macosx_10_9_universal2.whl", hash = "sha256:8e329966799f2c271d5e05e236459fe1cbfdb8755aaa3b0914fa60947ddea408", size = 429248, upload-time = "2026-02-27T00:30:14.924Z" }, + { url = "https://files.pythonhosted.org/packages/45/99/b029bbbc61e8937545da5b79aa405ab2d9cf307a728f8c9459ad60d7a481/protobuf-7.34.0-cp310-abi3-manylinux2014_aarch64.whl", hash = "sha256:9d7a5005fb96f3c1e64f397f91500b0eb371b28da81296ae73a6b08a5b76cdd6", size = 325753, upload-time = 
"2026-02-27T00:30:17.247Z" }, + { url = "https://files.pythonhosted.org/packages/cc/79/09f02671eb75b251c5550a1c48e7b3d4b0623efd7c95a15a50f6f9fc1e2e/protobuf-7.34.0-cp310-abi3-manylinux2014_s390x.whl", hash = "sha256:4a72a8ec94e7a9f7ef7fe818ed26d073305f347f8b3b5ba31e22f81fd85fca02", size = 340200, upload-time = "2026-02-27T00:30:18.672Z" }, + { url = "https://files.pythonhosted.org/packages/b5/57/89727baef7578897af5ed166735ceb315819f1c184da8c3441271dbcfde7/protobuf-7.34.0-cp310-abi3-manylinux2014_x86_64.whl", hash = "sha256:964cf977e07f479c0697964e83deda72bcbc75c3badab506fb061b352d991b01", size = 324268, upload-time = "2026-02-27T00:30:20.088Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3e/38ff2ddee5cc946f575c9d8cc822e34bde205cf61acf8099ad88ef19d7d2/protobuf-7.34.0-cp310-abi3-win32.whl", hash = "sha256:f791ec509707a1d91bd02e07df157e75e4fb9fbdad12a81b7396201ec244e2e3", size = 426628, upload-time = "2026-02-27T00:30:21.555Z" }, + { url = "https://files.pythonhosted.org/packages/cb/71/7c32eaf34a61a1bae1b62a2ac4ffe09b8d1bb0cf93ad505f42040023db89/protobuf-7.34.0-cp310-abi3-win_amd64.whl", hash = "sha256:9f9079f1dde4e32342ecbd1c118d76367090d4aaa19da78230c38101c5b3dd40", size = 437901, upload-time = "2026-02-27T00:30:22.836Z" }, + { url = "https://files.pythonhosted.org/packages/a4/e7/14dc9366696dcb53a413449881743426ed289d687bcf3d5aee4726c32ebb/protobuf-7.34.0-py3-none-any.whl", hash = "sha256:e3b914dd77fa33fa06ab2baa97937746ab25695f389869afdf03e81f34e45dc7", size = 170716, upload-time = "2026-02-27T00:30:23.994Z" }, +] + +[[package]] +name = "pygments" +version = "2.20.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/b2/bc9c9196916376152d655522fdcebac55e66de6603a76a02bca1b6414f6c/pygments-2.20.0.tar.gz", hash = "sha256:6757cd03768053ff99f3039c1a36d6c0aa0b263438fcab17520b30a303a82b5f", size = 4955991, upload-time = "2026-03-29T13:29:33.898Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/f4/7e/a72dd26f3b0f4f2bf1dd8923c85f7ceb43172af56d63c7383eb62b332364/pygments-2.20.0-py3-none-any.whl", hash = "sha256:81a9e26dd42fd28a23a2d169d86d7ac03b46e2f8b59ed4698fb4785f946d0176", size = 1231151, upload-time = "2026-03-29T13:29:30.038Z" }, +] + +[[package]] +name = "pysocks" +version = "1.7.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/bd/11/293dd436aea955d45fc4e8a35b6ae7270f5b8e00b53cf6c024c83b657a11/PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0", size = 284429, upload-time = "2019-09-20T02:07:35.714Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8d/59/b4572118e098ac8e46e399a1dd0f2d85403ce8bbaad9ec79373ed6badaf9/PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5", size = 16725, upload-time = "2019-09-20T02:06:22.938Z" }, +] + +[[package]] +name = "pytest" +version = "9.0.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, + { name = "iniconfig" }, + { name = "packaging" }, + { name = "pluggy" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/d1/db/7ef3487e0fb0049ddb5ce41d3a49c235bf9ad299b6a25d5780a89f19230f/pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11", size = 1568901, upload-time = "2025-12-06T21:30:51.014Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3b/ab/b3226f0bd7cdcf710fbede2b3548584366da3b19b5021e74f5bde2a8fa3f/pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b", size = 374801, upload-time = "2025-12-06T21:30:49.154Z" }, +] + +[[package]] +name = "readchar" +version = "4.2.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = 
"https://files.pythonhosted.org/packages/dd/f8/8657b8cbb4ebeabfbdf991ac40eca8a1d1bd012011bd44ad1ed10f5cb494/readchar-4.2.1.tar.gz", hash = "sha256:91ce3faf07688de14d800592951e5575e9c7a3213738ed01d394dcc949b79adb", size = 9685, upload-time = "2024-11-04T18:28:07.757Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/a9/10/e4b1e0e5b6b6745c8098c275b69bc9d73e9542d5c7da4f137542b499ed44/readchar-4.2.1-py3-none-any.whl", hash = "sha256:a769305cd3994bb5fa2764aa4073452dc105a4ec39068ffe6efd3c20c60acc77", size = 9350, upload-time = "2024-11-04T18:28:02.859Z" }, +] + +[[package]] +name = "requests" +version = "2.33.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "certifi" }, + { name = "charset-normalizer" }, + { name = "idna" }, + { name = "urllib3" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, +] + +[package.optional-dependencies] +socks = [ + { name = "pysocks" }, +] + +[[package]] +name = "rich" +version = "14.3.3" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markdown-it-py" }, + { name = "pygments" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/b3/c6/f3b320c27991c46f43ee9d856302c70dc2d0fb2dba4842ff739d5f46b393/rich-14.3.3.tar.gz", hash = "sha256:b8daa0b9e4eef54dd8cf7c86c03713f53241884e814f4e2f5fb342fe520f639b", size = 230582, upload-time = "2026-02-19T17:23:12.474Z" } +wheels = [ + { url = 
"https://files.pythonhosted.org/packages/14/25/b208c5683343959b670dc001595f2f3737e051da617f66c31f7c4fa93abc/rich-14.3.3-py3-none-any.whl", hash = "sha256:793431c1f8619afa7d3b52b2cdec859562b950ea0d4b6b505397612db8d5362d", size = 310458, upload-time = "2026-02-19T17:23:13.732Z" }, +] + +[[package]] +name = "ruff" +version = "0.15.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/51/df/f8629c19c5318601d3121e230f74cbee7a3732339c52b21daa2b82ef9c7d/ruff-0.15.6.tar.gz", hash = "sha256:8394c7bb153a4e3811a4ecdacd4a8e6a4fa8097028119160dffecdcdf9b56ae4", size = 4597916, upload-time = "2026-03-12T23:05:47.51Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9e/2f/4e03a7e5ce99b517e98d3b4951f411de2b0fa8348d39cf446671adcce9a2/ruff-0.15.6-py3-none-linux_armv6l.whl", hash = "sha256:7c98c3b16407b2cf3d0f2b80c80187384bc92c6774d85fefa913ecd941256fff", size = 10508953, upload-time = "2026-03-12T23:05:17.246Z" }, + { url = "https://files.pythonhosted.org/packages/70/60/55bcdc3e9f80bcf39edf0cd272da6fa511a3d94d5a0dd9e0adf76ceebdb4/ruff-0.15.6-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:ee7dcfaad8b282a284df4aa6ddc2741b3f4a18b0555d626805555a820ea181c3", size = 10942257, upload-time = "2026-03-12T23:05:23.076Z" }, + { url = "https://files.pythonhosted.org/packages/e7/f9/005c29bd1726c0f492bfa215e95154cf480574140cb5f867c797c18c790b/ruff-0.15.6-py3-none-macosx_11_0_arm64.whl", hash = "sha256:3bd9967851a25f038fc8b9ae88a7fbd1b609f30349231dffaa37b6804923c4bb", size = 10322683, upload-time = "2026-03-12T23:05:33.738Z" }, + { url = "https://files.pythonhosted.org/packages/5f/74/2f861f5fd7cbb2146bddb5501450300ce41562da36d21868c69b7a828169/ruff-0.15.6-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:13f4594b04e42cd24a41da653886b04d2ff87adbf57497ed4f728b0e8a4866f8", size = 10660986, upload-time = "2026-03-12T23:05:53.245Z" }, + { url = 
"https://files.pythonhosted.org/packages/c1/a1/309f2364a424eccb763cdafc49df843c282609f47fe53aa83f38272389e0/ruff-0.15.6-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:e2ed8aea2f3fe57886d3f00ea5b8aae5bf68d5e195f487f037a955ff9fbaac9e", size = 10332177, upload-time = "2026-03-12T23:05:56.145Z" }, + { url = "https://files.pythonhosted.org/packages/30/41/7ebf1d32658b4bab20f8ac80972fb19cd4e2c6b78552be263a680edc55ac/ruff-0.15.6-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:70789d3e7830b848b548aae96766431c0dc01a6c78c13381f423bf7076c66d15", size = 11170783, upload-time = "2026-03-12T23:06:01.742Z" }, + { url = "https://files.pythonhosted.org/packages/76/be/6d488f6adca047df82cd62c304638bcb00821c36bd4881cfca221561fdfc/ruff-0.15.6-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:542aaf1de3154cea088ced5a819ce872611256ffe2498e750bbae5247a8114e9", size = 12044201, upload-time = "2026-03-12T23:05:28.697Z" }, + { url = "https://files.pythonhosted.org/packages/71/68/e6f125df4af7e6d0b498f8d373274794bc5156b324e8ab4bf5c1b4fc0ec7/ruff-0.15.6-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:1c22e6f02c16cfac3888aa636e9eba857254d15bbacc9906c9689fdecb1953ab", size = 11421561, upload-time = "2026-03-12T23:05:31.236Z" }, + { url = "https://files.pythonhosted.org/packages/f1/9f/f85ef5fd01a52e0b472b26dc1b4bd228b8f6f0435975442ffa4741278703/ruff-0.15.6-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:98893c4c0aadc8e448cfa315bd0cc343a5323d740fe5f28ef8a3f9e21b381f7e", size = 11310928, upload-time = "2026-03-12T23:05:45.288Z" }, + { url = "https://files.pythonhosted.org/packages/8c/26/b75f8c421f5654304b89471ed384ae8c7f42b4dff58fa6ce1626d7f2b59a/ruff-0.15.6-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:70d263770d234912374493e8cc1e7385c5d49376e41dfa51c5c3453169dc581c", size = 11235186, upload-time = "2026-03-12T23:05:50.677Z" }, + { url = 
"https://files.pythonhosted.org/packages/fc/d4/d5a6d065962ff7a68a86c9b4f5500f7d101a0792078de636526c0edd40da/ruff-0.15.6-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:55a1ad63c5a6e54b1f21b7514dfadc0c7fb40093fa22e95143cf3f64ebdcd512", size = 10635231, upload-time = "2026-03-12T23:05:37.044Z" }, + { url = "https://files.pythonhosted.org/packages/d6/56/7c3acf3d50910375349016cf33de24be021532042afbed87942858992491/ruff-0.15.6-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8dc473ba093c5ec238bb1e7429ee676dca24643c471e11fbaa8a857925b061c0", size = 10340357, upload-time = "2026-03-12T23:06:04.748Z" }, + { url = "https://files.pythonhosted.org/packages/06/54/6faa39e9c1033ff6a3b6e76b5df536931cd30caf64988e112bbf91ef5ce5/ruff-0.15.6-py3-none-musllinux_1_2_i686.whl", hash = "sha256:85b042377c2a5561131767974617006f99f7e13c63c111b998f29fc1e58a4cfb", size = 10860583, upload-time = "2026-03-12T23:05:58.978Z" }, + { url = "https://files.pythonhosted.org/packages/cb/1e/509a201b843b4dfb0b32acdedf68d951d3377988cae43949ba4c4133a96a/ruff-0.15.6-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:cef49e30bc5a86a6a92098a7fbf6e467a234d90b63305d6f3ec01225a9d092e0", size = 11410976, upload-time = "2026-03-12T23:05:39.955Z" }, + { url = "https://files.pythonhosted.org/packages/6c/25/3fc9114abf979a41673ce877c08016f8e660ad6cf508c3957f537d2e9fa9/ruff-0.15.6-py3-none-win32.whl", hash = "sha256:bbf67d39832404812a2d23020dda68fee7f18ce15654e96fb1d3ad21a5fe436c", size = 10616872, upload-time = "2026-03-12T23:05:42.451Z" }, + { url = "https://files.pythonhosted.org/packages/89/7a/09ece68445ceac348df06e08bf75db72d0e8427765b96c9c0ffabc1be1d9/ruff-0.15.6-py3-none-win_amd64.whl", hash = "sha256:aee25bc84c2f1007ecb5037dff75cef00414fdf17c23f07dc13e577883dca406", size = 11787271, upload-time = "2026-03-12T23:05:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/7f/d0/578c47dd68152ddddddf31cd7fc67dc30b7cdf639a86275fda821b0d9d98/ruff-0.15.6-py3-none-win_arm64.whl", hash = 
"sha256:c34de3dd0b0ba203be50ae70f5910b17188556630e2178fd7d79fc030eb0d837", size = 11060497, upload-time = "2026-03-12T23:05:25.968Z" }, +] + +[[package]] +name = "six" +version = "1.17.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/94/e7/b2c673351809dca68a0e064b6af791aa332cf192da575fd474ed7d6f16a2/six-1.17.0.tar.gz", hash = "sha256:ff70335d468e7eb6ec65b95b99d3a2836546063f63acc5171de367e834932a81", size = 34031, upload-time = "2024-12-04T17:35:28.174Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/b7/ce/149a00dd41f10bc29e5921b496af8b574d8413afcd5e30dfa0ed46c2cc5e/six-1.17.0-py2.py3-none-any.whl", hash = "sha256:4721f391ed90541fddacab5acf947aa0d3dc7d27b2e1e8eda2be8970586c3274", size = 11050, upload-time = "2024-12-04T17:35:26.475Z" }, +] + +[[package]] +name = "soupsieve" +version = "2.8.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/ae/2d9c981590ed9999a0d91755b47fc74f74de286b0f5cee14c9269041e6c4/soupsieve-2.8.3.tar.gz", hash = "sha256:3267f1eeea4251fb42728b6dfb746edc9acaffc4a45b27e19450b676586e8349", size = 118627, upload-time = "2026-01-20T04:27:02.457Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/46/2c/1462b1d0a634697ae9e55b3cecdcb64788e8b7d63f54d923fcd0bb140aed/soupsieve-2.8.3-py3-none-any.whl", hash = "sha256:ed64f2ba4eebeab06cc4962affce381647455978ffc1e36bb79a545b91f45a95", size = 37016, upload-time = "2026-01-20T04:27:01.012Z" }, +] + +[[package]] +name = "types-beautifulsoup4" +version = "4.12.0.20250516" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-html5lib" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/6d/d1/32b410f6d65eda94d3dfb0b3d0ca151f12cb1dc4cef731dcf7cbfd8716ff/types_beautifulsoup4-4.12.0.20250516.tar.gz", hash = "sha256:aa19dd73b33b70d6296adf92da8ab8a0c945c507e6fb7d5db553415cc77b417e", size = 16628, upload-time = 
"2025-05-16T03:09:09.93Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/7c/79/d84de200a80085b32f12c5820d4fd0addcbe7ba6dce8c1c9d8605e833c8e/types_beautifulsoup4-4.12.0.20250516-py3-none-any.whl", hash = "sha256:5923399d4a1ba9cc8f0096fe334cc732e130269541d66261bb42ab039c0376ee", size = 16879, upload-time = "2025-05-16T03:09:09.051Z" }, +] + +[[package]] +name = "types-html5lib" +version = "1.1.11.20251117" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "types-webencodings" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/c8/f3/d9a1bbba7b42b5558a3f9fe017d967f5338cf8108d35991d9b15fdea3e0d/types_html5lib-1.1.11.20251117.tar.gz", hash = "sha256:1a6a3ac5394aa12bf547fae5d5eff91dceec46b6d07c4367d9b39a37f42f201a", size = 18100, upload-time = "2025-11-17T03:08:00.78Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f0/ab/f5606db367c1f57f7400d3cb3bead6665ee2509621439af1b29c35ef6f9e/types_html5lib-1.1.11.20251117-py3-none-any.whl", hash = "sha256:2a3fc935de788a4d2659f4535002a421e05bea5e172b649d33232e99d4272d08", size = 24302, upload-time = "2025-11-17T03:07:59.996Z" }, +] + +[[package]] +name = "types-markdown" +version = "3.10.2.20260211" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/2e/35b30a09f6ee8a69142408d3ceb248c4454aa638c0a414d8704a3ef79563/types_markdown-3.10.2.20260211.tar.gz", hash = "sha256:66164310f88c11a58c6c706094c6f8c537c418e3525d33b76276a5fbd66b01ce", size = 19768, upload-time = "2026-02-11T04:19:29.497Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/54/c9/659fa2df04b232b0bfcd05d2418e683080e91ec68f636f3c0a5a267350e7/types_markdown-3.10.2.20260211-py3-none-any.whl", hash = "sha256:2d94d08587e3738203b3c4479c449845112b171abe8b5cadc9b0c12fcf3e99da", size = 25854, upload-time = "2026-02-11T04:19:28.647Z" }, +] + +[[package]] +name = "types-webencodings" +version = "0.5.0.20251108" +source = { registry 
= "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/66/d6/75e381959a2706644f02f7527d264de3216cf6ed333f98eff95954d78e07/types_webencodings-0.5.0.20251108.tar.gz", hash = "sha256:2378e2ceccced3d41bb5e21387586e7b5305e11519fc6b0659c629f23b2e5de4", size = 7470, upload-time = "2025-11-08T02:56:00.132Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/45/4e/8fcf33e193ce4af03c19d0e08483cf5f0838e883f800909c6bc61cb361be/types_webencodings-0.5.0.20251108-py3-none-any.whl", hash = "sha256:e21f81ff750795faffddaffd70a3d8bfff77d006f22c27e393eb7812586249d8", size = 8715, upload-time = "2025-11-08T02:55:59.456Z" }, +] + +[[package]] +name = "typing-extensions" +version = "4.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, +] + +[[package]] +name = "urllib3" +version = "2.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = 
"sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, +] + +[[package]] +name = "webencodings" +version = "0.5.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/0b/02/ae6ceac1baeda530866a85075641cec12989bd8d31af6d5ab4a3e8c92f47/webencodings-0.5.1.tar.gz", hash = "sha256:b36a1c245f2d304965eb4e0a82848379241dc04b865afcc4aab16748587e1923", size = 9721, upload-time = "2017-04-05T20:21:34.189Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/24/2a3e3df732393fed8b3ebf2ec078f05546de641fe1b667ee316ec1dcf3b7/webencodings-0.5.1-py2.py3-none-any.whl", hash = "sha256:a0af1213f3c2226497a97e2b3aa01a7e4bee4f403f95be16fc9acd2947514a78", size = 11774, upload-time = "2017-04-05T20:21:32.581Z" }, +]