diff --git a/.github/dependabot.yml b/.github/dependabot.yml new file mode 100644 index 0000000..58a9120 --- /dev/null +++ b/.github/dependabot.yml @@ -0,0 +1,20 @@ +version: 2 + +updates: + - package-ecosystem: "uv" + directory: "/" + schedule: + interval: "weekly" + open-pull-requests-limit: 2 + commit-message: + prefix: "build" + include: "scope" + rebase-strategy: "auto" + - package-ecosystem: github-actions + directory: / + commit-message: + prefix: "build" + include: "scope" + rebase-strategy: "auto" + schedule: + interval: "weekly" diff --git a/.github/labeler.yaml b/.github/labeler.yaml new file mode 100644 index 0000000..a049466 --- /dev/null +++ b/.github/labeler.yaml @@ -0,0 +1,19 @@ +dependencies: + - changed-files: + - any-glob-to-any-file: "uv.lock" + +github_actions: + - changed-files: + - any-glob-to-any-file: ".github/**" + +docs: + - changed-files: + - any-glob-to-any-file: "**/*.md" + +release: + - changed-files: + - any-glob-to-any-file: "CHANGELOG.md" + +tests: + - changed-files: + - any-glob-to-any-file: "tests/**" diff --git a/.github/workflows/CI.yaml b/.github/workflows/CI.yaml index ec6f276..cb9d502 100644 --- a/.github/workflows/CI.yaml +++ b/.github/workflows/CI.yaml @@ -2,100 +2,104 @@ name: CI on: push: - branches: [main] pull_request: - branches: - - main +concurrency: + group: ${{ github.workflow }}-${{ github.ref_name }} + cancel-in-progress: true env: - DEFAULT_PYTHON: "3.12" - DEFAULT_OS: ubuntu-latest + UV_VERSION: "0.9.4" jobs: + pre-commit: + runs-on: ubuntu-latest + name: Pre-commit hooks (lint/format/spell/type, all files) + steps: + - uses: actions/checkout@v6 + with: + fetch-depth: 0 + + - name: Set up Python + uses: actions/setup-python@v6 + with: + python-version-file: "pyproject.toml" + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + version: ${{ env.UV_VERSION }} + + - name: Install dependencies + run: uv sync --all-groups + + - name: Run pre-commit + run: uv run pre-commit run 
--show-diff-on-failure --color=always --all-files + + pytest: + name: Tests ${{ matrix.os }} / py${{ matrix.python }} + needs: pre-commit + runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: - python-version: ["3.11", "3.12", "3.13"] os: [ubuntu-latest, windows-latest] - plexos-version: ["v9.2R6", "v10.0R2", "v11.0R4"] - permissions: - pull-requests: write - contents: write - needs: [pre-commit, mypy, ruff] - - runs-on: ${{ matrix.os }} + python: ["3.11", "3.12", "3.13", "3.14"] + defaults: + run: + shell: bash steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 + - name: Install uv - uses: astral-sh/setup-uv@v3 + uses: astral-sh/setup-uv@v7 + with: + enable-cache: true + version: ${{ env.UV_VERSION }} - - name: Set up Python ${{ matrix.python-version }} - run: uv python install ${{ matrix.python-version }} + - name: Set up Python ${{ matrix.python }} + run: uv python install ${{ matrix.python }} - - name: Installing dependencies - run: uv sync --dev + - name: Install dependencies + run: uv sync --all-groups - name: Running package tests run: | - uv run pytest -vvl --cov --cov-report=xml -k ${{ matrix.plexos-version }} + uv run pytest --cov --cov-report=xml - - name: codecov - uses: codecov/codecov-action@v4.2.0 - if: ${{ matrix.os == env.DEFAULT_OS && matrix.python-version == env.DEFAULT_PYTHON && matrix.plexos-version == 'v11.0R4' }} + - name: Upload coverage reports to Codecov + uses: codecov/codecov-action@v5 with: token: ${{ secrets.CODECOV_TOKEN }} - name: r2x-test - fail_ci_if_error: false - verbose: true - pre-commit: + package: + name: Package smoke test + needs: pytest runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v3 - - - name: Set up pinned Python version - uses: actions/setup-python@v5 - with: - python-version-file: "pyproject.toml" + - name: Checkout + uses: actions/checkout@v6 - - name: Installing dependencies - run: uv sync --dev - - name:
Run pre-commit - run: | - uv run pre-commit run --all-files - - mypy: - runs-on: ubuntu-latest - name: "mypy" - steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v3 - name: Set up Python - uses: actions/setup-python@v5 + uses: actions/setup-python@v6 with: python-version-file: "pyproject.toml" - - name: Installing dependencies - run: uv sync --dev - - name: Run mypy - run: | - uv run mypy --config-file=pyproject.toml src/ - ruff: - runs-on: ubuntu-latest - steps: - - uses: actions/checkout@v4 - name: Install uv - uses: astral-sh/setup-uv@v3 - - name: Set up Python - uses: actions/setup-python@v5 + uses: astral-sh/setup-uv@v7 with: - python-version-file: "pyproject.toml" - - name: Installing dependencies - run: uv sync --dev - - name: Run Ruff - run: uv run ruff check --output-format=github src/ + version: ${{ env.UV_VERSION }} + enable-cache: true + + - name: Install dependencies + run: uv sync --all-groups + + - name: Build and install wheel + run: | + uv build + python -m venv pkgtest + source pkgtest/bin/activate + python -m pip install --upgrade pip + python -m pip install dist/*.whl + python -c "import plexosdb as m; print(getattr(m, '__version__', 'OK'))" diff --git a/.github/workflows/commit.yaml b/.github/workflows/commit.yaml new file mode 100644 index 0000000..38b2d16 --- /dev/null +++ b/.github/workflows/commit.yaml @@ -0,0 +1,46 @@ +name: on-commit + +on: + pull_request: + types: [opened, reopened, synchronize] + +jobs: + labeler: + name: apply labels + permissions: + contents: read + pull-requests: write + issues: write + runs-on: [ubuntu-latest] + steps: + - uses: actions/checkout@v6 + - uses: actions/labeler@v6.0.1 + with: + repo-token: ${{ secrets.GITHUB_TOKEN }} + configuration-path: .github/labeler.yaml + sync-labels: true + + lint-commit-messages: + name: lint commit message + runs-on: [ubuntu-latest] + steps: + - name: Checkout + uses: actions/checkout@v6 + with: + ref: ${{ github.event.pull_request.head.sha 
}} + fetch-depth: 0 + - name: Install uv + uses: astral-sh/setup-uv@v7 + - name: Commitizen check + run: | + uvx --from commitizen cz check --rev-range HEAD^! + lint-pr-title: + # default: lints titles using https://github.com/commitizen/conventional-commit-types + name: lint pr title + runs-on: [ubuntu-latest] + permissions: + pull-requests: read + steps: + - uses: amannn/action-semantic-pull-request@v6.1.1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/build-docs.yaml b/.github/workflows/docs.yaml similarity index 62% rename from .github/workflows/build-docs.yaml rename to .github/workflows/docs.yaml index e5bd707..601da5a 100644 --- a/.github/workflows/build-docs.yaml +++ b/.github/workflows/docs.yaml @@ -14,26 +14,26 @@ jobs: build: runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v6 - name: Install uv - uses: astral-sh/setup-uv@v3 - - - name: "Set up Python" - uses: actions/setup-python@v5 + uses: astral-sh/setup-uv@v7 with: - python-version-file: "pyproject.toml" + version: "latest" - name: Install the project run: uv sync --group docs + - name: Generate docstring coverage badge + run: uv run docstr-coverage src --badge docs/source/_static + - name: Build Sphinx documentation - run: uv run sphinx-build docs/source/ docs/build/ + run: uv run sphinx-build docs/source/ docs/_build/ - name: Deploy to GitHub Pages - uses: peaceiris/actions-gh-pages@v3 + uses: peaceiris/actions-gh-pages@v4 if: ${{ github.event_name == 'push' && github.ref == 'refs/heads/main' }} with: publish_branch: gh-pages github_token: ${{ secrets.GITHUB_TOKEN }} - publish_dir: docs/build/ + publish_dir: docs/_build/ force_orphan: true diff --git a/.github/workflows/release.yaml b/.github/workflows/release.yaml index 486530b..a873fec 100644 --- a/.github/workflows/release.yaml +++ b/.github/workflows/release.yaml @@ -1,189 +1,81 @@ -name: Release -permissions: - contents: write - id-token: write - +name: release-please on: 
- workflow_dispatch: - inputs: - base_bump: - description: "Select base version bump (major, minor, patch, stable, or empty to bump prerelease only)" - required: false - type: choice - default: "" - options: - - "" - - patch - - minor - - major - - stable + push: + branches: + - main - prerelease: - description: "Optional: Add a pre-release tag (dev, alpha, beta, rc)" - required: false - type: choice - default: "" - options: - - "" - - dev - - alpha - - beta - - rc +concurrency: + group: release-please + cancel-in-progress: true - release_name: - description: "Optional: Release Name" - required: false - type: string +permissions: + contents: write + pull-requests: write + id-token: write - release_body: - description: "Optional: Release Body" - required: false - type: string +env: + UV_VERSION: "0.9.4" jobs: - determine-version: - runs-on: ubuntu-latest + release-please: outputs: - NEW_VERSION: ${{ steps.calculate_version.outputs.NEW_VERSION }} + release_created: ${{ steps.release.outputs.release_created }} + release_tag: ${{ steps.release.outputs.tag_name }} + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 - - - name: Install uv - uses: astral-sh/setup-uv@v6 + - name: Run release-please + id: release + uses: googleapis/release-please-action@v4 with: - version: "latest" - - - name: Calculate new version - if: github.event_name == 'workflow_dispatch' - id: calculate_version - run: | - BASE_BUMP="${{ github.event.inputs.base_bump }}" - PRERELEASE="${{ github.event.inputs.prerelease }}" - CURRENT_VERSION=$(uv version --short) - - echo "Current version: $CURRENT_VERSION" - - # Build the uv version command based on inputs - if [[ -n "$BASE_BUMP" && -n "$PRERELEASE" ]]; then - # Both specified: base bump + prerelease - NEW_VERSION=$(uv version --bump "$BASE_BUMP" --bump "$PRERELEASE" --dry-run | awk '{print $NF}') - elif [[ -n "$BASE_BUMP" ]]; then - # Only base bump (includes 'stable') - NEW_VERSION=$(uv version --bump "$BASE_BUMP" --dry-run | awk '{print 
$NF}') - elif [[ -n "$PRERELEASE" ]]; then - # Only prerelease: bump the prerelease counter - NEW_VERSION=$(uv version --bump "$PRERELEASE" --dry-run | awk '{print $NF}') - else - echo "Error: Must specify either base_bump or prerelease" - exit 1 - fi - - echo "New version: $NEW_VERSION" - echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_OUTPUT - - # Add to workflow summary - echo "## ๐ŸŽ‰ Version Bump Summary" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - echo "**Current Version:** \`$CURRENT_VERSION\`" >> $GITHUB_STEP_SUMMARY - echo "**New Version:** \`$NEW_VERSION\`" >> $GITHUB_STEP_SUMMARY - echo "" >> $GITHUB_STEP_SUMMARY - if [[ -n "$BASE_BUMP" && -n "$PRERELEASE" ]]; then - echo "๐Ÿ“ฆ Creating first **$PRERELEASE** prerelease for new **$BASE_BUMP** version" >> $GITHUB_STEP_SUMMARY - elif [[ "$BASE_BUMP" == "stable" ]]; then - echo "โœ… Promoting to **stable** release" >> $GITHUB_STEP_SUMMARY - elif [[ -n "$PRERELEASE" ]]; then - echo "๐Ÿ”„ Bumping **$PRERELEASE** counter" >> $GITHUB_STEP_SUMMARY - elif [[ -n "$BASE_BUMP" ]]; then - echo "๐Ÿ“ˆ Standard **$BASE_BUMP** version bump" >> $GITHUB_STEP_SUMMARY - fi - - bumpversion: - needs: determine-version - runs-on: "ubuntu-latest" - outputs: - version: ${{ steps.version.outputs.version }} + token: ${{ secrets.GITHUB_TOKEN }} + config-file: .release-please-config.json + manifest-file: .release-please-manifest.json + target-branch: main + + build: + name: Build + needs: release-please + if: needs.release-please.outputs.release_created + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - name: Checkout release commit + uses: actions/checkout@v6 with: - token: ${{ secrets.BOT_TOKEN }} + ref: ${{ needs.release-please.outputs.release_tag }} + fetch-depth: 0 - - name: Install uv - uses: astral-sh/setup-uv@v6 - with: - version: "latest" - - - name: "Set up Python" - uses: actions/setup-python@v5 + - name: Set up Python + uses: actions/setup-python@v6 with: python-version-file: "pyproject.toml" - - 
name: Install the project - run: uv sync --all-extras --dev - - - name: Bump version using UV - id: version - run: | - NEW_VERSION="${{ needs.determine-version.outputs.NEW_VERSION }}" - - echo "Setting version to: $NEW_VERSION" - uv version "$NEW_VERSION" - - echo "NEW_VERSION=$NEW_VERSION" >> $GITHUB_ENV - echo "version=$NEW_VERSION" >> $GITHUB_OUTPUT - - - name: Generate a changelog - uses: orhun/git-cliff-action@v4 - id: git-cliff - with: - config: cliff.toml - args: --verbose - env: - OUTPUT: docs/source/CHANGELOG.md - - - name: Import GPG key - uses: crazy-max/ghaction-import-gpg@v6 - id: import-gpg + - name: Install uv + uses: astral-sh/setup-uv@v7 with: - gpg_private_key: ${{ secrets.GPG_PRIVATE_KEY }} - passphrase: ${{ secrets.GPG_PASS }} - git_user_signingkey: true - git_commit_gpgsign: true - git_tag_gpgsign: true - git_config_global: true + version: ${{ env.UV_VERSION }} + enable-cache: true - - name: Adding changes - run: git add pyproject.toml docs/source/CHANGELOG.md - - - name: Commit new version - shell: bash - run: | - git config commit.gpgsign true - git config --global user.email "${{ steps.import-gpg.outputs.email }}" - git config --global user.name "${{ steps.import-gpg.outputs.name }}" - git commit -S -m "Bump version to v${{ env.NEW_VERSION}}" - git tag -s v${{ env.NEW_VERSION }} -m "Release version ${{ env.NEW_VERSION }}" - git push origin ${{ github.ref_name }} --tags + - name: Install dependencies + run: uv sync --all-groups - name: Build package - run: | - uv build + run: uv build - name: Store the distribution packages - uses: actions/upload-artifact@v4 + uses: actions/upload-artifact@v5 with: name: python-package-distributions path: dist/ publish-testpypi: - needs: - - bumpversion runs-on: ubuntu-latest + needs: build environment: name: testpypi url: https://test.pypi.org/p/plexosdb steps: - name: Download all the dists - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v6 with: name: python-package-distributions 
path: dist/ @@ -195,7 +87,7 @@ jobs: publish-pypi: needs: - - bumpversion + - build - publish-testpypi runs-on: ubuntu-latest environment: @@ -203,46 +95,9 @@ jobs: url: https://pypi.org/p/plexosdb steps: - name: Download all the dists - uses: actions/download-artifact@v4 + uses: actions/download-artifact@v6 with: name: python-package-distributions path: dist/ - name: Publish distribution ๐Ÿ“ฆ to PyPI uses: pypa/gh-action-pypi-publish@release/v1 - - github-release: - name: >- - Sign the Python ๐Ÿ distribution ๐Ÿ“ฆ with Sigstore - and upload them to GitHub Release - needs: - - bumpversion - - publish-pypi - runs-on: ubuntu-latest - - steps: - - name: Download all the dists - uses: actions/download-artifact@v4 - with: - name: python-package-distributions - path: dist/ - - name: Sign the dists with Sigstore - uses: sigstore/gh-action-sigstore-python@v3.0.0 - with: - inputs: >- - ./dist/*.tar.gz - ./dist/*.whl - - name: Create GitHub Release - env: - GITHUB_TOKEN: ${{ github.token }} - run: >- - gh release create - "v${{ needs.bumpversion.outputs.version }}" - --repo "$GITHUB_REPOSITORY" - --generate-notes - - name: Upload artifact signatures to GitHub Release - env: - GITHUB_TOKEN: ${{ github.token }} - run: >- - gh release upload - "v${{ needs.bumpversion.outputs.version }}" dist/** - --repo "$GITHUB_REPOSITORY" diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 85a44fb..a42f6f2 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,23 +1,12 @@ +default_stages: + - pre-commit + +default_install_hook_types: + - pre-commit + - commit-msg + - pre-push + repos: - - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.6.0 # Use the ref you want to point at - hooks: - - id: trailing-whitespace - - id: check-added-large-files - args: ["--maxkb=5000"] - - id: check-json - - id: pretty-format-json - args: ["--autofix"] - - id: check-yaml - - id: check-case-conflict - - id: check-xml - - id: end-of-file-fixer - - id: 
debug-statements - - repo: https://github.com/pre-commit/mirrors-prettier - rev: "v4.0.0-alpha.8" - hooks: - - id: prettier - types_or: [yaml] - repo: local hooks: - id: ruff-format @@ -32,6 +21,42 @@ repos: language: system types_or: [python, pyi] + - repo: https://github.com/pre-commit/pre-commit-hooks + rev: v6.0.0 + hooks: + - id: end-of-file-fixer + - id: trailing-whitespace + - id: check-added-large-files + - id: check-merge-conflict + - id: check-yaml + - id: check-toml + - id: check-json + - id: check-case-conflict + + - repo: https://github.com/commitizen-tools/commitizen + rev: v4.10.0 + hooks: + - id: commitizen + stages: + - commit-msg + + - repo: local + hooks: + - id: pytest + name: pytest (quick) + entry: uv run pytest -q -m "not slow" --maxfail=1 --disable-pytest-warnings + language: system + types: [python] + pass_filenames: false + stages: [pre-push] + + + - repo: https://github.com/astral-sh/uv-pre-commit + # uv version. + rev: 0.9.4 + hooks: + - id: uv-lock + - repo: local hooks: - id: mypy diff --git a/.release-please-config.json b/.release-please-config.json new file mode 100644 index 0000000..bd76d60 --- /dev/null +++ b/.release-please-config.json @@ -0,0 +1,68 @@ +{ + "$schema": "https://raw.githubusercontent.com/googleapis/release-please/main/schemas/config.json", + "packages": { + ".": { + "changelog-path": "CHANGELOG.md", + "changelog-sections": [ + { + "hidden": false, + "section": "๐Ÿš€ Features", + "type": "feat" + }, + { + "hidden": false, + "section": "๐Ÿ› Bug Fixes", + "type": "fix" + }, + { + "hidden": false, + "section": "โšก Performance", + "type": "perf" + }, + { + "hidden": false, + "section": "๐Ÿงน Refactoring", + "type": "refactor" + }, + { + "hidden": false, + "section": "๐Ÿ“š Documentation", + "type": "docs" + }, + { + "hidden": false, + "section": "๐Ÿงฉ CI", + "type": "ci" + }, + { + "hidden": false, + "section": "๐Ÿ“ฆ Build", + "type": "build" + }, + { + "hidden": true, + "section": "๐Ÿงด Chores", + "type": "chore" + }, 
+ { + "hidden": false, + "section": "๐Ÿงช Tests", + "type": "test" + } + ], + "bump-minor-pre-major": true, + "draft": false, + "extra-files": [ + { + "jsonpath": "$.package[?(@.name.value=='plexosdb')].version", + "path": "uv.lock", + "type": "toml" + } + ], + "include-component-in-tag": false, + "package-name": "plexosdb", + "prerelease": false, + "release-type": "python" + } + } +} diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 0000000..d719a72 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1,3 @@ +{ + ".": "1.1.3" +} diff --git a/pyproject.toml b/pyproject.toml index 3d3dcd0..5da8763 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ name = "plexosdb" version = "1.1.3" readme = "README.md" license = {file = "LICENSE.txt"} -keywords = [] +keywords = ["PLEXOS", "Database", "SQLite"] authors = [ { name = "Pedro Andres Sanchez Perez", email = "psanchez@nrel.gov" }, { name = "Kodi Obika", email = "kodi.obika@nrel.gov" }, @@ -15,12 +15,9 @@ maintainers = [ { name = "mcllerena", email = "mcllerena@users.noreply.github.com"}, ] description = "SQLite API for plexos XMLs" -dependencies = [ - "loguru", -] -requires-python = ">= 3.11" +requires-python = ">= 3.11,<3.15" classifiers = [ - "Development Status :: 4 - Beta", + "Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: BSD License", "Topic :: Software Development :: Build Tools", @@ -28,6 +25,16 @@ classifiers = [ "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", "Programming Language :: Python :: 3.14", + "Topic :: Scientific/Engineering", + "Topic :: Scientific/Engineering :: Information Analysis", + "Topic :: Scientific/Engineering :: Interface Engine/Protocol Translator", + "Topic :: Software Development :: Build Tools", + "Topic :: Database", + + "Typing :: Typed", +] +dependencies = [ + "loguru", ] [project.urls] @@ -52,17 +59,15 @@ docs = [ 
"sphinx-autobuild>=2024.10.3", "sphinx-multiversion>=0.2.4", "sphinxcontrib-versioning>=2.2.1", + "docstr-coverage>=2.3.2", ] dev = [ - "pytest-benchmark>=5.1.0", - "bump2version", - "ipython", - "pudb", - "mypy~=1.11.0", - "pre-commit", - "pytest", - "pytest-cov", - "ruff~=0.5.2", + "ipython>=9.2.0", + "mypy>=1.15.0", + "pre-commit>=4.2.0", + "pytest>=8.3.5", + "pytest-coverage>=0.0", + "ruff>=0.11.5", ] [tool.setuptools] @@ -74,9 +79,9 @@ include-package-data = true [tool.setuptools.packages.find] where = ["src"] -# Setuptools configuration -[tool.setuptools.dynamic] -readme = { file = ["README.md"], content-type = "text/markdown" } +[tool.mypy] +strict = true +exclude = ["tests/"] [tool.ruff] line-length = 110 @@ -121,7 +126,7 @@ select = [ ] # Allow unused variables when underscore-prefixed. dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" -extend-ignore = ['D105', 'D107', 'D205', 'D415'] +extend-ignore = ['D105', 'D107', 'D205', 'D415', 'D418'] pydocstyle.convention = "numpy" [tool.ruff.format] @@ -138,8 +143,15 @@ docstring-code-line-length = "dynamic" "**/{tests,docs,tools}/*" = ["D100", "D103", "E402"] [tool.pytest.ini_options] -pythonpath = [ - "src" +pythonpath = ["src"] +testpaths = ["tests"] +addopts = [ + "--cov=plexosdb", + "--cov-report=term-missing:skip-covered", + "--cov-report=html", + "--cov-report=json", + "--strict-markers", + "-v", ] markers = [ "checks: Functions that check existence of database entities", @@ -153,6 +165,26 @@ markers = [ ] [tool.coverage.run] +source = ["src/plexosdb"] omit = [ - "tests/*", - ] + "*/tests/*", + "*/__pycache__/*", + "*/site-packages/*", +] + +[tool.coverage.report] +exclude_lines = [ + "pragma: no cover", + "def __repr__", + "raise AssertionError", + "raise NotImplementedError", + "if __name__ == .__main__.:", + "if TYPE_CHECKING:", + "@abstractmethod", + "@abstract", +] +precision = 2 +fail_under = 95 + +[tool.coverage.html] +directory = "htmlcov" diff --git a/src/plexosdb/__init__.py 
b/src/plexosdb/__init__.py index b4579cd..e9a8577 100644 --- a/src/plexosdb/__init__.py +++ b/src/plexosdb/__init__.py @@ -1,3 +1,5 @@ +"""Entrypoint for the PlexosDB client providing its exports.""" + from importlib.metadata import version from loguru import logger diff --git a/src/plexosdb/db.py b/src/plexosdb/db.py index c8f3652..0db38be 100644 --- a/src/plexosdb/db.py +++ b/src/plexosdb/db.py @@ -93,7 +93,7 @@ def __init__( self, fpath_or_conn: Path | str | sqlite3.Connection | None = None, new_db: bool = False, - **kwargs, + **kwargs: Any, ) -> None: """Initialize the API using an XML file or other data sources. @@ -130,7 +130,12 @@ def _get_plexos_version(self) -> tuple[int, ...] | None: return tuple(map(int, result[0].split("."))) @classmethod - def from_xml(cls, xml_path: str | Path, schema: str | None = None, **kwargs) -> "PlexosDB": + def from_xml( + cls, + xml_path: str | Path, + schema: str | None = None, + **kwargs: Any, + ) -> "PlexosDB": """Construct a PlexosDB instance from an XML file. This factory method creates a new PlexosDB instance and populates it with data @@ -679,7 +684,12 @@ def add_object( _ = self.add_membership(ClassEnum.System, class_enum, "System", name, collection_enum) return object_id - def add_objects(self, class_enum: ClassEnum, *object_names, category: str | None = None) -> None: + def add_objects( + self, + class_enum: ClassEnum, + *object_names: str, + category: str | None = None, + ) -> None: """Add multiple objects of the same class to the database in bulk. 
This method efficiently adds multiple objects to the database in a single operation, @@ -749,7 +759,7 @@ def add_objects(self, class_enum: ClassEnum, *object_names, category: str | None def add_properties_from_records( self, - records: list[dict], + records: list[dict[str, Any]], /, *, object_class: ClassEnum, @@ -858,18 +868,45 @@ def add_properties_from_records( logger.debug(f"Successfully processed {len(records)} property and text records in batches") return - def _handle_dates(self, data_id, date_from, date_to): + def _handle_dates( + self, + data_id: int, + date_from: datetime | None, + date_to: datetime | None, + ) -> None: + """Persist optional date boundaries for a property data entry. + + Parameters + ---------- + data_id : int + Identifier of the property data row that owns the dates. + date_from : datetime or None + Optional start date to attach to the row. + date_to : datetime or None + Optional end date to attach to the row. + + Returns + ------- + None + + Raises + ------ + TypeError + If either `date_from` or `date_to` is provided but is not a datetime. 
+ """ if date_from is not None: if not isinstance(date_from, datetime): raise TypeError("date_from must be a datetime object") self._db.execute( - "INSERT INTO t_date_from(data_id, date) VALUES (?,?)", (data_id, date_from.isoformat()) + "INSERT INTO t_date_from(data_id, date) VALUES (?,?)", + (data_id, date_from.isoformat()), ) if date_to is not None: if not isinstance(date_to, datetime): raise TypeError("date_to must be a datetime object") self._db.execute( - "INSERT INTO t_date_to(data_id, date) VALUES (?,?)", (data_id, date_to.isoformat()) + "INSERT INTO t_date_to(data_id, date) VALUES (?,?)", + (data_id, date_to.isoformat()), ) def add_property( @@ -1408,7 +1445,7 @@ def check_collection_exists( query = f"SELECT 1 FROM {Schema.Collection.name} WHERE {where_clause}" return bool(self._db.query(query, tuple(params))) - def check_data_id_exist(self, data_id: int): + def check_data_id_exist(self, data_id: int) -> bool: """Check that a data id is present on t_data table.""" query = "SELECT 1 FROM t_data where data_id = ?" return bool(self.query(query, (data_id,))) @@ -1816,7 +1853,7 @@ def copy_object_memberships( logger.warning(msg, object_class, original_name) return membership_mapping - def _copy_object_properties(self, membership_mapping: dict[int, int]): + def _copy_object_properties(self, membership_mapping: dict[int, int]) -> bool: """Copy all property data from original object to new object efficiently. Parameters @@ -2150,7 +2187,7 @@ def get_attribute( *, object_name: str, attribute_name: str, - ) -> dict: + ) -> Any: """Get attribute details for a specific object.""" query = """ SELECT @@ -2166,7 +2203,7 @@ def get_attribute( result = self._db.fetchone(query, (attribute_id, object_id)) assert result - return result + return cast(Any, result) def get_attribute_id(self, class_enum: ClassEnum, /, name: str) -> int: """Return the ID for a given attribute. 
@@ -2203,7 +2240,7 @@ def get_attribute_id(self, class_enum: ClassEnum, /, name: str) -> int: """ result = self._db.fetchone(query, (name, class_enum)) assert result - return result[0] + return cast(int, result[0]) def get_attributes( self, @@ -2212,7 +2249,7 @@ def get_attributes( *, object_class: ClassEnum, attribute_names: list[str] | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """Get all attributes for a specific object.""" raise NotImplementedError # pragma: no cover @@ -2266,7 +2303,7 @@ def get_category_id(self, class_enum: ClassEnum, /, name: str) -> int: if not result: msg = f"Category = `{name}` not found on the database." raise NotFoundError(msg) - return result[0] + return cast(int, result[0]) def get_category_max_id(self, class_enum: ClassEnum) -> int: """Return the current maximum rank for a given category class. @@ -2313,7 +2350,7 @@ def get_category_max_id(self, class_enum: ClassEnum) -> int: """ result = self._db.fetchone(query, (class_enum,)) assert result - return result[0] + return cast(int, result[0]) def get_class_id(self, class_enum: ClassEnum) -> int: """Return the ID for a given class. @@ -2349,7 +2386,7 @@ def get_class_id(self, class_enum: ClassEnum) -> int: query = f"SELECT class_id FROM {Schema.Class.name} WHERE name = ?" 
result = self._db.fetchone(query, (class_enum,)) assert result - return result[0] + return cast(int, result[0]) def get_collection_id( self, @@ -2407,13 +2444,13 @@ def get_collection_id( """ result = self._db.fetchone(query, (collection, parent_class_enum, child_class_enum)) assert result - return result[0] + return cast(int, result[0]) - def get_config(self, element: str | None = None) -> dict | list[dict]: + def get_config(self, element: str | None = None) -> dict[str, Any] | list[dict[str, Any]]: """Get configuration values from the database.""" raise NotImplementedError # pragma: no cover - def get_custom_columns(self, class_enum: ClassEnum | None = None) -> list[dict]: + def get_custom_columns(self, class_enum: ClassEnum | None = None) -> list[dict[str, Any]]: """Get custom columns, optionally filtered by class.""" raise NotImplementedError # pragma: no cover @@ -2478,7 +2515,7 @@ def get_membership_id( """ result = self._db.fetchone(query, (parent_name, child_name, collection)) assert result - return result[0] + return cast(int, result[0]) def list_object_memberships( self, @@ -2639,7 +2676,7 @@ def get_metadata( *, class_name: str | None = None, property_name: str | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """Retrieve metadata for an entity.""" raise NotImplementedError # pragma: no cover @@ -2653,7 +2690,7 @@ def get_object_data_ids( parent_class_enum: ClassEnum = ClassEnum.System, collection_enum: CollectionEnum | None = None, category: str | None = None, - ): + ) -> list[int]: """Get all the data_id values for a given object in the database. Retrieves all data IDs that match the specified criteria for an object, @@ -2897,7 +2934,7 @@ def get_object_id(self, class_enum: ClassEnum, /, name: str, *, category: str | msg = f"Object = {name} not found on the database." 
raise NotFoundError(msg) assert result - return result[0] + return cast(int, result[0]) def get_objects_id( self, @@ -2985,7 +3022,7 @@ def get_property_id( (property_name, collection_id), ) assert result - return result[0] + return cast(int, result[0]) def get_property_unit( self, @@ -3036,11 +3073,11 @@ def get_text( /, *, class_id: int | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """Retrieve text data associated with a property data record.""" raise NotImplementedError # pragma: no cover - def get_unit(self, unit_id: int) -> dict: + def get_unit(self, unit_id: int) -> dict[str, Any]: """Get details for a specific unit.""" raise NotImplementedError # pragma: no cover @@ -3309,7 +3346,7 @@ def list_child_objects( parent_class: ClassEnum, child_class: ClassEnum | None = None, collection: CollectionEnum | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """List all child objects for a given parent object. Retrieves all child objects that have a membership relationship with the specified @@ -3453,7 +3490,7 @@ def list_collections( *, parent_class: ClassEnum | None = None, child_class: ClassEnum | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """List all available collections in the database. Parameters @@ -3558,7 +3595,7 @@ def list_parent_objects( child_class: ClassEnum, parent_class: ClassEnum | None = None, collection: CollectionEnum | None = None, - ) -> list[dict]: + ) -> list[dict[str, Any]]: """List all parent objects for a given child object. 
Retrieves all parent objects that have a membership relationship with the specified @@ -3664,7 +3701,7 @@ def list_parent_objects( result = self._db.fetchall_dict(query, tuple(params)) return result - def list_reports(self) -> list[dict]: + def list_reports(self) -> list[dict[str, Any]]: """List all defined reports in the database.""" raise NotImplementedError # pragma: no cover @@ -4126,7 +4163,7 @@ def update_object( assert result return True - def update_properties(self, updates: list[dict]) -> None: + def update_properties(self, updates: list[dict[str, Any]]) -> None: """Update multiple properties in a single transaction.""" raise NotImplementedError # pragma: no cover diff --git a/src/plexosdb/db_manager.py b/src/plexosdb/db_manager.py index b5e9534..7dbca7e 100644 --- a/src/plexosdb/db_manager.py +++ b/src/plexosdb/db_manager.py @@ -1,16 +1,14 @@ """SQLite database manager.""" import sqlite3 -from collections.abc import Callable, Iterator +from collections.abc import Callable, Generator, Iterator from contextlib import contextmanager from dataclasses import dataclass from pathlib import Path -from typing import Any, Generic, TypeVar, overload +from typing import Any, cast, overload from loguru import logger -T = TypeVar("T") - @dataclass(slots=True) class SQLiteConfig: @@ -48,7 +46,7 @@ def for_file_database(cls) -> "SQLiteConfig": ) -class SQLiteManager(Generic[T]): +class SQLiteManager: """SQLite database manager with optimized transaction support.""" _con: sqlite3.Connection | None = None @@ -60,6 +58,21 @@ def __init__( config: SQLiteConfig | None = None, initialize: bool = True, ) -> None: + """Create an SQLite manager wrapping a connection or file path. + + Parameters + ---------- + fpath_or_conn : str, Path, sqlite3.Connection, optional + Path to an SQLite file, an existing connection, or None to use in-memory. + config : SQLiteConfig, optional + Configuration overrides for the connection. 
+ initialize : bool, optional + Whether to immediately apply the requested SQLite pragmas. + + Returns + ------- + None + """ match fpath_or_conn: case None: logger.info("Creating in-memory database.") @@ -92,9 +105,9 @@ def config(self) -> SQLiteConfig: return self._config @property - def sqlite_version(self) -> int: + def sqlite_version(self) -> str: """SQLite version.""" - return self.query("select sqlite_version()")[0][0] + return cast(str, self.query("select sqlite_version()")[0][0]) @property def tables(self) -> list[str]: @@ -125,7 +138,7 @@ def _set_sqlite_configuration(self, config: SQLiteConfig) -> None: def _is_in_memory(self) -> bool: """Check if the connection is to an in-memory database.""" result = self.query("PRAGMA database_list") - return result[0][2] == "" # Empty file path indicates in-memory + return cast(str, result[0][2]) == "" # Empty file path indicates in-memory def add_collation(self, name: str, callable_func: Callable[[str, str], int]) -> bool: """Register a collation function. @@ -393,7 +406,7 @@ def last_insert_rowid(self) -> int: If a database error occurs """ try: - return self.query("SELECT last_insert_rowid()")[0][0] + return cast(int, self.query("SELECT last_insert_rowid()")[0][0]) except IndexError: # This shouldn't happen with last_insert_rowid() but handle it anyway return 0 @@ -443,10 +456,14 @@ def _validate_query_type(self, query: str) -> None: # Add generic type support for query results @overload - def query(self, query: str, params: None = None) -> list[tuple[Any, ...]]: ... + def query(self, query: str, params: None = None) -> list[tuple[Any, ...]]: + """Read-only queries without parameters use the default binding.""" + ... @overload - def query(self, query: str, params: tuple[Any, ...] | dict[str, Any]) -> list[tuple[Any, ...]]: ... + def query(self, query: str, params: tuple[Any, ...] | dict[str, Any]) -> list[tuple[Any, ...]]: + """Read-only queries that bind positional or named parameters.""" + ... 
def query( self, query: str, params: tuple[Any, ...] | dict[str, Any] | None = None @@ -782,7 +799,7 @@ def iter_dicts( cursor.close() @contextmanager - def transaction(self): + def transaction(self) -> Generator["SQLiteManager", None, None]: """Begin a transaction that can span multiple operations. This provides explicit transaction control for grouping multiple operations @@ -869,7 +886,7 @@ def insert_records( return self.executemany(query, values_list) - def __enter__(self): + def __enter__(self) -> "SQLiteManager": """Support using SQLiteManager as a context manager.""" return self diff --git a/src/plexosdb/enums.py b/src/plexosdb/enums.py index 85ae851..308d878 100644 --- a/src/plexosdb/enums.py +++ b/src/plexosdb/enums.py @@ -1,6 +1,7 @@ """Plexos model enums that define the data schema.""" from enum import Enum, StrEnum +from typing import cast class Schema(Enum): @@ -33,12 +34,26 @@ class Schema(Enum): Units = ("t_unit", "unit_id") @property - def name(self): # noqa: D102 - return self.value[0] + def name(self) -> str: + """Table name associated with this schema element. + + Returns + ------- + str + The underlying table name parsed from the enum value. + """ + return cast(str, self.value[0]) @property - def label(self): # noqa: D102 - return self.value[1] + def label(self) -> str | None: + """Primary label column name for the schema element, if any. + + Returns + ------- + str | None + The label column used as a default identifier when available. 
+ """ + return cast(str | None, self.value[1]) class ClassEnum(StrEnum): @@ -120,11 +135,11 @@ class CollectionEnum(StrEnum): Variables = "Variables" -def str2enum(string, schema_enum=Schema) -> Schema | None: +def str2enum(string: str, schema_enum: type[Enum] = Schema) -> Schema | None: """Convert string to enum.""" for e in schema_enum: if e.name == string: - return e + return cast(Schema, e) return None diff --git a/src/plexosdb/exceptions.py b/src/plexosdb/exceptions.py index ad7698c..058f192 100644 --- a/src/plexosdb/exceptions.py +++ b/src/plexosdb/exceptions.py @@ -1,23 +1,25 @@ -# ruff: noqa: D100, D101 +"""Custom PlexosDB exceptions that highlight domain-specific failures.""" + + class NotFoundError(Exception): - pass + """Raised when the database cannot locate a requested entry.""" class MultlipleElementsError(Exception): - pass + """Raised when a query unexpectedly returns multiple elements.""" class ModelError(Exception): - pass + """Raised for generic errors related to model relationships.""" class MultipleFilesError(Exception): - pass + """Raised when multiple files are provided but only one is expected.""" class NameError(ValueError): - pass + """Raised when an object name is invalid or missing in context.""" class NoPropertiesError(Exception): - pass + """Raised when a lookup finds no properties for a given object.""" diff --git a/src/plexosdb/utils.py b/src/plexosdb/utils.py index e522ec7..ef0e40a 100644 --- a/src/plexosdb/utils.py +++ b/src/plexosdb/utils.py @@ -3,7 +3,7 @@ from __future__ import annotations import ast -from collections.abc import Iterable +from collections.abc import Iterable, Iterator from importlib.resources import files from itertools import islice from typing import TYPE_CHECKING, Any @@ -15,7 +15,7 @@ from plexosdb.db_manager import SQLiteManager -def batched(iterable, n): +def batched(iterable: Iterable[Any], n: int) -> Iterator[tuple[Any, ...]]: """Implement batched iterator. 
https://docs.python.org/3/library/itertools.html#itertools.batched @@ -70,7 +70,7 @@ def no_space(a: str, b: str) -> int: return 1 -def normalize_names(*args) -> list[str]: +def normalize_names(*args: str | Iterable[str]) -> list[str]: """Normalize a name or list of names into a unique list of strings. Parameters @@ -88,6 +88,7 @@ def normalize_names(*args) -> list[str]: ValueError If the input is neither a string nor an iterable of strings """ + names: Iterable[Any] if len(args) == 1 and hasattr(args[0], "__iter__") and not isinstance(args[0], str): names = args[0] else: @@ -95,7 +96,7 @@ def normalize_names(*args) -> list[str]: return list(set(str(name) for name in names if name is not None)) -def get_sql_query(query_name: str): +def get_sql_query(query_name: str) -> str: """Load SQL query from package. Parameters @@ -188,11 +189,11 @@ def create_membership_record( def prepare_properties_params( db: PlexosDB, - records: list[dict], + records: list[dict[str, Any]], object_class: ClassEnum, collection: CollectionEnum, - parent_class, -) -> tuple[list[tuple], list]: + parent_class: ClassEnum, +) -> tuple[list[tuple[int, int, Any]], list[tuple[str, int]]]: """Prepare SQL parameters for property insertion. Parameters @@ -232,7 +233,9 @@ def prepare_properties_params( return params, collection_properties -def insert_property_data(db, params: list[tuple]) -> dict: +def insert_property_data( + db: PlexosDB, params: list[tuple[int, int, Any]] +) -> dict[tuple[int, int, Any], tuple[int, str]]: """Insert property data and return mapping of data IDs to object names. Parameters @@ -269,7 +272,9 @@ def insert_property_data(db, params: list[tuple]) -> dict: return data_id_map -def insert_scenario_tags(db: PlexosDB, scenario: str, params: list[tuple], chunksize: int) -> None: +def insert_scenario_tags( + db: PlexosDB, scenario: str, params: list[tuple[int, int, Any]], chunksize: int +) -> None: """Insert scenario tags for property data. 
Parameters @@ -306,7 +311,12 @@ def insert_scenario_tags(db: PlexosDB, scenario: str, params: list[tuple], chunk def add_texts_for_properties( - db: PlexosDB, params: list[tuple], data_id_map: dict, records: list[dict], field_name: str, text_class + db: PlexosDB, + params: list[tuple[int, int, Any]], + data_id_map: dict[tuple[int, int, Any], tuple[int, str]], + records: list[dict[str, Any]], + field_name: str, + text_class: ClassEnum, ) -> None: """Add text data for properties from specified field. @@ -332,7 +342,9 @@ def add_texts_for_properties( db.add_text(text_class, text_map[obj_name], data_id) -def build_data_id_map(db: SQLiteManager, params: list[tuple]) -> dict: +def build_data_id_map( + db: SQLiteManager, params: list[tuple[int, int, Any]] +) -> dict[tuple[int, int, Any], tuple[int, str]]: """Build mapping of (membership_id, property_id, value) to (data_id, obj_name). Parameters diff --git a/src/plexosdb/xml_handler.py b/src/plexosdb/xml_handler.py index 0cbea84..19655cc 100644 --- a/src/plexosdb/xml_handler.py +++ b/src/plexosdb/xml_handler.py @@ -1,10 +1,10 @@ """Plexos Input XML API.""" -import xml.etree.ElementTree as ET # noqa: N817 +import xml.etree.ElementTree as ET from collections import defaultdict from collections.abc import Iterable, Iterator from os import PathLike -from typing import TYPE_CHECKING +from typing import TYPE_CHECKING, Any from loguru import logger @@ -19,21 +19,38 @@ class XMLHandler: # Tell mypy that these attributes are never None after initialization if TYPE_CHECKING: - root: ET.Element - tree: ET.ElementTree + root: ET.Element[str] + tree: ET.ElementTree[ET.Element[str]] def __init__( self, - fpath: str | PathLike | None = None, + fpath: str | PathLike[str] | None = None, namespace: str = PLEXOS_NAMESPACE, in_memory: bool = True, initialize: bool = False, ) -> None: + """Initialize XML handler for PLEXOS datasets. + + Parameters + ---------- + fpath : str or PathLike, optional + Path to an existing XML file to load. 
+ namespace : str, optional + XML namespace to use when serializing. + in_memory : bool, optional + Store element caches in memory for faster lookups. + initialize : bool, optional + Build an empty XML tree instead of parsing a file. + + Returns + ------- + None + """ self.fpath = fpath self.namespace = namespace self.in_memory = in_memory - self._cache: dict = {} - self._counts: dict = {} + self._cache: dict[str, list[ET.Element]] = {} + self._counts: dict[str, int] = {} if initialize: self.root = ET.Element("MasterDataSet") @@ -42,7 +59,9 @@ def __init__( # If we are parsing an XML file if fpath and not initialize: self.tree = ET.parse(fpath) - self.root = self.tree.getroot() + root = self.tree.getroot() + if root is not None: + self.root = root self._remove_namespace(namespace) # At this point both should be either define from a file or from initialize. @@ -58,11 +77,21 @@ def __init__( self._counts = {key: len(_cache[key]) for key in _cache} @classmethod - def parse(cls, fpath: str | PathLike, namespace: str = PLEXOS_NAMESPACE, **kwargs) -> "XMLHandler": + def parse( + cls, + fpath: str | PathLike[str], + namespace: str = PLEXOS_NAMESPACE, + **kwargs: Any, + ) -> "XMLHandler": """Return XML instance from file requested.""" return XMLHandler(fpath=fpath, namespace=namespace, **kwargs) - def create_table_element(self, rows: list[tuple], column_types: dict[str, str], table_name: str) -> bool: + def create_table_element( + self, + rows: list[tuple[Any, ...]], + column_types: dict[str, str], + table_name: str, + ) -> bool: """Create XML elements for a given table.""" for row in rows: table_element = ET.SubElement(self.root, table_name) @@ -85,9 +114,9 @@ def get_records( self, element_enum: Schema, *elements: Iterable[str | int], - rename_dict: dict | None = None, - **tag_elements, - ) -> list[dict]: + rename_dict: dict[str, str] | None = None, + **tag_elements: Any, + ) -> list[dict[str, Any]]: """Return a given element(s) as list of dictionaries.""" if rename_dict 
is None: rename_dict = {} @@ -106,7 +135,11 @@ def get_records( ) def iter( - self, element_type: Schema, *elements: Iterable[str | int], label: str | None = None, **tags + self, + element_type: Schema, + *elements: Iterable[str | int], + label: str | None = None, + **tags: Any, ) -> Iterable[ET.Element]: """Return elements from the XML based on the type. @@ -141,7 +174,7 @@ def iter( for element in elements: yield from self._cache_iter(element_type, **{f"{label}": element}) - def to_xml(self, fpath: str | PathLike) -> bool: + def to_xml(self, fpath: str | PathLike[str]) -> bool: """Save memory xml to file.""" assert self.root is not None assert self.tree is not None @@ -167,13 +200,41 @@ def to_xml(self, fpath: str | PathLike) -> bool: return True - def _cache_iter(self, element_type: Schema, **tag_elements) -> Iterator | list: + def _cache_iter(self, element_type: Schema, **tag_elements: Any) -> Iterator[ET.Element]: + """Return iterator over cached XML elements matching filters. + + Parameters + ---------- + element_type : Schema + Schema enum describing the cached element type. + **tag_elements : Any + Optional tag filters (usually by label) to narrow the results. + + Returns + ------- + Iterator[ET.Element] + Cached elements that match the provided filters. + + Raises + ------ + ValueError + If no label exists but filters are supplied. + """ if not tag_elements: return iter(self._cache[element_type.name]) - index = int(tag_elements[element_type.label]) - 1 + label = element_type.label + if label is None: + msg = f"Element type {element_type.name} has no label" + raise ValueError(msg) + index = int(tag_elements[label]) - 1 return iter([self._cache[element_type.name][index]]) - def _iter_elements(self, element_type: str, *elements, **tag_elements) -> Iterator: + def _iter_elements( + self, + element_type: str, + *elements: Any, + **tag_elements: Any, + ) -> Iterator[ET.Element]: """Iterate over the xml file. 
This method also includes a simple cache mechanism to re-use results. @@ -208,7 +269,7 @@ def _remove_namespace(self, namespace: str) -> None: elem.tag = elem.tag[nsl:] -def xml_query(element_name: str, *tags, **tag_elements) -> str: +def xml_query(element_name: str, *tags: Any, **tag_elements: Any) -> str: """Construct XPath query for extracting data from a XML with no namespace. Parameters diff --git a/tests/conftest.py b/tests/conftest.py index 4adc4dd..28dffc7 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -1,7 +1,6 @@ import shutil import uuid from collections.abc import Generator -from typing import Any import pytest from _pytest.logging import LogCaptureFixture @@ -156,7 +155,7 @@ def db_instance_with_schema() -> PlexosDB: @pytest.fixture(scope="function") -def db_manager_instance_empty_with_schema() -> Generator[SQLiteManager[Any], None, None]: +def db_manager_instance_empty_with_schema() -> Generator[SQLiteManager, None, None]: db: PlexosDB = PlexosDB() db.create_schema() yield db._db diff --git a/tests/test_db_manager.py b/tests/test_db_manager.py index f0be86b..a8f9d9f 100644 --- a/tests/test_db_manager.py +++ b/tests/test_db_manager.py @@ -3,6 +3,7 @@ from pathlib import Path from unittest.mock import MagicMock +import pytest from plexosdb.db_manager import SQLiteManager @@ -113,6 +114,9 @@ def test_failed_collation_creation(monkeypatch): """Test handling of failed collation creation.""" db = SQLiteManager() + # Store the original connection to close it later (prevents resource leak) + original_conn = db._con + mock_conn = MagicMock() mock_conn.create_collation.side_effect = sqlite3.Error("Failed to create collation") @@ -121,6 +125,8 @@ def test_failed_collation_creation(monkeypatch): result = db.add_collation("TEST_COLLATION", lambda x, y: 0) assert result is False, "Should return False when collation creation fails" + # Restore and close the original connection to avoid ResourceWarning + db._con = original_conn db.close() @@ -181,3 
+187,254 @@ def test_backup_path_handling(db_base, tmp_path): assert success is False finally: restricted_dir.chmod(0o755) + + +def test_config_property_returns_config(): + """Test that config property returns SQLiteConfig instance.""" + from plexosdb.db_manager import SQLiteConfig + + db = SQLiteManager() + + config = db.config + + assert config is not None + assert isinstance(config, SQLiteConfig) + db.close() + + +def test_config_property_in_memory(): + """Test config property for in-memory database.""" + db = SQLiteManager() # In-memory by default + + config = db.config + + assert config.cache_size_mb == 50 + assert config.mmap_size_gb == 0 + assert config.synchronous == "OFF" + assert config.journal_mode == "MEMORY" + db.close() + + +def test_config_property_file_database(tmp_path): + """Test config property for file-based database.""" + db_path = tmp_path / "test.db" + db = SQLiteManager(fpath_or_conn=db_path) + + config = db.config + + assert config.cache_size_mb == 20 + assert config.mmap_size_gb == 2 + assert config.synchronous == "NORMAL" + assert config.journal_mode == "WAL" + db.close() + + +def test_sqlite_version_property(): + """Test sqlite_version property returns version string.""" + db = SQLiteManager() + + version = db.sqlite_version + + assert isinstance(version, str) + # Version should be in format like "3.43.0" + parts = version.split(".") + assert len(parts) >= 2 + assert all(part.isdigit() for part in parts[:2]) + db.close() + + +def test_init_with_invalid_type_raises_type_error(): + """Test that __init__ raises TypeError for invalid input type.""" + with pytest.raises(TypeError): + SQLiteManager(fpath_or_conn=12345) # Invalid type: int + + +def test_init_with_list_raises_type_error(): + """Test that __init__ raises TypeError for list input.""" + with pytest.raises(TypeError): + SQLiteManager(fpath_or_conn=[]) # Invalid type: list + + +def test_init_with_dict_raises_type_error(): + """Test that __init__ raises TypeError for dict input.""" + 
with pytest.raises(TypeError): + SQLiteManager(fpath_or_conn={}) # Invalid type: dict + + +def test_transaction_context_manager_success(): + """Test transaction context manager executes successfully.""" + db = SQLiteManager() + db.execute("CREATE TABLE test (id INTEGER PRIMARY KEY, value TEXT)") + + with db.transaction(): + db.execute("INSERT INTO test (value) VALUES (?)", ("test1",)) + db.execute("INSERT INTO test (value) VALUES (?)", ("test2",)) + + # Verify data was committed + rows = db.query("SELECT * FROM test") + assert len(rows) == 2 + + db.close() + + +def test_transaction_context_manager_rollback_on_error(): + """Test transaction rolls back when sqlite3.Error occurs.""" + db = SQLiteManager() + db.execute("CREATE TABLE unique_test (id INTEGER PRIMARY KEY UNIQUE, value TEXT)") + db.execute("INSERT INTO unique_test VALUES (1, 'initial')") + + try: + with db.transaction(): + # This will fail because of duplicate primary key (database error) + db.execute("INSERT INTO unique_test VALUES (1, 'duplicate')") + except sqlite3.Error: + pass + + # Verify only initial row exists (rollback occurred) + rows = db.query("SELECT * FROM unique_test") + assert len(rows) == 1 + assert rows[0][1] == "initial" + + db.close() + + +def test_transaction_reraises_sqlite_error(): + """Test transaction re-raises sqlite3.Error.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.execute.side_effect = sqlite3.Error("Transaction failed") + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + with db.transaction(): + pass + + +def test_transaction_rollback_called_on_error(): + """Test that rollback is called when sqlite3.Error occurs in transaction.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.execute.side_effect = [None, sqlite3.Error("Error in transaction")] + + db._con = mock_conn + + try: + with db.transaction(): + # This would trigger the second execute call 
which raises the error + db.connection.execute("SELECT 1") + except sqlite3.Error: + pass + + mock_conn.rollback.assert_called_once() + + +def test_close_handles_rollback_error(): + """Test close() handles rollback errors gracefully.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.in_transaction = True + mock_conn.rollback.side_effect = sqlite3.Error("Rollback failed") + mock_conn.close.return_value = None + + db._con = mock_conn + + db.close() + + assert db._con is None + + +def test_close_handles_commit_error(): + """Test close() handles commit errors gracefully.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.in_transaction = False + mock_conn.commit.side_effect = sqlite3.Error("Commit failed") + mock_conn.close.return_value = None + + db._con = mock_conn + + db._is_in_memory = MagicMock(return_value=False) + + db.close() + + assert db._con is None + + +def test_close_handles_close_error(): + """Test close() handles connection.close() errors gracefully.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.in_transaction = False + mock_conn.close.side_effect = sqlite3.Error("Close failed") + + db._con = mock_conn + + # Mock _is_in_memory to return True (in-memory database) + db._is_in_memory = MagicMock(return_value=True) + + # Should not raise, should log warning and continue + db.close() + + # Connection should be nulled + assert db._con is None + + +def test_optimize_with_active_transaction(): + """Test optimize() commits transaction before VACUUM.""" + db = SQLiteManager() + db.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)") + db.execute("INSERT INTO test (id) VALUES (1)") + + db.connection.execute("BEGIN") + + # Optimize should commit the transaction before VACUUM + result = db.optimize() + + assert result is True + + +def test_optimize_error_handling(): + """Test optimize() handles errors 
gracefully.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_conn.in_transaction = False + mock_conn.execute.side_effect = sqlite3.Error("Optimize failed") + + db._con = mock_conn + + result = db.optimize() + + assert result is False + + +def test_last_insert_rowid_returns_zero_on_empty(): + """Test last_insert_rowid returns 0 when query returns empty.""" + from unittest.mock import MagicMock + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.fetchall.return_value = [] + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + # Mock query to return empty result + db.query = MagicMock(return_value=[]) + + result = db.last_insert_rowid() + + assert result == 0 diff --git a/tests/test_db_manager_context_manager.py b/tests/test_db_manager_context_manager.py new file mode 100644 index 0000000..aaacf80 --- /dev/null +++ b/tests/test_db_manager_context_manager.py @@ -0,0 +1,118 @@ +"""Tests for SQLiteManager context manager protocol. + +Tests for __enter__ and __exit__ methods that implement the context manager +protocol, allowing SQLiteManager to be used with the 'with' statement. 
+""" + +from __future__ import annotations + + +def test_context_manager_enter_returns_self() -> None: + """Test that __enter__ returns the SQLiteManager instance.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + result = db.__enter__() + + assert result is db + db.__exit__(None, None, None) + + +def test_context_manager_exit_closes_connection() -> None: + """Test that __exit__ closes the database connection.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + assert db._con is not None + + db.__exit__(None, None, None) + + assert db._con is None + + +def test_context_manager_with_statement() -> None: + """Test using SQLiteManager as context manager with 'with' statement.""" + from plexosdb.db_manager import SQLiteManager + + with SQLiteManager() as db: + # Inside the context, connection should be available + assert db._con is not None + + # Should be able to use database operations + db.execute("CREATE TABLE test (id INTEGER PRIMARY KEY)") + db.execute("INSERT INTO test (id) VALUES (1)") + result = db.query("SELECT * FROM test") + assert len(result) == 1 + + # After exiting context, connection should be closed + assert db._con is None + + +def test_context_manager_with_statement_operations() -> None: + """Test various database operations within context manager.""" + from plexosdb.db_manager import SQLiteManager + + with SQLiteManager() as db: + # Create table + db.execute("CREATE TABLE items (id INTEGER PRIMARY KEY, name TEXT)") + + # Insert multiple rows + db.executemany( + "INSERT INTO items (name) VALUES (?)", + [("Item 1",), ("Item 2",), ("Item 3",)], + ) + + # Query and verify + rows = db.query("SELECT * FROM items") + assert len(rows) == 3 + + # Use iterator + count = 0 + for row in db.iter_query("SELECT * FROM items"): + count += 1 + assert count == 3 + + +def test_context_manager_exception_still_closes() -> None: + """Test that connection is closed even when exception occurs in context.""" + from 
plexosdb.db_manager import SQLiteManager + + try: + with SQLiteManager() as db: + assert db._con is not None + raise ValueError("Test exception") + except ValueError: + pass + + # After exception, connection should still be closed + assert db._con is None + + +def test_context_manager_sqlite_error_in_context() -> None: + """Test that connection is closed even when sqlite3 error occurs.""" + from plexosdb.db_manager import SQLiteManager + + try: + with SQLiteManager() as db: + assert db._con is not None + + # Cause a database error + db.execute("INVALID SQL SYNTAX !!!!") + except Exception: + pass + + # Connection should still be closed + assert db._con is None + + +def test_close_with_none_connection() -> None: + """Test close() handles None connection gracefully.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + db.close() + # Second close should not raise error + db.close() + assert db._con is None diff --git a/tests/test_db_manager_error_handling.py b/tests/test_db_manager_error_handling.py index ec983ea..4c85fce 100644 --- a/tests/test_db_manager_error_handling.py +++ b/tests/test_db_manager_error_handling.py @@ -198,3 +198,189 @@ def test_executescript_successful_with_commit(): result = db.executescript("SELECT 1; SELECT 2;") assert result is True + + +def test_query_reraises_sqlite_error(): + """Test query() re-raises sqlite3.Error when cursor.execute fails.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.query("SELECT * FROM nonexistent") + + +def test_query_cursor_cleanup_on_error(): + """Test query() properly cleans up cursor on error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + 
mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.query("SELECT * FROM test") + + mock_cursor.close.assert_called_once() + + +def test_fetchall_dict_reraises_sqlite_error(): + """Test fetchall_dict() re-raises sqlite3.Error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.fetchall_dict("SELECT * FROM test") + + +def test_fetchall_dict_cursor_cleanup_on_error(): + """Test fetchall_dict() cleans up cursor on error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.fetchall_dict("SELECT * FROM test") + + mock_cursor.close.assert_called_once() + + +def test_fetchone_reraises_sqlite_error(): + """Test fetchone() re-raises sqlite3.Error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.fetchone("SELECT * FROM test") + + +def test_fetchone_cursor_cleanup_on_error(): + """Test fetchone() cleans up cursor on error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + 
with pytest.raises(sqlite3.Error): + db.fetchone("SELECT * FROM test") + + mock_cursor.close.assert_called_once() + + +def test_fetchone_dict_reraises_sqlite_error(): + """Test fetchone_dict() re-raises sqlite3.Error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.fetchone_dict("SELECT * FROM test") + + +def test_fetchone_dict_cursor_cleanup_on_error(): + """Test fetchone_dict() cleans up cursor on error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + db.fetchone_dict("SELECT * FROM test") + + mock_cursor.close.assert_called_once() + + +def test_validate_query_type_raises_on_insert(): + """Test _validate_query_type raises ValueError for INSERT.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + with pytest.raises(ValueError, match=r"Use execute.*INSERT"): + db.query("INSERT INTO test VALUES (1)") + + +def test_validate_query_type_raises_on_update(): + """Test _validate_query_type raises ValueError for UPDATE.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + with pytest.raises(ValueError, match=r"Use execute.*UPDATE"): + db.query("UPDATE test SET col = 1") + + +def test_validate_query_type_raises_on_delete(): + """Test _validate_query_type raises ValueError for DELETE.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + with pytest.raises(ValueError, match=r"Use execute.*DELETE"): + db.query("DELETE FROM test") + + +def test_validate_query_type_raises_on_create(): + 
"""Test _validate_query_type raises ValueError for CREATE.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + with pytest.raises(ValueError, match=r"Use execute.*CREATE"): + db.query("CREATE TABLE test (id INT)") + + +def test_validate_query_type_raises_on_alter(): + """Test _validate_query_type raises ValueError for ALTER.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + + with pytest.raises(ValueError, match=r"Use execute.*ALTER"): + db.query("ALTER TABLE test ADD COLUMN col TEXT") diff --git a/tests/test_db_manager_iterators.py b/tests/test_db_manager_iterators.py new file mode 100644 index 0000000..45f410c --- /dev/null +++ b/tests/test_db_manager_iterators.py @@ -0,0 +1,347 @@ +"""Tests for SQLiteManager iterator methods. + +Tests for iter_query() and iter_dicts() methods which provide memory-efficient +iteration over large result sets. +""" + +from __future__ import annotations + +import sqlite3 +from collections.abc import Generator +from typing import TYPE_CHECKING +from unittest.mock import MagicMock + +import pytest + +if TYPE_CHECKING: + from plexosdb.db_manager import SQLiteManager + + +@pytest.fixture +def db_with_large_dataset() -> Generator[SQLiteManager, None, None]: + """Create a database with multiple rows for iteration testing.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + db.executescript( + """ + CREATE TABLE items ( + id INTEGER PRIMARY KEY, + name TEXT NOT NULL, + value REAL, + category TEXT + ); + """ + ) + + # Insert test data + for i in range(1, 51): + db.execute( + "INSERT INTO items (name, value, category) VALUES (?, ?, ?)", + (f"Item {i}", i * 1.5, f"Category {(i % 5) + 1}"), + ) + + yield db + db.close() + + +def test_iter_query_yields_all_rows(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_query yields all rows from result set.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items")) + + assert len(rows) == 50 
+ assert rows[0][0] == 1 # First item's id + assert rows[-1][0] == 50 # Last item's id + + +def test_iter_query_returns_tuples(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_query returns tuples for each row.""" + for row in db_with_large_dataset.iter_query("SELECT * FROM items LIMIT 1"): + assert isinstance(row, tuple) + assert len(row) == 4 # id, name, value, category + + +def test_iter_query_with_tuple_params(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with tuple parameters.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items WHERE id > ? AND id <= ?", (10, 15))) + + assert len(rows) == 5 + assert rows[0][0] == 11 + assert rows[-1][0] == 15 + + +def test_iter_query_with_dict_params(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with dictionary parameters.""" + rows = list( + db_with_large_dataset.iter_query("SELECT * FROM items WHERE id = :target_id", {"target_id": 25}) + ) + + assert len(rows) == 1 + assert rows[0][0] == 25 + + +def test_iter_query_custom_batch_size_small( + db_with_large_dataset: SQLiteManager, +) -> None: + """Test iter_query with small batch size.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items", batch_size=5)) + + assert len(rows) == 50 + + +def test_iter_query_custom_batch_size_large( + db_with_large_dataset: SQLiteManager, +) -> None: + """Test iter_query with large batch size exceeding result set.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items", batch_size=1000)) + + assert len(rows) == 50 + + +def test_iter_query_empty_result_set(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with query that returns no rows.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items WHERE id > 1000")) + + assert len(rows) == 0 + + +def test_iter_query_single_row(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with query that returns single row.""" + rows = 
list(db_with_large_dataset.iter_query("SELECT * FROM items WHERE id = 1")) + + assert len(rows) == 1 + assert rows[0][1] == "Item 1" + + +def test_iter_query_memory_efficiency() -> None: + """Test that iter_query processes rows in batches (doesn't load all at once).""" + from plexosdb.db_manager import SQLiteManager + + # Create a large dataset + db = SQLiteManager() + db.executescript( + """ + CREATE TABLE large_table ( + id INTEGER PRIMARY KEY, + data TEXT + ); + """ + ) + + # Insert 1000 rows + for i in range(1, 1001): + db.execute("INSERT INTO large_table (data) VALUES (?)", (f"Data {i}",)) + + # Iterate and verify we can process without loading all into memory + row_count = 0 + for row in db.iter_query("SELECT * FROM large_table", batch_size=100): + row_count += 1 + assert len(row) == 2 + + assert row_count == 1000 + db.close() + + +def test_iter_query_with_where_clause(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with WHERE clause filtering.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items WHERE category = ?", ("Category 1",))) + + # Verify filtering works + assert all(row[3] == "Category 1" for row in rows) + assert len(rows) == 10 # 50 items / 5 categories = 10 per category + + +def test_iter_query_with_order_by(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_query with ORDER BY clause.""" + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items ORDER BY id DESC")) + + # Verify ordering (descending) + assert rows[0][0] == 50 + assert rows[-1][0] == 1 + + +def test_iter_query_cursor_cleanup_on_success( + db_with_large_dataset: SQLiteManager, +) -> None: + """Test that cursor is properly cleaned up after successful iteration.""" + # First iteration + list(db_with_large_dataset.iter_query("SELECT * FROM items LIMIT 5")) + + # Second iteration should work fine (cursor was cleaned up) + rows = list(db_with_large_dataset.iter_query("SELECT * FROM items LIMIT 3")) + assert len(rows) == 3 + + 
+def test_iter_query_reraises_sqlite_error() -> None: + """Test that iter_query re-raises sqlite3.Error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + list(db.iter_query("SELECT * FROM nonexistent")) + + +def test_iter_query_cursor_cleanup_on_error() -> None: + """Test that cursor is closed even when error occurs during iteration.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + list(db.iter_query("SELECT * FROM test")) + + mock_cursor.close.assert_called_once() + + +# ============================================================================ +# iter_dicts() Tests +# ============================================================================ + + +def test_iter_dicts_yields_dictionaries(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_dicts yields dictionaries with column names as keys.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items LIMIT 5")) + + assert len(dicts) == 5 + for row_dict in dicts: + assert isinstance(row_dict, dict) + assert "id" in row_dict + assert "name" in row_dict + assert "value" in row_dict + assert "category" in row_dict + + +def test_iter_dicts_all_rows(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_dicts yields all rows from result set.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items")) + + assert len(dicts) == 50 + assert dicts[0]["id"] == 1 + assert dicts[-1]["id"] == 50 + + +def test_iter_dicts_with_tuple_params(db_with_large_dataset: 
SQLiteManager) -> None: + """Test iter_dicts with tuple parameters.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items WHERE id > ? AND id <= ?", (5, 10))) + + assert len(dicts) == 5 + assert all(5 < d["id"] <= 10 for d in dicts) + + +def test_iter_dicts_with_dict_params(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_dicts with dictionary parameters.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items WHERE id = :item_id", {"item_id": 15})) + + assert len(dicts) == 1 + assert dicts[0]["id"] == 15 + + +def test_iter_dicts_custom_batch_size(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_dicts with custom batch size.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items", batch_size=10)) + + assert len(dicts) == 50 + + +def test_iter_dicts_empty_result_set(db_with_large_dataset: SQLiteManager) -> None: + """Test iter_dicts with query that returns no rows.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items WHERE id > 1000")) + + assert len(dicts) == 0 + + +def test_iter_dicts_column_name_mapping(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_dicts correctly maps column names from cursor.description.""" + dicts = list(db_with_large_dataset.iter_dicts("SELECT id, name FROM items LIMIT 1")) + + assert len(dicts) == 1 + row_dict = dicts[0] + assert "id" in row_dict + assert "name" in row_dict + assert "value" not in row_dict + assert "category" not in row_dict + + +def test_iter_dicts_with_null_values(db_with_large_dataset: SQLiteManager) -> None: + """Test that iter_dicts correctly handles NULL values.""" + # Update a row to have NULL value + db_with_large_dataset.execute("UPDATE items SET value = NULL WHERE id = 10") + + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items WHERE id = 10")) + + assert len(dicts) == 1 + assert dicts[0]["value"] is None + + +def test_iter_dicts_cursor_cleanup_on_success( + db_with_large_dataset: 
SQLiteManager, +) -> None: + """Test that cursor is properly cleaned up after successful iteration.""" + # First iteration + list(db_with_large_dataset.iter_dicts("SELECT * FROM items LIMIT 5")) + + # Second iteration should work fine (cursor was cleaned up) + dicts = list(db_with_large_dataset.iter_dicts("SELECT * FROM items LIMIT 3")) + assert len(dicts) == 3 + + +def test_iter_dicts_reraises_sqlite_error() -> None: + """Test that iter_dicts re-raises sqlite3.Error.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + list(db.iter_dicts("SELECT * FROM nonexistent")) + + +def test_iter_dicts_cursor_cleanup_on_error() -> None: + """Test that cursor is closed even when error occurs during iteration.""" + from plexosdb.db_manager import SQLiteManager + + db = SQLiteManager() + mock_conn = MagicMock() + mock_cursor = MagicMock() + mock_cursor.execute.side_effect = sqlite3.Error("Query failed") + mock_conn.cursor.return_value = mock_cursor + + db._con = mock_conn + + with pytest.raises(sqlite3.Error): + list(db.iter_dicts("SELECT * FROM test")) + + mock_cursor.close.assert_called_once() + + +def test_fetchmany_happy_path(db_with_large_dataset: SQLiteManager) -> None: + """Test fetchmany() successful execution returns correct number of rows.""" + result = db_with_large_dataset.fetchmany("SELECT * FROM items", size=2) + assert len(result) == 2 + assert result[0][1] == "Item 1" + assert result[1][1] == "Item 2" + + +def test_fetchmany_default_size(db_with_large_dataset: SQLiteManager) -> None: + """Test fetchmany() with default size parameter.""" + result = db_with_large_dataset.fetchmany("SELECT * FROM items") + assert len(result) > 0 + assert isinstance(result, list) diff --git a/tests/test_plexosdb.py 
b/tests/test_plexosdb.py index 3620018..0b18c5f 100644 --- a/tests/test_plexosdb.py +++ b/tests/test_plexosdb.py @@ -109,3 +109,15 @@ def test_xml_round_trip(db_base, tmp_path): def test_xml_not_exist(): with pytest.raises(FileNotFoundError): _ = PlexosDB.from_xml("not/existing/path") + + +def test_plexosdb_version_property_refresh(db_with_topology): + """Test PlexosDB.version property with cache refresh.""" + # First access should fetch version + version1 = db_with_topology.version + + # Second access should use cached value + version2 = db_with_topology.version + + # Should return same value + assert version1 == version2 diff --git a/tests/test_plexosdb_check_property_exists.py b/tests/test_plexosdb_check_property_exists.py index e845223..82b6d4e 100644 --- a/tests/test_plexosdb_check_property_exists.py +++ b/tests/test_plexosdb_check_property_exists.py @@ -38,7 +38,7 @@ def test_check_property_exists_invalid_collection_raises_error( from plexosdb import ClassEnum from plexosdb.exceptions import NotFoundError - with pytest.raises(NotFoundError, match="Collection.*does not exist"): + with pytest.raises(NotFoundError, match=r"Collection.*does not exist"): db_with_topology.check_property_exists( "InvalidCollection", ClassEnum.Generator, @@ -52,7 +52,7 @@ def test_check_property_exists_invalid_child_class_raises_error( from plexosdb import CollectionEnum from plexosdb.exceptions import NotFoundError - with pytest.raises(NotFoundError, match="Child class.*does not exist"): + with pytest.raises(NotFoundError, match=r"Child class.*does not exist"): db_with_topology.check_property_exists( CollectionEnum.Generators, "InvalidClass", diff --git a/tests/test_plexosdb_xml.py b/tests/test_plexosdb_xml.py index cf82e25..d1494ed 100644 --- a/tests/test_plexosdb_xml.py +++ b/tests/test_plexosdb_xml.py @@ -1,5 +1,5 @@ import os -import xml.etree.ElementTree as ET # noqa: N817 +import xml.etree.ElementTree as ET from pathlib import Path import pytest @@ -84,3 +84,26 @@ def 
test_save_xml(tmp_path): ) def test_xml_query(element_name, tags, tag_elements, expected_query): assert xml_query(element_name, *tags, **tag_elements) == expected_query + + +def test_iter_with_elements(xml_handler): + """Test XMLHandler.iter() with specific elements filters.""" + from plexosdb.enums import Schema + + # Get elements with specific IDs to exercise the filter path + elements = list(xml_handler.iter(Schema.Objects, 1)) + assert elements is not None + assert isinstance(elements, list) + + +def test_cache_iter_with_label_filter(xml_handler): + """Test XMLHandler._cache_iter() with label-based filtering.""" + from plexosdb.enums import Schema + + # This exercises the code path where label validation occurs + element_type = Schema.Objects + + if element_type.label: + # Create tag_elements dict with the label + elements = list(xml_handler._cache_iter(element_type, **{element_type.label: 1})) + assert isinstance(elements, list) diff --git a/tests/test_utils_properties.py b/tests/test_utils_properties.py index e64c212..d245d76 100644 --- a/tests/test_utils_properties.py +++ b/tests/test_utils_properties.py @@ -4,6 +4,8 @@ from typing import TYPE_CHECKING +import pytest + if TYPE_CHECKING: from plexosdb import PlexosDB @@ -115,3 +117,480 @@ def test_add_texts_for_properties_with_datafile_text(db_with_topology: PlexosDB) text_records = db_with_topology.query("SELECT COUNT(*) as count FROM t_text") # query returns tuples, so access by index assert text_records[0][0] > 0 + + +def test_prepare_properties_params_raises_error_when_no_memberships(db_with_topology: PlexosDB) -> None: + """Test that prepare_properties_params raises error when objects don't exist.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.exceptions import NotFoundError + from plexosdb.utils import prepare_properties_params + + # Don't add objects to database - they should not exist + records = [ + {"name": "nonexistent-gen-01", "Max Capacity": 100.0}, + {"name": 
"nonexistent-gen-02", "Max Capacity": 150.0}, + ] + + # Raises NotFoundError when objects don't exist in database + with pytest.raises(NotFoundError): + prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + +def test_prepare_properties_params_empty_collection_properties(db_with_topology: PlexosDB) -> None: + """Test prepare_properties_params with valid objects but no properties in collection.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + # Records with properties that don't exist in the collection + records = [ + {"name": "gen-01", "NonexistentProperty": 100.0}, + ] + + params, collection_properties = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + # Should return empty params since property doesn't exist in collection + assert params == [] + assert collection_properties is not None + + +def test_prepare_properties_params_multiple_records_single_valid(db_with_topology: PlexosDB) -> None: + """Test prepare_properties_params with multiple records but only some have valid properties.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + db_with_topology.add_object(ClassEnum.Generator, "gen-02") + + records = [ + {"name": "gen-01", "Max Capacity": 100.0}, + {"name": "gen-02", "NonexistentProperty": 150.0}, + ] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + # Should only have params for gen-01 with Max Capacity + assert len(params) >= 1 + assert all(param[2] is not None for param in params) + + +def 
test_prepare_properties_params_return_structure(db_with_topology: PlexosDB) -> None: + """Test that prepare_properties_params returns correct tuple structure.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + result = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + # Check that result is a tuple with 2 elements + assert isinstance(result, tuple) + assert len(result) == 2 + + params, collection_properties = result + + # Check params structure + assert isinstance(params, list) + if len(params) > 0: + assert isinstance(params[0], tuple) + assert len(params[0]) == 3 # (membership_id, property_id, value) + + # Check collection_properties structure + assert isinstance(collection_properties, list) + + +def test_insert_property_data_updates_multiple_properties(db_with_topology: PlexosDB) -> None: + """Test that insert_property_data marks multiple properties as dynamic and enabled.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + db_with_topology.add_object(ClassEnum.Generator, "gen-02") + + records = [ + {"name": "gen-01", "Max Capacity": 100.0, "Heat Rate": 10.5}, + {"name": "gen-02", "Max Capacity": 150.0, "Heat Rate": 9.8}, + ] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + + # Verify properties are marked as dynamic and enabled + properties = db_with_topology.query( + "SELECT property_id, is_dynamic, is_enabled FROM t_property WHERE is_dynamic=1 AND is_enabled=1" + 
) + assert len(properties) >= 2 + + +def test_insert_property_data_inserts_data_correctly(db_with_topology: PlexosDB) -> None: + """Test that insert_property_data inserts data rows into t_data table.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + + # Verify data was inserted + data_count = db_with_topology.query("SELECT COUNT(*) FROM t_data") + assert data_count[0][0] > 0 + + +def test_insert_property_data_builds_data_id_map(db_with_topology: PlexosDB) -> None: + """Test that insert_property_data builds correct data_id_map.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + data_id_map = insert_property_data(db_with_topology, params) + + # Verify data_id_map structure + assert isinstance(data_id_map, dict) + for key, value in data_id_map.items(): + assert len(key) == 3 # (membership_id, property_id, value) + assert len(value) == 2 # (data_id, obj_name) + + +def test_insert_property_data_handles_null_values(db_with_topology: PlexosDB) -> None: + """Test that insert_property_data handles None/NULL values correctly.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, 
prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": None}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + data_id_map = insert_property_data(db_with_topology, params) + + # Should handle NULL values without error + assert isinstance(data_id_map, dict) + + +def test_insert_property_data_empty_params_returns_empty_map(db_with_topology: PlexosDB) -> None: + """Test that insert_property_data returns empty map when params is empty.""" + from plexosdb.utils import insert_property_data + + with db_with_topology._db.transaction(): + data_id_map = insert_property_data(db_with_topology, []) + + assert data_id_map == {} + + +def test_insert_scenario_tags_early_return_when_scenario_none(db_with_topology: PlexosDB) -> None: + """Test that insert_scenario_tags early returns when scenario is None.""" + from plexosdb.utils import insert_scenario_tags + + # Should not raise error and should return early when scenario is None + # Using type: ignore because we're testing the None case + insert_scenario_tags(db_with_topology, None, [], chunksize=1000) # type: ignore[arg-type] + + +def test_insert_scenario_tags_creates_new_scenario(db_with_topology: PlexosDB) -> None: + """Test that insert_scenario_tags creates scenario if it doesn't exist.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, insert_scenario_tags, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + 
insert_scenario_tags(db_with_topology, "NewScenario", params, chunksize=1000) + + # Verify scenario was created + scenario_exists = db_with_topology.check_scenario_exists("NewScenario") + assert scenario_exists is True + + +def test_insert_scenario_tags_uses_existing_scenario(db_with_topology: PlexosDB) -> None: + """Test that insert_scenario_tags uses existing scenario instead of creating new one.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, insert_scenario_tags, prepare_properties_params + + # Pre-create scenario + scenario_id_before = db_with_topology.add_scenario("ExistingScenario") + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + insert_scenario_tags(db_with_topology, "ExistingScenario", params, chunksize=1000) + + # Verify scenario still exists and wasn't duplicated + scenario_id_after = db_with_topology.get_scenario_id("ExistingScenario") + assert scenario_id_after == scenario_id_before + + +def test_insert_scenario_tags_batching_single_batch(db_with_topology: PlexosDB) -> None: + """Test insert_scenario_tags with params less than chunksize.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, insert_scenario_tags, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + # Large chunksize - should process all 
in one batch + insert_scenario_tags(db_with_topology, "BatchTest", params, chunksize=1000) + + # Verify tags were inserted + tag_count = db_with_topology.query("SELECT COUNT(*) FROM t_tag") + assert tag_count[0][0] > 0 + + +def test_insert_scenario_tags_batching_multiple_batches(db_with_topology: PlexosDB) -> None: + """Test insert_scenario_tags with params split into multiple batches.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import insert_property_data, insert_scenario_tags, prepare_properties_params + + for i in range(3): + db_with_topology.add_object(ClassEnum.Generator, f"gen-{i:02d}") + + records = [ + {"name": f"gen-{i:02d}", "Max Capacity": 100.0 + i * 10.0, "Heat Rate": 10.0 + i} for i in range(3) + ] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + # Small chunksize to force multiple batches + insert_scenario_tags(db_with_topology, "BatchTest", params, chunksize=2) + + # Verify tags were inserted + tag_count = db_with_topology.query("SELECT COUNT(*) FROM t_tag") + assert tag_count[0][0] > 0 + + +def test_insert_scenario_tags_empty_params(db_with_topology: PlexosDB) -> None: + """Test insert_scenario_tags with empty params list.""" + from plexosdb.utils import insert_scenario_tags + + with db_with_topology._db.transaction(): + # Should not raise error with empty params + insert_scenario_tags(db_with_topology, "EmptyTest", [], chunksize=1000) + + # Verify scenario was still created + scenario_exists = db_with_topology.check_scenario_exists("EmptyTest") + assert scenario_exists is True + + +def test_add_texts_for_properties_skips_records_without_field(db_with_topology: PlexosDB) -> None: + """Test that add_texts_for_properties skips records without specified field.""" + from plexosdb import ClassEnum, CollectionEnum + from 
plexosdb.utils import add_texts_for_properties, insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [ + {"name": "gen-01", "Max Capacity": 100.0}, + # No datafile_text field in second record + ] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + data_id_map = insert_property_data(db_with_topology, params) + + # Call with field that doesn't exist in all records + add_texts_for_properties( + db_with_topology, params, data_id_map, records, "datafile_text", ClassEnum.DataFile + ) + + # Should not raise error + + +def test_add_texts_for_properties_handles_data_id_none(db_with_topology: PlexosDB) -> None: + """Test that add_texts_for_properties handles missing data_id in map.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import add_texts_for_properties, insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + + records = [{"name": "gen-01", "Max Capacity": 100.0, "datafile_text": "/path/to/file.csv"}] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + insert_property_data(db_with_topology, params) + + # Create empty map - simulating missing data_ids + empty_data_id_map: dict[tuple[int, int, int], tuple[int, str]] = {} + + # Should not raise error when data_id is missing + add_texts_for_properties( + db_with_topology, params, empty_data_id_map, records, "datafile_text", ClassEnum.DataFile + ) + + +def test_add_texts_for_properties_multiple_texts(db_with_topology: PlexosDB) -> None: + """Test that add_texts_for_properties handles multiple text records.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.utils import 
add_texts_for_properties, insert_property_data, prepare_properties_params + + db_with_topology.add_object(ClassEnum.Generator, "gen-01") + db_with_topology.add_object(ClassEnum.Generator, "gen-02") + + records = [ + {"name": "gen-01", "Max Capacity": 100.0, "datafile_text": "/path/file1.csv"}, + {"name": "gen-02", "Max Capacity": 150.0, "datafile_text": "/path/file2.csv"}, + ] + + params, _ = prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) + + with db_with_topology._db.transaction(): + data_id_map = insert_property_data(db_with_topology, params) + + add_texts_for_properties( + db_with_topology, params, data_id_map, records, "datafile_text", ClassEnum.DataFile + ) + + text_count = db_with_topology.query("SELECT COUNT(*) FROM t_text") + assert text_count[0][0] >= 2 + + +def test_add_texts_for_properties_empty_params(db_with_topology: PlexosDB) -> None: + """Test add_texts_for_properties with empty inputs.""" + from plexosdb import ClassEnum + from plexosdb.utils import add_texts_for_properties + + add_texts_for_properties(db_with_topology, [], {}, [], "datafile_text", ClassEnum.DataFile) + + +def test_prepare_properties_params_raises_on_no_memberships(db_with_topology: PlexosDB) -> None: + """Test prepare_properties_params raises NotFoundError when no memberships exist.""" + from plexosdb import ClassEnum, CollectionEnum + from plexosdb.exceptions import NotFoundError + from plexosdb.utils import prepare_properties_params + + # Try to prepare params for object that doesn't exist + records = [{"name": "NonExistentObject", "property": 100}] + + with pytest.raises(NotFoundError, match="Object = NonExistentObject not found"): + prepare_properties_params( + db_with_topology, + records, + ClassEnum.Generator, + CollectionEnum.Generators, + ClassEnum.System, + ) diff --git a/tests/test_utils_scenario.py b/tests/test_utils_scenario.py new file mode 100644 index 0000000..22e862b --- 
/dev/null +++ b/tests/test_utils_scenario.py @@ -0,0 +1,57 @@ +"""Tests for scenario utility functions.""" + +from __future__ import annotations + +from typing import TYPE_CHECKING + +if TYPE_CHECKING: + from plexosdb import PlexosDB + + +def test_get_scenario_id_creates_new_scenario(db_with_topology: PlexosDB) -> None: + """Test that get_scenario_id creates scenario if it doesn't exist.""" + from plexosdb.utils import get_scenario_id + + scenario_id = get_scenario_id(db_with_topology, "NewScenario") + + assert isinstance(scenario_id, int) + assert scenario_id > 0 + + scenario_exists = db_with_topology.check_scenario_exists("NewScenario") + assert scenario_exists is True + + +def test_get_scenario_id_returns_existing_scenario(db_with_topology: PlexosDB) -> None: + """Test that get_scenario_id returns existing scenario without creating new one.""" + from plexosdb.utils import get_scenario_id + + initial_id = db_with_topology.add_scenario("ExistingScenario") + + returned_id = get_scenario_id(db_with_topology, "ExistingScenario") + + assert returned_id == initial_id + + +def test_get_scenario_id_return_type(db_with_topology: PlexosDB) -> None: + """Test that get_scenario_id returns integer type.""" + from plexosdb.utils import get_scenario_id + + scenario_id = get_scenario_id(db_with_topology, "TestScenario") + + assert isinstance(scenario_id, int) + + +def test_get_scenario_id_different_names(db_with_topology: PlexosDB) -> None: + """Test that get_scenario_id handles various scenario names.""" + from plexosdb.utils import get_scenario_id + + names = ["Scenario-1", "Test_Scenario_2", "scenario with spaces", "S3"] + + ids = [get_scenario_id(db_with_topology, name) for name in names] + + assert all(isinstance(id_, int) and id_ > 0 for id_ in ids) + + assert len(set(ids)) == len(ids) + + for name in names: + assert db_with_topology.check_scenario_exists(name) is True diff --git a/uv.lock b/uv.lock index 7559cca..b4aadd4 100644 --- a/uv.lock +++ b/uv.lock @@ -1,6 +1,6 @@ 
version = 1 revision = 3 -requires-python = ">=3.11" +requires-python = ">=3.11, <3.15" [[package]] name = "accessible-pygments" @@ -68,15 +68,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/50/cd/30110dc0ffcf3b131156077b90e9f60ed75711223f306da4db08eff8403b/beautifulsoup4-4.13.4-py3-none-any.whl", hash = "sha256:9bbbb14bfde9d79f38b8cd5f8c7c85f4b8f2523190ebed90e950a8dea4cb1c4b", size = 187285, upload-time = "2025-04-15T17:05:12.221Z" }, ] -[[package]] -name = "bump2version" -version = "1.0.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/29/2a/688aca6eeebfe8941235be53f4da780c6edee05dbbea5d7abaa3aab6fad2/bump2version-1.0.1.tar.gz", hash = "sha256:762cb2bfad61f4ec8e2bdf452c7c267416f8c70dd9ecb1653fd0bbb01fa936e6", size = 36236, upload-time = "2020-10-07T18:38:40.119Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1d/e3/fa60c47d7c344533142eb3af0b73234ef8ea3fb2da742ab976b947e717df/bump2version-1.0.1-py2.py3-none-any.whl", hash = "sha256:37f927ea17cde7ae2d7baf832f8e80ce3777624554a653006c9144f8017fe410", size = 22030, upload-time = "2020-10-07T18:38:38.148Z" }, -] - [[package]] name = "certifi" version = "2025.7.9" @@ -250,6 +241,20 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/91/a1/cf2472db20f7ce4a6be1253a81cfdf85ad9c7885ffbed7047fb72c24cf87/distlib-0.3.9-py2.py3-none-any.whl", hash = "sha256:47f8c22fd27c27e25a65601af709b38e4f0a45ea4fc2e710f65755fa8caaaf87", size = 468973, upload-time = "2024-10-09T18:35:44.272Z" }, ] +[[package]] +name = "docstr-coverage" +version = "2.3.2" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "click" }, + { name = "pyyaml" }, + { name = "tqdm" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/75/86/d3f02e5baf426eac0f039849272204649137449f050d3fe0eb104c6f399c/docstr-coverage-2.3.2.tar.gz", hash = "sha256:e99a28c502ed21ae8a310cb9e14e8de2d7cff44d365b46fa6dca6de05bf156a0", size = 26750, 
upload-time = "2024-05-07T16:54:33.514Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/db/f7/7923d915a80aa2e04939260f7a000e1a353b25c7ffcf7771cd777559c27a/docstr_coverage-2.3.2-py3-none-any.whl", hash = "sha256:37a885d6560ad87e289b23bf0e54527885bacbf6b17cf55278d514dd0fef8ff5", size = 25816, upload-time = "2024-05-07T16:54:31.636Z" }, +] + [[package]] name = "docutils" version = "0.21.2" @@ -407,6 +412,69 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "librt" +version = "0.6.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/37/c3/cdff3c10e2e608490dc0a310ccf11ba777b3943ad4fcead2a2ade98c21e1/librt-0.6.3.tar.gz", hash = "sha256:c724a884e642aa2bbad52bb0203ea40406ad742368a5f90da1b220e970384aae", size = 54209, upload-time = "2025-11-29T14:01:56.058Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8b/80/bc60fd16fe24910bf5974fb914778a2e8540cef55385ab2cb04a0dfe42c4/librt-0.6.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:61348cc488b18d1b1ff9f3e5fcd5ac43ed22d3e13e862489d2267c2337285c08", size = 27285, upload-time = "2025-11-29T14:00:46.626Z" }, + { url = "https://files.pythonhosted.org/packages/88/3c/26335536ed9ba097c79cffcee148393592e55758fe76d99015af3e47a6d0/librt-0.6.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:64645b757d617ad5f98c08e07620bc488d4bced9ced91c6279cec418f16056fa", size = 27629, upload-time = "2025-11-29T14:00:47.863Z" }, + { url = "https://files.pythonhosted.org/packages/af/fd/2dcedeacfedee5d2eda23e7a49c1c12ce6221b5d58a13555f053203faafc/librt-0.6.3-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = 
"sha256:26b8026393920320bb9a811b691d73c5981385d537ffc5b6e22e53f7b65d4122", size = 82039, upload-time = "2025-11-29T14:00:49.131Z" }, + { url = "https://files.pythonhosted.org/packages/48/ff/6aa11914b83b0dc2d489f7636942a8e3322650d0dba840db9a1b455f3caa/librt-0.6.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d998b432ed9ffccc49b820e913c8f327a82026349e9c34fa3690116f6b70770f", size = 86560, upload-time = "2025-11-29T14:00:50.403Z" }, + { url = "https://files.pythonhosted.org/packages/76/a1/d25af61958c2c7eb978164aeba0350719f615179ba3f428b682b9a5fdace/librt-0.6.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e18875e17ef69ba7dfa9623f2f95f3eda6f70b536079ee6d5763ecdfe6cc9040", size = 86494, upload-time = "2025-11-29T14:00:51.383Z" }, + { url = "https://files.pythonhosted.org/packages/7d/4b/40e75d3b258c801908e64b39788f9491635f9554f8717430a491385bd6f2/librt-0.6.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:a218f85081fc3f70cddaed694323a1ad7db5ca028c379c214e3a7c11c0850523", size = 88914, upload-time = "2025-11-29T14:00:52.688Z" }, + { url = "https://files.pythonhosted.org/packages/97/6d/0070c81aba8a169224301c75fb5fb6c3c25ca67e6ced086584fc130d5a67/librt-0.6.3-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:1ef42ff4edd369e84433ce9b188a64df0837f4f69e3d34d3b34d4955c599d03f", size = 86944, upload-time = "2025-11-29T14:00:53.768Z" }, + { url = "https://files.pythonhosted.org/packages/a6/94/809f38887941b7726692e0b5a083dbdc87dbb8cf893e3b286550c5f0b129/librt-0.6.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e0f2b79993fec23a685b3e8107ba5f8675eeae286675a216da0b09574fa1e47", size = 89852, upload-time = "2025-11-29T14:00:54.71Z" }, + { url = "https://files.pythonhosted.org/packages/58/a3/b0e5b1cda675b91f1111d8ba941da455d8bfaa22f4d2d8963ba96ccb5b12/librt-0.6.3-cp311-cp311-win32.whl", hash = 
"sha256:fd98cacf4e0fabcd4005c452cb8a31750258a85cab9a59fb3559e8078da408d7", size = 19948, upload-time = "2025-11-29T14:00:55.989Z" }, + { url = "https://files.pythonhosted.org/packages/cc/73/70011c2b37e3be3ece3affd3abc8ebe5cda482b03fd6b3397906321a901e/librt-0.6.3-cp311-cp311-win_amd64.whl", hash = "sha256:e17b5b42c8045867ca9d1f54af00cc2275198d38de18545edaa7833d7e9e4ac8", size = 21406, upload-time = "2025-11-29T14:00:56.874Z" }, + { url = "https://files.pythonhosted.org/packages/91/ee/119aa759290af6ca0729edf513ca390c1afbeae60f3ecae9b9d56f25a8a9/librt-0.6.3-cp311-cp311-win_arm64.whl", hash = "sha256:87597e3d57ec0120a3e1d857a708f80c02c42ea6b00227c728efbc860f067c45", size = 20875, upload-time = "2025-11-29T14:00:57.752Z" }, + { url = "https://files.pythonhosted.org/packages/b4/2c/b59249c566f98fe90e178baf59e83f628d6c38fb8bc78319301fccda0b5e/librt-0.6.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:74418f718083009108dc9a42c21bf2e4802d49638a1249e13677585fcc9ca176", size = 27841, upload-time = "2025-11-29T14:00:58.925Z" }, + { url = "https://files.pythonhosted.org/packages/40/e8/9db01cafcd1a2872b76114c858f81cc29ce7ad606bc102020d6dabf470fb/librt-0.6.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:514f3f363d1ebc423357d36222c37e5c8e6674b6eae8d7195ac9a64903722057", size = 27844, upload-time = "2025-11-29T14:01:00.2Z" }, + { url = "https://files.pythonhosted.org/packages/59/4d/da449d3a7d83cc853af539dee42adc37b755d7eea4ad3880bacfd84b651d/librt-0.6.3-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:cf1115207a5049d1f4b7b4b72de0e52f228d6c696803d94843907111cbf80610", size = 84091, upload-time = "2025-11-29T14:01:01.118Z" }, + { url = "https://files.pythonhosted.org/packages/ea/6c/f90306906fb6cc6eaf4725870f0347115de05431e1f96d35114392d31fda/librt-0.6.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ad8ba80cdcea04bea7b78fcd4925bfbf408961e9d8397d2ee5d3ec121e20c08c", size = 88239, 
upload-time = "2025-11-29T14:01:02.11Z" }, + { url = "https://files.pythonhosted.org/packages/e7/ae/473ce7b423cfac2cb503851a89d9d2195bf615f534d5912bf86feeebbee7/librt-0.6.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:4018904c83eab49c814e2494b4e22501a93cdb6c9f9425533fe693c3117126f9", size = 88815, upload-time = "2025-11-29T14:01:03.114Z" }, + { url = "https://files.pythonhosted.org/packages/c4/6d/934df738c87fb9617cabefe4891eece585a06abe6def25b4bca3b174429d/librt-0.6.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8983c5c06ac9c990eac5eb97a9f03fe41dc7e9d7993df74d9e8682a1056f596c", size = 90598, upload-time = "2025-11-29T14:01:04.071Z" }, + { url = "https://files.pythonhosted.org/packages/72/89/eeaa124f5e0f431c2b39119550378ae817a4b1a3c93fd7122f0639336fff/librt-0.6.3-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:d7769c579663a6f8dbf34878969ac71befa42067ce6bf78e6370bf0d1194997c", size = 88603, upload-time = "2025-11-29T14:01:05.02Z" }, + { url = "https://files.pythonhosted.org/packages/4d/ed/c60b3c1cfc27d709bc0288af428ce58543fcb5053cf3eadbc773c24257f5/librt-0.6.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:d3c9a07eafdc70556f8c220da4a538e715668c0c63cabcc436a026e4e89950bf", size = 92112, upload-time = "2025-11-29T14:01:06.304Z" }, + { url = "https://files.pythonhosted.org/packages/c1/ab/f56169be5f716ef4ab0277be70bcb1874b4effc262e655d85b505af4884d/librt-0.6.3-cp312-cp312-win32.whl", hash = "sha256:38320386a48a15033da295df276aea93a92dfa94a862e06893f75ea1d8bbe89d", size = 20127, upload-time = "2025-11-29T14:01:07.283Z" }, + { url = "https://files.pythonhosted.org/packages/ff/8d/222750ce82bf95125529eaab585ac7e2829df252f3cfc05d68792fb1dd2c/librt-0.6.3-cp312-cp312-win_amd64.whl", hash = "sha256:c0ecf4786ad0404b072196b5df774b1bb23c8aacdcacb6c10b4128bc7b00bd01", size = 21545, upload-time = "2025-11-29T14:01:08.184Z" }, + { url = 
"https://files.pythonhosted.org/packages/72/c9/f731ddcfb72f446a92a8674c6b8e1e2242773cce43a04f41549bd8b958ff/librt-0.6.3-cp312-cp312-win_arm64.whl", hash = "sha256:9f2a6623057989ebc469cd9cc8fe436c40117a0147627568d03f84aef7854c55", size = 20946, upload-time = "2025-11-29T14:01:09.384Z" }, + { url = "https://files.pythonhosted.org/packages/dd/aa/3055dd440f8b8b3b7e8624539a0749dd8e1913e978993bcca9ce7e306231/librt-0.6.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:9e716f9012148a81f02f46a04fc4c663420c6fbfeacfac0b5e128cf43b4413d3", size = 27874, upload-time = "2025-11-29T14:01:10.615Z" }, + { url = "https://files.pythonhosted.org/packages/ef/93/226d7dd455eaa4c26712b5ccb2dfcca12831baa7f898c8ffd3a831e29fda/librt-0.6.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:669ff2495728009a96339c5ad2612569c6d8be4474e68f3f3ac85d7c3261f5f5", size = 27852, upload-time = "2025-11-29T14:01:11.535Z" }, + { url = "https://files.pythonhosted.org/packages/4e/8b/db9d51191aef4e4cc06285250affe0bb0ad8b2ed815f7ca77951655e6f02/librt-0.6.3-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:349b6873ebccfc24c9efd244e49da9f8a5c10f60f07575e248921aae2123fc42", size = 84264, upload-time = "2025-11-29T14:01:12.461Z" }, + { url = "https://files.pythonhosted.org/packages/8d/53/297c96bda3b5a73bdaf748f1e3ae757edd29a0a41a956b9c10379f193417/librt-0.6.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0c74c26736008481c9f6d0adf1aedb5a52aff7361fea98276d1f965c0256ee70", size = 88432, upload-time = "2025-11-29T14:01:13.405Z" }, + { url = "https://files.pythonhosted.org/packages/54/3a/c005516071123278e340f22de72fa53d51e259d49215295c212da16c4dc2/librt-0.6.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:408a36ddc75e91918cb15b03460bdc8a015885025d67e68c6f78f08c3a88f522", size = 89014, upload-time = "2025-11-29T14:01:14.373Z" }, + { url = 
"https://files.pythonhosted.org/packages/8e/9b/ea715f818d926d17b94c80a12d81a79e95c44f52848e61e8ca1ff29bb9a9/librt-0.6.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e61ab234624c9ffca0248a707feffe6fac2343758a36725d8eb8a6efef0f8c30", size = 90807, upload-time = "2025-11-29T14:01:15.377Z" }, + { url = "https://files.pythonhosted.org/packages/f0/fc/4e2e4c87e002fa60917a8e474fd13c4bac9a759df82be3778573bb1ab954/librt-0.6.3-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:324462fe7e3896d592b967196512491ec60ca6e49c446fe59f40743d08c97917", size = 88890, upload-time = "2025-11-29T14:01:16.633Z" }, + { url = "https://files.pythonhosted.org/packages/70/7f/c7428734fbdfd4db3d5b9237fc3a857880b2ace66492836f6529fef25d92/librt-0.6.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36b2ec8c15030002c7f688b4863e7be42820d7c62d9c6eece3db54a2400f0530", size = 92300, upload-time = "2025-11-29T14:01:17.658Z" }, + { url = "https://files.pythonhosted.org/packages/f9/0c/738c4824fdfe74dc0f95d5e90ef9e759d4ecf7fd5ba964d54a7703322251/librt-0.6.3-cp313-cp313-win32.whl", hash = "sha256:25b1b60cb059471c0c0c803e07d0dfdc79e41a0a122f288b819219ed162672a3", size = 20159, upload-time = "2025-11-29T14:01:18.61Z" }, + { url = "https://files.pythonhosted.org/packages/f2/95/93d0e61bc617306ecf4c54636b5cbde4947d872563565c4abdd9d07a39d3/librt-0.6.3-cp313-cp313-win_amd64.whl", hash = "sha256:10a95ad074e2a98c9e4abc7f5b7d40e5ecbfa84c04c6ab8a70fabf59bd429b88", size = 21484, upload-time = "2025-11-29T14:01:19.506Z" }, + { url = "https://files.pythonhosted.org/packages/10/23/abd7ace79ab54d1dbee265f13529266f686a7ce2d21ab59a992f989009b6/librt-0.6.3-cp313-cp313-win_arm64.whl", hash = "sha256:17000df14f552e86877d67e4ab7966912224efc9368e998c96a6974a8d609bf9", size = 20935, upload-time = "2025-11-29T14:01:20.415Z" }, + { url = "https://files.pythonhosted.org/packages/83/14/c06cb31152182798ed98be73f54932ab984894f5a8fccf9b73130897a938/librt-0.6.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = 
"sha256:8e695f25d1a425ad7a272902af8ab8c8d66c1998b177e4b5f5e7b4e215d0c88a", size = 27566, upload-time = "2025-11-29T14:01:21.609Z" }, + { url = "https://files.pythonhosted.org/packages/0c/b1/ce83ca7b057b06150519152f53a0b302d7c33c8692ce2f01f669b5a819d9/librt-0.6.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:3e84a4121a7ae360ca4da436548a9c1ca8ca134a5ced76c893cc5944426164bd", size = 27753, upload-time = "2025-11-29T14:01:22.558Z" }, + { url = "https://files.pythonhosted.org/packages/3b/ec/739a885ef0a2839b6c25f1b01c99149d2cb6a34e933ffc8c051fcd22012e/librt-0.6.3-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:05f385a414de3f950886ea0aad8f109650d4b712cf9cc14cc17f5f62a9ab240b", size = 83178, upload-time = "2025-11-29T14:01:23.555Z" }, + { url = "https://files.pythonhosted.org/packages/db/bd/dc18bb1489d48c0911b9f4d72eae2d304ea264e215ba80f1e6ba4a9fc41d/librt-0.6.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36a8e337461150b05ca2c7bdedb9e591dfc262c5230422cea398e89d0c746cdc", size = 87266, upload-time = "2025-11-29T14:01:24.532Z" }, + { url = "https://files.pythonhosted.org/packages/94/f3/d0c5431b39eef15e48088b2d739ad84b17c2f1a22c0345c6d4c4a42b135e/librt-0.6.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dcbe48f6a03979384f27086484dc2a14959be1613cb173458bd58f714f2c48f3", size = 87623, upload-time = "2025-11-29T14:01:25.798Z" }, + { url = "https://files.pythonhosted.org/packages/3b/15/9a52e90834e4bd6ee16cdbaf551cb32227cbaad27398391a189c489318bc/librt-0.6.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:4bca9e4c260233fba37b15c4ec2f78aa99c1a79fbf902d19dd4a763c5c3fb751", size = 89436, upload-time = "2025-11-29T14:01:26.769Z" }, + { url = "https://files.pythonhosted.org/packages/c3/8a/a7e78e46e8486e023c50f21758930ef4793999115229afd65de69e94c9cc/librt-0.6.3-cp314-cp314-musllinux_1_2_i686.whl", hash = 
"sha256:760c25ed6ac968e24803eb5f7deb17ce026902d39865e83036bacbf5cf242aa8", size = 87540, upload-time = "2025-11-29T14:01:27.756Z" }, + { url = "https://files.pythonhosted.org/packages/49/01/93799044a1cccac31f1074b07c583e181829d240539657e7f305ae63ae2a/librt-0.6.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:4aa4a93a353ccff20df6e34fa855ae8fd788832c88f40a9070e3ddd3356a9f0e", size = 90597, upload-time = "2025-11-29T14:01:29.35Z" }, + { url = "https://files.pythonhosted.org/packages/a7/29/00c7f58b8f8eb1bad6529ffb6c9cdcc0890a27dac59ecda04f817ead5277/librt-0.6.3-cp314-cp314-win32.whl", hash = "sha256:cb92741c2b4ea63c09609b064b26f7f5d9032b61ae222558c55832ec3ad0bcaf", size = 18955, upload-time = "2025-11-29T14:01:30.325Z" }, + { url = "https://files.pythonhosted.org/packages/d7/13/2739e6e197a9f751375a37908a6a5b0bff637b81338497a1bcb5817394da/librt-0.6.3-cp314-cp314-win_amd64.whl", hash = "sha256:fdcd095b1b812d756fa5452aca93b962cf620694c0cadb192cec2bb77dcca9a2", size = 20263, upload-time = "2025-11-29T14:01:31.287Z" }, + { url = "https://files.pythonhosted.org/packages/e1/73/393868fc2158705ea003114a24e73bb10b03bda31e9ad7b5c5ec6575338b/librt-0.6.3-cp314-cp314-win_arm64.whl", hash = "sha256:822ca79e28720a76a935c228d37da6579edef048a17cd98d406a2484d10eda78", size = 19575, upload-time = "2025-11-29T14:01:32.229Z" }, + { url = "https://files.pythonhosted.org/packages/48/6d/3c8ff3dec21bf804a205286dd63fd28dcdbe00b8dd7eb7ccf2e21a40a0b0/librt-0.6.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:078cd77064d1640cb7b0650871a772956066174d92c8aeda188a489b58495179", size = 28732, upload-time = "2025-11-29T14:01:33.165Z" }, + { url = "https://files.pythonhosted.org/packages/f4/90/e214b8b4aa34ed3d3f1040719c06c4d22472c40c5ef81a922d5af7876eb4/librt-0.6.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:5cc22f7f5c0cc50ed69f4b15b9c51d602aabc4500b433aaa2ddd29e578f452f7", size = 29065, upload-time = "2025-11-29T14:01:34.088Z" }, + { url = 
"https://files.pythonhosted.org/packages/ab/90/ef61ed51f0a7770cc703422d907a757bbd8811ce820c333d3db2fd13542a/librt-0.6.3-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:14b345eb7afb61b9fdcdfda6738946bd11b8e0f6be258666b0646af3b9bb5916", size = 93703, upload-time = "2025-11-29T14:01:35.057Z" }, + { url = "https://files.pythonhosted.org/packages/a8/ae/c30bb119c35962cbe9a908a71da99c168056fc3f6e9bbcbc157d0b724d89/librt-0.6.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d46aa46aa29b067f0b8b84f448fd9719aaf5f4c621cc279164d76a9dc9ab3e8", size = 98890, upload-time = "2025-11-29T14:01:36.031Z" }, + { url = "https://files.pythonhosted.org/packages/d1/96/47a4a78d252d36f072b79d592df10600d379a895c3880c8cbd2ac699f0ad/librt-0.6.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1b51ba7d9d5d9001494769eca8c0988adce25d0a970c3ba3f2eb9df9d08036fc", size = 98255, upload-time = "2025-11-29T14:01:37.058Z" }, + { url = "https://files.pythonhosted.org/packages/e5/28/779b5cc3cd9987683884eb5f5672e3251676bebaaae6b7da1cf366eb1da1/librt-0.6.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:ced0925a18fddcff289ef54386b2fc230c5af3c83b11558571124bfc485b8c07", size = 100769, upload-time = "2025-11-29T14:01:38.413Z" }, + { url = "https://files.pythonhosted.org/packages/28/d7/771755e57c375cb9d25a4e106f570607fd856e2cb91b02418db1db954796/librt-0.6.3-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:6bac97e51f66da2ca012adddbe9fd656b17f7368d439de30898f24b39512f40f", size = 98580, upload-time = "2025-11-29T14:01:39.459Z" }, + { url = "https://files.pythonhosted.org/packages/d0/ec/8b157eb8fbc066339a2f34b0aceb2028097d0ed6150a52e23284a311eafe/librt-0.6.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:b2922a0e8fa97395553c304edc3bd36168d8eeec26b92478e292e5d4445c1ef0", size = 101706, upload-time = "2025-11-29T14:01:40.474Z" }, + { url = 
"https://files.pythonhosted.org/packages/82/a8/4aaead9a06c795a318282aebf7d3e3e578fa889ff396e1b640c3be4c7806/librt-0.6.3-cp314-cp314t-win32.whl", hash = "sha256:f33462b19503ba68d80dac8a1354402675849259fb3ebf53b67de86421735a3a", size = 19465, upload-time = "2025-11-29T14:01:41.77Z" }, + { url = "https://files.pythonhosted.org/packages/3a/61/b7e6a02746c1731670c19ba07d86da90b1ae45d29e405c0b5615abf97cde/librt-0.6.3-cp314-cp314t-win_amd64.whl", hash = "sha256:04f8ce401d4f6380cfc42af0f4e67342bf34c820dae01343f58f472dbac75dcf", size = 21042, upload-time = "2025-11-29T14:01:42.865Z" }, + { url = "https://files.pythonhosted.org/packages/0e/3d/72cc9ec90bb80b5b1a65f0bb74a0f540195837baaf3b98c7fa4a7aa9718e/librt-0.6.3-cp314-cp314t-win_arm64.whl", hash = "sha256:afb39550205cc5e5c935762c6bf6a2bb34f7d21a68eadb25e2db7bf3593fecc0", size = 20246, upload-time = "2025-11-29T14:01:44.13Z" }, +] + [[package]] name = "loguru" version = "0.7.3" @@ -515,25 +583,41 @@ wheels = [ [[package]] name = "mypy" -version = "1.11.2" +version = "1.19.0" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "librt" }, { name = "mypy-extensions" }, + { name = "pathspec" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5c/86/5d7cbc4974fd564550b80fbb8103c05501ea11aa7835edf3351d90095896/mypy-1.11.2.tar.gz", hash = "sha256:7f9993ad3e0ffdc95c2a14b66dee63729f021968bff8ad911867579c65d13a79", size = 3078806, upload-time = "2024-08-24T22:50:11.357Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/e2/aa/cc56fb53ebe14c64f1fe91d32d838d6f4db948b9494e200d2f61b820b85d/mypy-1.11.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:75746e06d5fa1e91bfd5432448d00d34593b52e7e91a187d981d08d1f33d4385", size = 10859630, upload-time = "2024-08-24T22:49:51.895Z" }, - { url = "https://files.pythonhosted.org/packages/04/c8/b19a760fab491c22c51975cf74e3d253b8c8ce2be7afaa2490fbf95a8c59/mypy-1.11.2-cp311-cp311-macosx_11_0_arm64.whl", hash = 
"sha256:a976775ab2256aadc6add633d44f100a2517d2388906ec4f13231fafbb0eccca", size = 10037973, upload-time = "2024-08-24T22:49:21.428Z" }, - { url = "https://files.pythonhosted.org/packages/88/57/7e7e39f2619c8f74a22efb9a4c4eff32b09d3798335625a124436d121d89/mypy-1.11.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:cd953f221ac1379050a8a646585a29574488974f79d8082cedef62744f0a0104", size = 12416659, upload-time = "2024-08-24T22:49:35.02Z" }, - { url = "https://files.pythonhosted.org/packages/fc/a6/37f7544666b63a27e46c48f49caeee388bf3ce95f9c570eb5cfba5234405/mypy-1.11.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:57555a7715c0a34421013144a33d280e73c08df70f3a18a552938587ce9274f4", size = 12897010, upload-time = "2024-08-24T22:49:29.725Z" }, - { url = "https://files.pythonhosted.org/packages/84/8b/459a513badc4d34acb31c736a0101c22d2bd0697b969796ad93294165cfb/mypy-1.11.2-cp311-cp311-win_amd64.whl", hash = "sha256:36383a4fcbad95f2657642a07ba22ff797de26277158f1cc7bd234821468b1b6", size = 9562873, upload-time = "2024-08-24T22:49:40.448Z" }, - { url = "https://files.pythonhosted.org/packages/35/3a/ed7b12ecc3f6db2f664ccf85cb2e004d3e90bec928e9d7be6aa2f16b7cdf/mypy-1.11.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:e8960dbbbf36906c5c0b7f4fbf2f0c7ffb20f4898e6a879fcf56a41a08b0d318", size = 10990335, upload-time = "2024-08-24T22:49:54.245Z" }, - { url = "https://files.pythonhosted.org/packages/04/e4/1a9051e2ef10296d206519f1df13d2cc896aea39e8683302f89bf5792a59/mypy-1.11.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:06d26c277962f3fb50e13044674aa10553981ae514288cb7d0a738f495550b36", size = 10007119, upload-time = "2024-08-24T22:49:03.451Z" }, - { url = "https://files.pythonhosted.org/packages/f3/3c/350a9da895f8a7e87ade0028b962be0252d152e0c2fbaafa6f0658b4d0d4/mypy-1.11.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.manylinux_2_28_x86_64.whl", hash = 
"sha256:6e7184632d89d677973a14d00ae4d03214c8bc301ceefcdaf5c474866814c987", size = 12506856, upload-time = "2024-08-24T22:50:08.804Z" }, - { url = "https://files.pythonhosted.org/packages/b6/49/ee5adf6a49ff13f4202d949544d3d08abb0ea1f3e7f2a6d5b4c10ba0360a/mypy-1.11.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:3a66169b92452f72117e2da3a576087025449018afc2d8e9bfe5ffab865709ca", size = 12952066, upload-time = "2024-08-24T22:50:03.89Z" }, - { url = "https://files.pythonhosted.org/packages/27/c0/b19d709a42b24004d720db37446a42abadf844d5c46a2c442e2a074d70d9/mypy-1.11.2-cp312-cp312-win_amd64.whl", hash = "sha256:969ea3ef09617aff826885a22ece0ddef69d95852cdad2f60c8bb06bf1f71f70", size = 9664000, upload-time = "2024-08-24T22:49:59.703Z" }, - { url = "https://files.pythonhosted.org/packages/42/3a/bdf730640ac523229dd6578e8a581795720a9321399de494374afc437ec5/mypy-1.11.2-py3-none-any.whl", hash = "sha256:b499bc07dbdcd3de92b0a8b29fdf592c111276f6a12fe29c30f6c417dd546d12", size = 2619625, upload-time = "2024-08-24T22:50:01.842Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/f9/b5/b58cdc25fadd424552804bf410855d52324183112aa004f0732c5f6324cf/mypy-1.19.0.tar.gz", hash = "sha256:f6b874ca77f733222641e5c46e4711648c4037ea13646fd0cdc814c2eaec2528", size = 3579025, upload-time = "2025-11-28T15:49:01.26Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0f/d2/010fb171ae5ac4a01cc34fbacd7544531e5ace95c35ca166dd8fd1b901d0/mypy-1.19.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:a31e4c28e8ddb042c84c5e977e28a21195d086aaffaf08b016b78e19c9ef8106", size = 13010563, upload-time = "2025-11-28T15:48:23.975Z" }, + { url = "https://files.pythonhosted.org/packages/41/6b/63f095c9f1ce584fdeb595d663d49e0980c735a1d2004720ccec252c5d47/mypy-1.19.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:34ec1ac66d31644f194b7c163d7f8b8434f1b49719d403a5d26c87fff7e913f7", size = 12077037, upload-time = "2025-11-28T15:47:51.582Z" }, + { url = 
"https://files.pythonhosted.org/packages/d7/83/6cb93d289038d809023ec20eb0b48bbb1d80af40511fa077da78af6ff7c7/mypy-1.19.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:cb64b0ba5980466a0f3f9990d1c582bcab8db12e29815ecb57f1408d99b4bff7", size = 12680255, upload-time = "2025-11-28T15:46:57.628Z" }, + { url = "https://files.pythonhosted.org/packages/99/db/d217815705987d2cbace2edd9100926196d6f85bcb9b5af05058d6e3c8ad/mypy-1.19.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:120cffe120cca5c23c03c77f84abc0c14c5d2e03736f6c312480020082f1994b", size = 13421472, upload-time = "2025-11-28T15:47:59.655Z" }, + { url = "https://files.pythonhosted.org/packages/4e/51/d2beaca7c497944b07594f3f8aad8d2f0e8fc53677059848ae5d6f4d193e/mypy-1.19.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:7a500ab5c444268a70565e374fc803972bfd1f09545b13418a5174e29883dab7", size = 13651823, upload-time = "2025-11-28T15:45:29.318Z" }, + { url = "https://files.pythonhosted.org/packages/aa/d1/7883dcf7644db3b69490f37b51029e0870aac4a7ad34d09ceae709a3df44/mypy-1.19.0-cp311-cp311-win_amd64.whl", hash = "sha256:c14a98bc63fd867530e8ec82f217dae29d0550c86e70debc9667fff1ec83284e", size = 10049077, upload-time = "2025-11-28T15:45:39.818Z" }, + { url = "https://files.pythonhosted.org/packages/11/7e/1afa8fb188b876abeaa14460dc4983f909aaacaa4bf5718c00b2c7e0b3d5/mypy-1.19.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:0fb3115cb8fa7c5f887c8a8d81ccdcb94cff334684980d847e5a62e926910e1d", size = 13207728, upload-time = "2025-11-28T15:46:26.463Z" }, + { url = "https://files.pythonhosted.org/packages/b2/13/f103d04962bcbefb1644f5ccb235998b32c337d6c13145ea390b9da47f3e/mypy-1.19.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:f3e19e3b897562276bb331074d64c076dbdd3e79213f36eed4e592272dabd760", size = 12202945, upload-time = "2025-11-28T15:48:49.143Z" }, + { url = 
"https://files.pythonhosted.org/packages/e4/93/a86a5608f74a22284a8ccea8592f6e270b61f95b8588951110ad797c2ddd/mypy-1.19.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b9d491295825182fba01b6ffe2c6fe4e5a49dbf4e2bb4d1217b6ced3b4797bc6", size = 12718673, upload-time = "2025-11-28T15:47:37.193Z" }, + { url = "https://files.pythonhosted.org/packages/3d/58/cf08fff9ced0423b858f2a7495001fda28dc058136818ee9dffc31534ea9/mypy-1.19.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6016c52ab209919b46169651b362068f632efcd5eb8ef9d1735f6f86da7853b2", size = 13608336, upload-time = "2025-11-28T15:48:32.625Z" }, + { url = "https://files.pythonhosted.org/packages/64/ed/9c509105c5a6d4b73bb08733102a3ea62c25bc02c51bca85e3134bf912d3/mypy-1.19.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f188dcf16483b3e59f9278c4ed939ec0254aa8a60e8fc100648d9ab5ee95a431", size = 13833174, upload-time = "2025-11-28T15:45:48.091Z" }, + { url = "https://files.pythonhosted.org/packages/cd/71/01939b66e35c6f8cb3e6fdf0b657f0fd24de2f8ba5e523625c8e72328208/mypy-1.19.0-cp312-cp312-win_amd64.whl", hash = "sha256:0e3c3d1e1d62e678c339e7ade72746a9e0325de42cd2cccc51616c7b2ed1a018", size = 10112208, upload-time = "2025-11-28T15:46:41.702Z" }, + { url = "https://files.pythonhosted.org/packages/cb/0d/a1357e6bb49e37ce26fcf7e3cc55679ce9f4ebee0cd8b6ee3a0e301a9210/mypy-1.19.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:7686ed65dbabd24d20066f3115018d2dce030d8fa9db01aa9f0a59b6813e9f9e", size = 13191993, upload-time = "2025-11-28T15:47:22.336Z" }, + { url = "https://files.pythonhosted.org/packages/5d/75/8e5d492a879ec4490e6ba664b5154e48c46c85b5ac9785792a5ec6a4d58f/mypy-1.19.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:fd4a985b2e32f23bead72e2fb4bbe5d6aceee176be471243bd831d5b2644672d", size = 12174411, upload-time = "2025-11-28T15:44:55.492Z" }, + { url = 
"https://files.pythonhosted.org/packages/71/31/ad5dcee9bfe226e8eaba777e9d9d251c292650130f0450a280aec3485370/mypy-1.19.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:fc51a5b864f73a3a182584b1ac75c404396a17eced54341629d8bdcb644a5bba", size = 12727751, upload-time = "2025-11-28T15:44:14.169Z" }, + { url = "https://files.pythonhosted.org/packages/77/06/b6b8994ce07405f6039701f4b66e9d23f499d0b41c6dd46ec28f96d57ec3/mypy-1.19.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:37af5166f9475872034b56c5efdcf65ee25394e9e1d172907b84577120714364", size = 13593323, upload-time = "2025-11-28T15:46:34.699Z" }, + { url = "https://files.pythonhosted.org/packages/68/b1/126e274484cccdf099a8e328d4fda1c7bdb98a5e888fa6010b00e1bbf330/mypy-1.19.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:510c014b722308c9bd377993bcbf9a07d7e0692e5fa8fc70e639c1eb19fc6bee", size = 13818032, upload-time = "2025-11-28T15:46:18.286Z" }, + { url = "https://files.pythonhosted.org/packages/f8/56/53a8f70f562dfc466c766469133a8a4909f6c0012d83993143f2a9d48d2d/mypy-1.19.0-cp313-cp313-win_amd64.whl", hash = "sha256:cabbee74f29aa9cd3b444ec2f1e4fa5a9d0d746ce7567a6a609e224429781f53", size = 10120644, upload-time = "2025-11-28T15:47:43.99Z" }, + { url = "https://files.pythonhosted.org/packages/b0/f4/7751f32f56916f7f8c229fe902cbdba3e4dd3f3ea9e8b872be97e7fc546d/mypy-1.19.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f2e36bed3c6d9b5f35d28b63ca4b727cb0228e480826ffc8953d1892ddc8999d", size = 13185236, upload-time = "2025-11-28T15:45:20.696Z" }, + { url = "https://files.pythonhosted.org/packages/35/31/871a9531f09e78e8d145032355890384f8a5b38c95a2c7732d226b93242e/mypy-1.19.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:a18d8abdda14035c5718acb748faec09571432811af129bf0d9e7b2d6699bf18", size = 12213902, upload-time = "2025-11-28T15:46:10.117Z" }, + { url = 
"https://files.pythonhosted.org/packages/58/b8/af221910dd40eeefa2077a59107e611550167b9994693fc5926a0b0f87c0/mypy-1.19.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75e60aca3723a23511948539b0d7ed514dda194bc3755eae0bfc7a6b4887aa7", size = 12738600, upload-time = "2025-11-28T15:44:22.521Z" }, + { url = "https://files.pythonhosted.org/packages/11/9f/c39e89a3e319c1d9c734dedec1183b2cc3aefbab066ec611619002abb932/mypy-1.19.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8f44f2ae3c58421ee05fe609160343c25f70e3967f6e32792b5a78006a9d850f", size = 13592639, upload-time = "2025-11-28T15:48:08.55Z" }, + { url = "https://files.pythonhosted.org/packages/97/6d/ffaf5f01f5e284d9033de1267e6c1b8f3783f2cf784465378a86122e884b/mypy-1.19.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:63ea6a00e4bd6822adbfc75b02ab3653a17c02c4347f5bb0cf1d5b9df3a05835", size = 13799132, upload-time = "2025-11-28T15:47:06.032Z" }, + { url = "https://files.pythonhosted.org/packages/fe/b0/c33921e73aaa0106224e5a34822411bea38046188eb781637f5a5b07e269/mypy-1.19.0-cp314-cp314-win_amd64.whl", hash = "sha256:3ad925b14a0bb99821ff6f734553294aa6a3440a8cb082fe1f5b84dfb662afb1", size = 10269832, upload-time = "2025-11-28T15:47:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/09/0e/fe228ed5aeab470c6f4eb82481837fadb642a5aa95cc8215fd2214822c10/mypy-1.19.0-py3-none-any.whl", hash = "sha256:0c01c99d626380752e527d5ce8e69ffbba2046eb8a060db0329690849cf9b6f9", size = 2469714, upload-time = "2025-11-28T15:45:33.22Z" }, ] [[package]] @@ -589,6 +673,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/c6/ac/dac4a63f978e4dcb3c6d3a78c4d8e0192a113d288502a1216950c41b1027/parso-0.8.4-py2.py3-none-any.whl", hash = "sha256:a418670a20291dacd2dddc80c377c5c3791378ee1e8d12bffc35420643d43f18", size = 103650, upload-time = "2024-04-05T09:43:53.299Z" }, ] +[[package]] +name = "pathspec" +version = "0.12.1" 
+source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/ca/bc/f35b8446f4531a7cb215605d100cd88b7ac6f44ab3fc94870c120ab3adbf/pathspec-0.12.1.tar.gz", hash = "sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712", size = 51043, upload-time = "2023-12-10T22:30:45Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/cc/20/ff623b09d963f88bfde16306a54e12ee5ea43e9b597108672ff3a408aad6/pathspec-0.12.1-py3-none-any.whl", hash = "sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08", size = 31191, upload-time = "2023-12-10T22:30:43.14Z" }, +] + [[package]] name = "pexpect" version = "4.9.0" @@ -620,17 +713,15 @@ dependencies = [ [package.dev-dependencies] dev = [ - { name = "bump2version" }, { name = "ipython" }, { name = "mypy" }, { name = "pre-commit" }, - { name = "pudb" }, { name = "pytest" }, - { name = "pytest-benchmark" }, - { name = "pytest-cov" }, + { name = "pytest-coverage" }, { name = "ruff" }, ] docs = [ + { name = "docstr-coverage" }, { name = "furo" }, { name = "ghp-import" }, { name = "myst-parser" }, @@ -648,17 +739,15 @@ requires-dist = [{ name = "loguru" }] [package.metadata.requires-dev] dev = [ - { name = "bump2version" }, - { name = "ipython" }, - { name = "mypy", specifier = "~=1.11.0" }, - { name = "pre-commit" }, - { name = "pudb" }, - { name = "pytest" }, - { name = "pytest-benchmark", specifier = ">=5.1.0" }, - { name = "pytest-cov" }, - { name = "ruff", specifier = "~=0.5.2" }, + { name = "ipython", specifier = ">=9.2.0" }, + { name = "mypy", specifier = ">=1.15.0" }, + { name = "pre-commit", specifier = ">=4.2.0" }, + { name = "pytest", specifier = ">=8.3.5" }, + { name = "pytest-coverage", specifier = ">=0.0" }, + { name = "ruff", specifier = ">=0.11.5" }, ] docs = [ + { name = "docstr-coverage", specifier = ">=2.3.2" }, { name = "furo" }, { name = "ghp-import" }, { name = "myst-parser" }, @@ -717,22 +806,6 @@ wheels = [ { url = 
"https://files.pythonhosted.org/packages/22/a6/858897256d0deac81a172289110f31629fc4cee19b6f01283303e18c8db3/ptyprocess-0.7.0-py2.py3-none-any.whl", hash = "sha256:4b41f3967fce3af57cc7e94b888626c18bf37a083e3651ca8feeb66d492fef35", size = 13993, upload-time = "2020-12-28T15:15:28.35Z" }, ] -[[package]] -name = "pudb" -version = "2025.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "jedi" }, - { name = "packaging" }, - { name = "pygments" }, - { name = "urwid" }, - { name = "urwid-readline" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/93/40/8d17b16a1c2a36d8cc1befb5eda13310ad20fcb347e58bef5e104696e14a/pudb-2025.1.tar.gz", hash = "sha256:a528b29c69ce8b182a337872c5f046071f6d68d3415c6d7bf53bd27c264f58d0", size = 220623, upload-time = "2025-05-06T20:43:18.306Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/01/069766294390d3e10c77dfb553171466d67ffb51bf72a437650c0a5db86a/pudb-2025.1-py3-none-any.whl", hash = "sha256:f642d42e6054c992b43c463742650aa879fe290d7d7ffdeb21f7d00dc4587a21", size = 89208, upload-time = "2025-05-06T20:43:17.101Z" }, -] - [[package]] name = "pure-eval" version = "0.2.3" @@ -742,15 +815,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/37/efad0257dc6e593a18957422533ff0f87ede7c9c6ea010a2177d738fb82f/pure_eval-0.2.3-py3-none-any.whl", hash = "sha256:1db8e35b67b3d218d818ae653e27f06c3aa420901fa7b081ca98cbedc874e0d0", size = 11842, upload-time = "2024-07-21T12:58:20.04Z" }, ] -[[package]] -name = "py-cpuinfo" -version = "9.0.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/37/a8/d832f7293ebb21690860d2e01d8115e5ff6f2ae8bbdc953f0eb0fa4bd2c7/py-cpuinfo-9.0.0.tar.gz", hash = "sha256:3cdbbf3fac90dc6f118bfd64384f309edeadd902d7c8fb17f02ffa1fc3f49690", size = 104716, upload-time = "2022-10-25T20:38:06.303Z" } -wheels = [ - { url = 
"https://files.pythonhosted.org/packages/e0/a9/023730ba63db1e494a271cb018dcd361bd2c917ba7004c3e49d5daf795a2/py_cpuinfo-9.0.0-py3-none-any.whl", hash = "sha256:859625bc251f64e21f077d099d4162689c762b5d6a4c3c97553d56241c9674d5", size = 22335, upload-time = "2022-10-25T20:38:27.636Z" }, -] - [[package]] name = "pydata-sphinx-theme" version = "0.15.4" @@ -796,30 +860,41 @@ wheels = [ ] [[package]] -name = "pytest-benchmark" -version = "5.1.0" +name = "pytest-cov" +version = "6.2.1" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "py-cpuinfo" }, + { name = "coverage", extra = ["toml"] }, + { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/d0/a8bd08d641b393db3be3819b03e2d9bb8760ca8479080a26a5f6e540e99c/pytest-benchmark-5.1.0.tar.gz", hash = "sha256:9ea661cdc292e8231f7cd4c10b0319e56a2118e2c09d9f50e1b3d150d2aca105", size = 337810, upload-time = "2024-10-30T11:51:48.521Z" } +sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9e/d6/b41653199ea09d5969d4e385df9bbfd9a100f28ca7e824ce7c0a016e3053/pytest_benchmark-5.1.0-py3-none-any.whl", hash = "sha256:922de2dfa3033c227c96da942d1878191afa135a29485fb942e85dff1c592c89", size = 44259, upload-time = "2024-10-30T11:51:45.94Z" }, + { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, ] [[package]] -name = "pytest-cov" -version = "6.2.1" +name = "pytest-cover" +version = "3.0.0" source = { registry = "https://pypi.org/simple" 
} dependencies = [ - { name = "coverage", extra = ["toml"] }, - { name = "pluggy" }, - { name = "pytest" }, + { name = "pytest-cov" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/18/99/668cade231f434aaa59bbfbf49469068d2ddd945000621d3d165d2e7dd7b/pytest_cov-6.2.1.tar.gz", hash = "sha256:25cc6cc0a5358204b8108ecedc51a9b57b34cc6b8c967cc2c01a4e00d8a67da2", size = 69432, upload-time = "2025-06-12T10:47:47.684Z" } +sdist = { url = "https://files.pythonhosted.org/packages/30/27/20964101a7cdb260f8d6c4e854659026968321d10c90552b1fe7f6c5f913/pytest-cover-3.0.0.tar.gz", hash = "sha256:5bdb6c1cc3dd75583bb7bc2c57f5e1034a1bfcb79d27c71aceb0b16af981dbf4", size = 3211, upload-time = "2015-08-01T19:20:22.562Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bc/16/4ea354101abb1287856baa4af2732be351c7bee728065aed451b678153fd/pytest_cov-6.2.1-py3-none-any.whl", hash = "sha256:f5bc4c23f42f1cdd23c70b1dab1bbaef4fc505ba950d53e0081d0730dd7e86d5", size = 24644, upload-time = "2025-06-12T10:47:45.932Z" }, + { url = "https://files.pythonhosted.org/packages/71/9b/7b4700c462628e169bd859c6368d596a6aedc87936bde733bead9f875fce/pytest_cover-3.0.0-py2.py3-none-any.whl", hash = "sha256:578249955eb3b5f3991209df6e532bb770b647743b7392d3d97698dc02f39ebb", size = 3769, upload-time = "2015-08-01T19:20:18.534Z" }, +] + +[[package]] +name = "pytest-coverage" +version = "0.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "pytest-cover" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/01/81/1d954849aed17b254d1c397eb4447a05eedce612a56b627c071df2ce00c1/pytest-coverage-0.0.tar.gz", hash = "sha256:db6af2cbd7e458c7c9fd2b4207cee75258243c8a81cad31a7ee8cfad5be93c05", size = 873, upload-time = "2015-06-17T21:50:38.956Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5b/4b/d95b052f87db89a2383233c0754c45f6d3b427b7a4bcb771ac9316a6fae1/pytest_coverage-0.0-py2.py3-none-any.whl", hash = 
"sha256:dedd084c5e74d8e669355325916dc011539b190355021b037242514dee546368", size = 2013, upload-time = "2015-06-17T22:08:36.771Z" }, ] [[package]] @@ -895,27 +970,28 @@ wheels = [ [[package]] name = "ruff" -version = "0.5.7" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/bf/2b/69e5e412f9d390adbdbcbf4f64d6914fa61b44b08839a6584655014fc524/ruff-0.5.7.tar.gz", hash = "sha256:8dfc0a458797f5d9fb622dd0efc52d796f23f0a1493a9527f4e49a550ae9a7e5", size = 2449817, upload-time = "2024-08-08T15:43:07.467Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/6b/eb/06e06aaf96af30a68e83b357b037008c54a2ddcbad4f989535007c700394/ruff-0.5.7-py3-none-linux_armv6l.whl", hash = "sha256:548992d342fc404ee2e15a242cdbea4f8e39a52f2e7752d0e4cbe88d2d2f416a", size = 9570571, upload-time = "2024-08-08T15:41:56.537Z" }, - { url = "https://files.pythonhosted.org/packages/a4/10/1be32aeaab8728f78f673e7a47dd813222364479b2d6573dbcf0085e83ea/ruff-0.5.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:00cc8872331055ee017c4f1071a8a31ca0809ccc0657da1d154a1d2abac5c0be", size = 8685138, upload-time = "2024-08-08T15:42:02.833Z" }, - { url = "https://files.pythonhosted.org/packages/3d/1d/c218ce83beb4394ba04d05e9aa2ae6ce9fba8405688fe878b0fdb40ce855/ruff-0.5.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:eaf3d86a1fdac1aec8a3417a63587d93f906c678bb9ed0b796da7b59c1114a1e", size = 8266785, upload-time = "2024-08-08T15:42:08.321Z" }, - { url = "https://files.pythonhosted.org/packages/26/79/7f49509bd844476235b40425756def366b227a9714191c91f02fb2178635/ruff-0.5.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a01c34400097b06cf8a6e61b35d6d456d5bd1ae6961542de18ec81eaf33b4cb8", size = 9983964, upload-time = "2024-08-08T15:42:12.419Z" }, - { url = "https://files.pythonhosted.org/packages/bf/b1/939836b70bf9fcd5e5cd3ea67fdb8abb9eac7631351d32f26544034a35e4/ruff-0.5.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", 
hash = "sha256:fcc8054f1a717e2213500edaddcf1dbb0abad40d98e1bd9d0ad364f75c763eea", size = 9359490, upload-time = "2024-08-08T15:42:16.713Z" }, - { url = "https://files.pythonhosted.org/packages/32/7d/b3db19207de105daad0c8b704b2c6f2a011f9c07017bd58d8d6e7b8eba19/ruff-0.5.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:7f70284e73f36558ef51602254451e50dd6cc479f8b6f8413a95fcb5db4a55fc", size = 10170833, upload-time = "2024-08-08T15:42:20.54Z" }, - { url = "https://files.pythonhosted.org/packages/a2/45/eae9da55f3357a1ac04220230b8b07800bf516e6dd7e1ad20a2ff3b03b1b/ruff-0.5.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:a78ad870ae3c460394fc95437d43deb5c04b5c29297815a2a1de028903f19692", size = 10896360, upload-time = "2024-08-08T15:42:25.2Z" }, - { url = "https://files.pythonhosted.org/packages/99/67/4388b36d145675f4c51ebec561fcd4298a0e2550c81e629116f83ce45a39/ruff-0.5.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9ccd078c66a8e419475174bfe60a69adb36ce04f8d4e91b006f1329d5cd44bcf", size = 10477094, upload-time = "2024-08-08T15:42:29.553Z" }, - { url = "https://files.pythonhosted.org/packages/e1/9c/f5e6ed1751dc187a4ecf19a4970dd30a521c0ee66b7941c16e292a4043fb/ruff-0.5.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7e31c9bad4ebf8fdb77b59cae75814440731060a09a0e0077d559a556453acbb", size = 11480896, upload-time = "2024-08-08T15:42:33.772Z" }, - { url = "https://files.pythonhosted.org/packages/c8/3b/2b683be597bbd02046678fc3fc1c199c641512b20212073b58f173822bb3/ruff-0.5.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8d796327eed8e168164346b769dd9a27a70e0298d667b4ecee6877ce8095ec8e", size = 10179702, upload-time = "2024-08-08T15:42:38.038Z" }, - { url = "https://files.pythonhosted.org/packages/f1/38/c2d94054dc4b3d1ea4c2ba3439b2a7095f08d1c8184bc41e6abe2a688be7/ruff-0.5.7-py3-none-musllinux_1_2_aarch64.whl", hash = 
"sha256:4a09ea2c3f7778cc635e7f6edf57d566a8ee8f485f3c4454db7771efb692c499", size = 9982855, upload-time = "2024-08-08T15:42:42.031Z" }, - { url = "https://files.pythonhosted.org/packages/7d/e7/1433db2da505ffa8912dcf5b28a8743012ee780cbc20ad0bf114787385d9/ruff-0.5.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:a36d8dcf55b3a3bc353270d544fb170d75d2dff41eba5df57b4e0b67a95bb64e", size = 9433156, upload-time = "2024-08-08T15:42:45.339Z" }, - { url = "https://files.pythonhosted.org/packages/e0/36/4fa43250e67741edeea3d366f59a1dc993d4d89ad493a36cbaa9889895f2/ruff-0.5.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:9369c218f789eefbd1b8d82a8cf25017b523ac47d96b2f531eba73770971c9e5", size = 9782971, upload-time = "2024-08-08T15:42:49.354Z" }, - { url = "https://files.pythonhosted.org/packages/80/0e/8c276103d518e5cf9202f70630aaa494abf6fc71c04d87c08b6d3cd07a4b/ruff-0.5.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:b88ca3db7eb377eb24fb7c82840546fb7acef75af4a74bd36e9ceb37a890257e", size = 10247775, upload-time = "2024-08-08T15:42:53.294Z" }, - { url = "https://files.pythonhosted.org/packages/cb/b9/673096d61276f39291b729dddde23c831a5833d98048349835782688a0ec/ruff-0.5.7-py3-none-win32.whl", hash = "sha256:33d61fc0e902198a3e55719f4be6b375b28f860b09c281e4bdbf783c0566576a", size = 7841772, upload-time = "2024-08-08T15:42:57.488Z" }, - { url = "https://files.pythonhosted.org/packages/67/1c/4520c98bfc06b9c73cd1457686d4d3935d40046b1ddea08403e5a6deff51/ruff-0.5.7-py3-none-win_amd64.whl", hash = "sha256:083bbcbe6fadb93cd86709037acc510f86eed5a314203079df174c40bbbca6b3", size = 8699779, upload-time = "2024-08-08T15:43:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/38/23/b3763a237d2523d40a31fe2d1a301191fe392dd48d3014977d079cf8c0bd/ruff-0.5.7-py3-none-win_arm64.whl", hash = "sha256:2dca26154ff9571995107221d0aeaad0e75a77b5a682d6236cf89a58c70b76f4", size = 8091891, upload-time = "2024-08-08T15:43:04.162Z" }, +version = "0.14.7" +source = { registry = 
"https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/b7/5b/dd7406afa6c95e3d8fa9d652b6d6dd17dd4a6bf63cb477014e8ccd3dcd46/ruff-0.14.7.tar.gz", hash = "sha256:3417deb75d23bd14a722b57b0a1435561db65f0ad97435b4cf9f85ffcef34ae5", size = 5727324, upload-time = "2025-11-28T20:55:10.525Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/8c/b1/7ea5647aaf90106f6d102230e5df874613da43d1089864da1553b899ba5e/ruff-0.14.7-py3-none-linux_armv6l.whl", hash = "sha256:b9d5cb5a176c7236892ad7224bc1e63902e4842c460a0b5210701b13e3de4fca", size = 13414475, upload-time = "2025-11-28T20:54:54.569Z" }, + { url = "https://files.pythonhosted.org/packages/af/19/fddb4cd532299db9cdaf0efdc20f5c573ce9952a11cb532d3b859d6d9871/ruff-0.14.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:3f64fe375aefaf36ca7d7250292141e39b4cea8250427482ae779a2aa5d90015", size = 13634613, upload-time = "2025-11-28T20:55:17.54Z" }, + { url = "https://files.pythonhosted.org/packages/40/2b/469a66e821d4f3de0440676ed3e04b8e2a1dc7575cf6fa3ba6d55e3c8557/ruff-0.14.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:93e83bd3a9e1a3bda64cb771c0d47cda0e0d148165013ae2d3554d718632d554", size = 12765458, upload-time = "2025-11-28T20:55:26.128Z" }, + { url = "https://files.pythonhosted.org/packages/f1/05/0b001f734fe550bcfde4ce845948ac620ff908ab7241a39a1b39bb3c5f49/ruff-0.14.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3838948e3facc59a6070795de2ae16e5786861850f78d5914a03f12659e88f94", size = 13236412, upload-time = "2025-11-28T20:55:28.602Z" }, + { url = "https://files.pythonhosted.org/packages/11/36/8ed15d243f011b4e5da75cd56d6131c6766f55334d14ba31cce5461f28aa/ruff-0.14.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:24c8487194d38b6d71cd0fd17a5b6715cda29f59baca1defe1e3a03240f851d1", size = 13182949, upload-time = "2025-11-28T20:55:33.265Z" }, + { url = 
"https://files.pythonhosted.org/packages/3b/cf/fcb0b5a195455729834f2a6eadfe2e4519d8ca08c74f6d2b564a4f18f553/ruff-0.14.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:79c73db6833f058a4be8ffe4a0913b6d4ad41f6324745179bd2aa09275b01d0b", size = 13816470, upload-time = "2025-11-28T20:55:08.203Z" }, + { url = "https://files.pythonhosted.org/packages/7f/5d/34a4748577ff7a5ed2f2471456740f02e86d1568a18c9faccfc73bd9ca3f/ruff-0.14.7-py3-none-manylinux_2_17_ppc64.manylinux2014_ppc64.whl", hash = "sha256:12eb7014fccff10fc62d15c79d8a6be4d0c2d60fe3f8e4d169a0d2def75f5dad", size = 15289621, upload-time = "2025-11-28T20:55:30.837Z" }, + { url = "https://files.pythonhosted.org/packages/53/53/0a9385f047a858ba133d96f3f8e3c9c66a31cc7c4b445368ef88ebeac209/ruff-0.14.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:6c623bbdc902de7ff715a93fa3bb377a4e42dd696937bf95669118773dbf0c50", size = 14975817, upload-time = "2025-11-28T20:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/a8/d7/2f1c32af54c3b46e7fadbf8006d8b9bcfbea535c316b0bd8813d6fb25e5d/ruff-0.14.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:f53accc02ed2d200fa621593cdb3c1ae06aa9b2c3cae70bc96f72f0000ae97a9", size = 14284549, upload-time = "2025-11-28T20:55:06.08Z" }, + { url = "https://files.pythonhosted.org/packages/92/05/434ddd86becd64629c25fb6b4ce7637dd52a45cc4a4415a3008fe61c27b9/ruff-0.14.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:281f0e61a23fcdcffca210591f0f53aafaa15f9025b5b3f9706879aaa8683bc4", size = 14071389, upload-time = "2025-11-28T20:55:35.617Z" }, + { url = "https://files.pythonhosted.org/packages/ff/50/fdf89d4d80f7f9d4f420d26089a79b3bb1538fe44586b148451bc2ba8d9c/ruff-0.14.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:dbbaa5e14148965b91cb090236931182ee522a5fac9bc5575bafc5c07b9f9682", size = 14202679, upload-time = "2025-11-28T20:55:01.472Z" }, + { url = 
"https://files.pythonhosted.org/packages/77/54/87b34988984555425ce967f08a36df0ebd339bb5d9d0e92a47e41151eafc/ruff-0.14.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:1464b6e54880c0fe2f2d6eaefb6db15373331414eddf89d6b903767ae2458143", size = 13147677, upload-time = "2025-11-28T20:55:19.933Z" }, + { url = "https://files.pythonhosted.org/packages/67/29/f55e4d44edfe053918a16a3299e758e1c18eef216b7a7092550d7a9ec51c/ruff-0.14.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:f217ed871e4621ea6128460df57b19ce0580606c23aeab50f5de425d05226784", size = 13151392, upload-time = "2025-11-28T20:55:21.967Z" }, + { url = "https://files.pythonhosted.org/packages/36/69/47aae6dbd4f1d9b4f7085f4d9dcc84e04561ee7ad067bf52e0f9b02e3209/ruff-0.14.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:6be02e849440ed3602d2eb478ff7ff07d53e3758f7948a2a598829660988619e", size = 13412230, upload-time = "2025-11-28T20:55:12.749Z" }, + { url = "https://files.pythonhosted.org/packages/b7/4b/6e96cb6ba297f2ba502a231cd732ed7c3de98b1a896671b932a5eefa3804/ruff-0.14.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:19a0f116ee5e2b468dfe80c41c84e2bbd6b74f7b719bee86c2ecde0a34563bcc", size = 14195397, upload-time = "2025-11-28T20:54:56.896Z" }, + { url = "https://files.pythonhosted.org/packages/69/82/251d5f1aa4dcad30aed491b4657cecd9fb4274214da6960ffec144c260f7/ruff-0.14.7-py3-none-win32.whl", hash = "sha256:e33052c9199b347c8937937163b9b149ef6ab2e4bb37b042e593da2e6f6cccfa", size = 13126751, upload-time = "2025-11-28T20:55:03.47Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b5/d0b7d145963136b564806f6584647af45ab98946660d399ec4da79cae036/ruff-0.14.7-py3-none-win_amd64.whl", hash = "sha256:e17a20ad0d3fad47a326d773a042b924d3ac31c6ca6deb6c72e9e6b5f661a7c6", size = 14531726, upload-time = "2025-11-28T20:54:59.121Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d2/1637f4360ada6a368d3265bf39f2cf737a0aaab15ab520fc005903e883f8/ruff-0.14.7-py3-none-win_arm64.whl", hash = 
"sha256:be4d653d3bea1b19742fcc6502354e32f65cd61ff2fbdb365803ef2c2aec6228", size = 13609215, upload-time = "2025-11-28T20:55:15.375Z" }, ] [[package]] @@ -1207,6 +1283,18 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, ] +[[package]] +name = "tqdm" +version = "4.67.1" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "colorama", marker = "sys_platform == 'win32'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/4b/29b4ef32e036bb34e4ab51796dd745cdba7ed47ad142a9f4a1eb8e0c744d/tqdm-4.67.1.tar.gz", hash = "sha256:f8aef9c52c08c13a65f30ea34f4e5aac3fd1a34959879d7e59e63027286627f2", size = 169737, upload-time = "2024-11-24T20:12:22.481Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl", hash = "sha256:26445eca388f82e72884e0d580d5464cd801a3ea01e63e5601bdff9ba6a48de2", size = 78540, upload-time = "2024-11-24T20:12:19.698Z" }, +] + [[package]] name = "traitlets" version = "5.14.3" @@ -1234,27 +1322,6 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, ] -[[package]] -name = "urwid" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "wcwidth" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/46/2d/71550379ed6b34968e14f73b0cf8574dee160acb6b820a066ab238ef2d4f/urwid-3.0.2.tar.gz", hash = "sha256:e7cb70ba1e7ff45779a5a57e43c57581ee7de6ceefb56c432491a4a6ce81eb78", size = 
855353, upload-time = "2025-05-07T10:48:51.381Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/ee/2956918f14fd6e4310f7200108b53917f4da713d74c7ccd0d91a2e3a4f18/urwid-3.0.2-py3-none-any.whl", hash = "sha256:94ec1448d0178c881c01845c2b478cdc89f7b71bb65349466dbc99da1965eaac", size = 295994, upload-time = "2025-05-07T10:48:49.173Z" }, -] - -[[package]] -name = "urwid-readline" -version = "0.15.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "urwid" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/ad/70/be318554495555eba7d8ff6e489f6f74ddb225b24086ba4af62a82e723fd/urwid_readline-0.15.1.tar.gz", hash = "sha256:9301444b86d58f7d26388506b704f142cefd193888488b4070d3a0fdfcfc0f84", size = 9007, upload-time = "2024-09-22T17:51:55.144Z" } - [[package]] name = "uvicorn" version = "0.35.0"