diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs index 5646ef96c..57d444f85 100644 --- a/.git-blame-ignore-revs +++ b/.git-blame-ignore-revs @@ -1,4 +1,4 @@ -# Template taken from https://github.com/v8/v8/blob/master/.git-blame-ignore-revs. +# Template taken from https://github.com/v8/v8/blob/main/.git-blame-ignore-revs. # # This file contains a list of git hashes of revisions to be ignored by git blame. These # revisions are considered "unimportant" in that they are unlikely to be what you are diff --git a/.github/actions/spelling/allow.txt b/.github/actions/spelling/allow.txt index 8d0b13c8c..b3b2d56e8 100644 --- a/.github/actions/spelling/allow.txt +++ b/.github/actions/spelling/allow.txt @@ -1,3 +1,6 @@ +a2a +A2A +A2AFastAPI AAgent ACard AClient @@ -5,11 +8,14 @@ ACMRTUXB aconnect adk AError +AException AFast agentic AGrpc aio aiomysql +AIP +alg amannn aproject ARequest @@ -21,19 +27,27 @@ AStarlette AUser autouse backticks +base64url +buf +bufbuild cla cls coc codegen coro +culsans datamodel +datapart deepwiki drivername DSNs dunders +ES256 euo EUR +evt excinfo +FastAPI fernet fetchrow fetchval @@ -42,13 +56,23 @@ genai getkwargs gle GVsb +hazmat +HS256 +HS384 ietf +importlib initdb inmemory INR isready +itk +ITK +jcs +jit jku +JOSE JPY +JSONRPC JSONRPCt jwk jwks @@ -61,36 +85,62 @@ lifecycles linting Llm lstrips +middleware mikeas mockurl mysqladmin notif +npx oauthoidc oidc +Oneof +OpenAPI +openapiv +openapiv2 opensource otherurl +pb2 +poolclass postgres POSTGRES postgresql proot +proto +protobuf +Protobuf protoc +pydantic pyi pypistats +pyproto pyupgrade pyversions redef respx resub +rmi +RS256 RUF +SECP256R1 SLF socio sse +starlette +Starlette sut SUT +swagger tagwords taskupdate testuuid Tful tiangolo +TResponse +typ typeerror vulnz +Podman +podman +UIDs +subuids +subgids diff --git a/.github/actions/spelling/excludes.txt b/.github/actions/spelling/excludes.txt index f54f084c8..6189bc705 100644 --- a/.github/actions/spelling/excludes.txt +++ b/.github/actions/spelling/excludes.txt @@ -10,6 +10,7 @@ (?:^|/)pyproject.toml (?:^|/)requirements(?:-dev|-doc|-test|)\.txt$ (?:^|/)vendor/ +(?:^|/)buf.gen.yaml /CODEOWNERS$ \.a$ \.ai$ @@ -87,5 +88,9 @@ ^\.github/workflows/ CHANGELOG.md ^src/a2a/grpc/ +^src/a2a/types/ +^src/a2a/compat/v0_3/a2a_v0_3* ^tests/ .pre-commit-config.yaml +(?:^|/)a2a\.json$ +release-please-config.json diff --git a/.github/actions/spelling/expect.txt b/.github/actions/spelling/expect.txt deleted file mode 100644 index abf7a6f71..000000000 --- a/.github/actions/spelling/expect.txt +++ /dev/null @@ -1 +0,0 @@ -datapart diff --git a/.github/workflows/itk.yaml b/.github/workflows/itk.yaml new file mode 100644 index 000000000..ab272d0e3 --- /dev/null +++ b/.github/workflows/itk.yaml @@ -0,0 +1,31 @@ +name: ITK + +on: + push: + branches: [main, 1.0-dev] + pull_request: + paths: + - 'src/**' + - 'itk/**' + - 'pyproject.toml' + +permissions: + contents: read + +jobs: + itk: + name: ITK + runs-on: ubuntu-latest + + steps: + - name: Checkout code + uses: actions/checkout@v6 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + + - name: Run ITK Tests + run: bash run_itk.sh + working-directory: itk + env: + A2A_SAMPLES_REVISION: itk-v.016-alpha diff --git a/.github/workflows/linter.yaml b/.github/workflows/linter.yaml index 99e8548d7..4263abb3c 100644 --- a/.github/workflows/linter.yaml +++ b/.github/workflows/linter.yaml @@ -2,7 +2,7 @@ name: Lint Code Base on: pull_request: - branches: [main] + branches: [main, 1.0-dev] permissions: contents: read jobs: @@ -43,9 +43,7 @@ jobs: - name: Run Pyright (Pylance equivalent) id: pyright continue-on-error: true - uses: jakebailey/pyright-action@8ec14b5cfe41f26e5f41686a31eb6012758217ef # v3 - with: - pylance-version: latest-release + run: uv run pyright src - name: Run JSCPD for copy-paste detection id: jscpd diff --git a/.github/workflows/minimal-install.yml b/.github/workflows/minimal-install.yml new file mode 100644 index 000000000..7e0f143c6 --- /dev/null +++ b/.github/workflows/minimal-install.yml @@ -0,0 +1,41 @@ +--- +name: Minimal Install Smoke Test +on: + push: + branches: [main, 1.0-dev] + pull_request: +permissions: + contents: read + +jobs: + minimal-install: + name: Verify base-only install + runs-on: ubuntu-latest + if: github.repository == 'a2aproject/a2a-python' + strategy: + matrix: + python-version: ['3.10', '3.11', '3.12', '3.13', '3.14'] + steps: + - name: Checkout code + uses: actions/checkout@v6 + + - name: Install uv + uses: astral-sh/setup-uv@v7 + with: + python-version: ${{ matrix.python-version }} + + - name: Build package + run: uv build --wheel + + - name: Install with base dependencies only + run: | + uv venv .venv-minimal + # Install only the built wheel -- no extras, no dev deps. + # This simulates what an end-user gets with `pip install a2a-sdk`. + VIRTUAL_ENV=.venv-minimal uv pip install dist/*.whl + + - name: List installed packages + run: VIRTUAL_ENV=.venv-minimal uv pip list + + - name: Run import smoke test + run: .venv-minimal/bin/python scripts/test_minimal_install.py diff --git a/.github/workflows/release-please.yml b/.github/workflows/release-please.yml index 1668691e8..98ac3bf2d 100644 --- a/.github/workflows/release-please.yml +++ b/.github/workflows/release-please.yml @@ -2,6 +2,7 @@ on: push: branches: - main + - 1.0-dev permissions: contents: write @@ -16,4 +17,6 @@ jobs: - uses: googleapis/release-please-action@16a9c90856f42705d54a6fda1823352bdc62cf38 # v4 with: token: ${{ secrets.A2A_BOT_PAT }} - release-type: python + target-branch: ${{ github.ref_name }} + config-file: release-please-config.json + manifest-file: .release-please-manifest.json diff --git a/.github/workflows/unit-tests.yml b/.github/workflows/unit-tests.yml index cb6f82414..adabe0676 100644 --- a/.github/workflows/unit-tests.yml +++ b/.github/workflows/unit-tests.yml @@ -2,7 +2,7 @@ name: Run Unit Tests on: push: - branches: [main] + branches: [main, 1.0-dev] pull_request: permissions: contents: read @@ -38,7 +38,7 @@ jobs: strategy: matrix: - python-version: ['3.10', '3.13'] + python-version: ['3.10', '3.11', '3.12', '3.13', '3.14'] steps: - name: Checkout code uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 @@ -54,45 +54,47 @@ jobs: - name: Add uv to PATH run: | echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - name: Install dependencies - run: uv sync --locked - # Coverage comparison for PRs (only on Python 3.13 to avoid duplicate work) + + # Coverage comparison for PRs (only on Python 3.14 to avoid duplicate work) - name: Checkout Base Branch - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: ref: ${{ github.event.pull_request.base.ref || 'main' }} clean: true + + - name: Install dependencies + run: uv sync --locked - name: Run coverage (Base) - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' run: | uv run pytest --cov=a2a --cov-report=json --cov-report=html:coverage mv coverage.json /tmp/coverage-base.json - name: Checkout PR Branch (Restore) - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 with: clean: true - name: Run coverage (PR) - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' run: | uv run pytest --cov=a2a --cov-report=json --cov-report=html:coverage --cov-report=term --cov-fail-under=88 mv coverage.json coverage-pr.json cp /tmp/coverage-base.json coverage-base.json - name: Save Metadata - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' run: | echo ${{ github.event.number }} > ./PR_NUMBER echo ${{ github.event.pull_request.base.ref || 'main' }} > ./BASE_BRANCH - name: Upload Coverage Artifacts uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - if: github.event_name == 'pull_request' && matrix.python-version == '3.13' + if: github.event_name == 'pull_request' && matrix.python-version == '3.14' with: name: coverage-data path: | @@ -105,12 +107,12 @@ jobs: # Run standard tests (for matrix items that didn't run coverage PR) - name: Run tests (Standard) - if: matrix.python-version != '3.13' || github.event_name != 'pull_request' + if: matrix.python-version != '3.14' || github.event_name != 'pull_request' run: uv run pytest --cov=a2a --cov-report term --cov-fail-under=88 - name: Upload Artifact (base) uses: actions/upload-artifact@bbbca2ddaa5d8feaa63e36b76fdaad77386f024f # v7 - if: github.event_name != 'pull_request' && matrix.python-version == '3.13' + if: github.event_name != 'pull_request' && matrix.python-version == '3.14' with: name: coverage-report path: coverage diff --git a/.github/workflows/update-a2a-types.yml b/.github/workflows/update-a2a-types.yml deleted file mode 100644 index cb1ece199..000000000 --- a/.github/workflows/update-a2a-types.yml +++ /dev/null @@ -1,62 +0,0 @@ ---- -name: Update A2A Schema from Specification -on: -# TODO (https://github.com/a2aproject/a2a-python/issues/559): bring back once types are migrated, currently it generates many broken PRs -# repository_dispatch: -# types: [a2a_json_update] - workflow_dispatch: -jobs: - generate_and_pr: - runs-on: ubuntu-latest - permissions: - contents: write - pull-requests: write - steps: - - name: Checkout code - uses: actions/checkout@de0fac2e4500dabe0009e67214ff5f5447ce83dd # v6 - - name: Set up Python - uses: actions/setup-python@a309ff8b426b58ec0e2a45f0f869d46889d02405 # v6 - with: - python-version: '3.10' - - name: Install uv - uses: astral-sh/setup-uv@37802adc94f370d6bfd71619e3f0bf239e1f3b78 # v7 - - name: Configure uv shell - run: echo "$HOME/.cargo/bin" >> $GITHUB_PATH - - name: Install dependencies (datamodel-code-generator) - run: uv sync --locked - - name: Define output file variable - id: vars - run: | - GENERATED_FILE="./src/a2a/types.py" - echo "GENERATED_FILE=$GENERATED_FILE" >> "$GITHUB_OUTPUT" - - name: Generate types from schema - run: | - chmod +x scripts/generate_types.sh - ./scripts/generate_types.sh "${{ steps.vars.outputs.GENERATED_FILE }}" - - name: Install Buf - uses: bufbuild/buf-setup-action@a47c93e0b1648d5651a065437926377d060baa99 # v1.50.0 - - name: Run buf generate - run: | - set -euo pipefail # Exit immediately if a command exits with a non-zero status - echo "Running buf generate..." - buf generate - uv run scripts/grpc_gen_post_processor.py - echo "Buf generate finished." - - name: Create Pull Request with Updates - uses: peter-evans/create-pull-request@c0f553fe549906ede9cf27b5156039d195d2ece0 # v8 - with: - token: ${{ secrets.A2A_BOT_PAT }} - committer: a2a-bot - author: a2a-bot - commit-message: '${{ github.event.client_payload.message }}' - title: '${{ github.event.client_payload.message }}' - body: | - Commit: https://github.com/a2aproject/A2A/commit/${{ github.event.client_payload.sha }} - branch: auto-update-a2a-types-${{ github.event.client_payload.sha }} - base: main - labels: | - automated - dependencies - add-paths: |- - ${{ steps.vars.outputs.GENERATED_FILE }} - src/a2a/grpc/ diff --git a/.gitignore b/.gitignore index 91cbb9938..14bccd39b 100644 --- a/.gitignore +++ b/.gitignore @@ -10,3 +10,12 @@ test_venv/ coverage.xml .nox spec.json +docker-compose.yaml +.geminiignore +docs/ai/ai_learnings.md + +# ITK Integration Test Artifacts +itk/a2a-samples/ +itk/pyproto/ +itk/instruction.proto +itk/logs/ diff --git a/.jscpd.json b/.jscpd.json index 5a6fcad71..ed59a6491 100644 --- a/.jscpd.json +++ b/.jscpd.json @@ -1,5 +1,13 @@ { - "ignore": ["**/.github/**", "**/.git/**", "**/tests/**", "**/src/a2a/grpc/**", "**/.nox/**", "**/.venv/**"], + "ignore": [ + "**/.github/**", + "**/.git/**", + "**/tests/**", + "**/src/a2a/grpc/**", + "**/src/a2a/compat/**", + "**/.nox/**", + "**/.venv/**" + ], "threshold": 3, "reporters": ["html", "markdown"] } diff --git a/.release-please-manifest.json b/.release-please-manifest.json new file mode 100644 index 000000000..160cadc01 --- /dev/null +++ b/.release-please-manifest.json @@ -0,0 +1 @@ +{".":"1.0.0-alpha.3"} diff --git a/CHANGELOG.md b/CHANGELOG.md index 01e3469b8..3e3b43a3a 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,167 @@ # Changelog +## [1.0.0-alpha.3](https://github.com/a2aproject/a2a-python/compare/v1.0.0-alpha.2...v1.0.0-alpha.3) (2026-04-17) + + +### Bug Fixes + +* update `with_a2a_extensions` to append instead of overwriting ([#985](https://github.com/a2aproject/a2a-python/issues/985)) ([e1d0e7a](https://github.com/a2aproject/a2a-python/commit/e1d0e7a72e2b9633be0b76c952f6c2e6fe11e3e5)) + +## [1.0.0-alpha.2](https://github.com/a2aproject/a2a-python/compare/v1.0.0-alpha.1...v1.0.0-alpha.2) (2026-04-17) + + +### ⚠ BREAKING CHANGES + +* clean helpers and utils folders structure ([#983](https://github.com/a2aproject/a2a-python/issues/983)) +* Raise errors on invalid AgentExecutor behavior. ([#979](https://github.com/a2aproject/a2a-python/issues/979)) +* extract developer helpers in helpers folder ([#978](https://github.com/a2aproject/a2a-python/issues/978)) + +### Features + +* Raise errors on invalid AgentExecutor behavior. ([#979](https://github.com/a2aproject/a2a-python/issues/979)) ([f4a0bcd](https://github.com/a2aproject/a2a-python/commit/f4a0bcdf68107c95e6c0a5e6696e4a7d6e01a03f)) +* **utils:** add `display_agent_card()` utility for human-readable AgentCard inspection ([#972](https://github.com/a2aproject/a2a-python/issues/972)) ([3468180](https://github.com/a2aproject/a2a-python/commit/3468180ac7396d453d99ce3e74cdd7f5a0afb5ab)) + + +### Bug Fixes + +* Don't generate empty metadata change events in VertexTaskStore ([#974](https://github.com/a2aproject/a2a-python/issues/974)) ([b58b03e](https://github.com/a2aproject/a2a-python/commit/b58b03ef58bd806db3accbe6dca8fc444a43bc18)), closes [#802](https://github.com/a2aproject/a2a-python/issues/802) +* **extensions:** support both header names and remove "activation" concept ([#984](https://github.com/a2aproject/a2a-python/issues/984)) ([b8df210](https://github.com/a2aproject/a2a-python/commit/b8df210b00d0f249ca68f0d814191c4205e18b35)) + + +### Documentation + +* AgentExecutor interface documentation ([#976](https://github.com/a2aproject/a2a-python/issues/976)) ([d667e4f](https://github.com/a2aproject/a2a-python/commit/d667e4fa55e99225eb3c02e009b426a3bc2d449d)) +* move `ai_learnings.md` to local-only and update `GEMINI.md` ([#982](https://github.com/a2aproject/a2a-python/issues/982)) ([f6610fa](https://github.com/a2aproject/a2a-python/commit/f6610fa35e1f5fbc3e7e6cd9e29a5177a538eb4e)) + + +### Code Refactoring + +* clean helpers and utils folders structure ([#983](https://github.com/a2aproject/a2a-python/issues/983)) ([c87e87c](https://github.com/a2aproject/a2a-python/commit/c87e87c76c004c73c9d6b9bd8cacfd4e590598e6)) +* extract developer helpers in helpers folder ([#978](https://github.com/a2aproject/a2a-python/issues/978)) ([5f3ea29](https://github.com/a2aproject/a2a-python/commit/5f3ea292389cf72a25a7cf2792caceb4af45f6da)) + +## [1.0.0-alpha.1](https://github.com/a2aproject/a2a-python/compare/v1.0.0-alpha.0...v1.0.0-alpha.1) (2026-04-10) + + +### ⚠ BREAKING CHANGES + +* **client:** make ClientConfig.push_notification_config singular ([#955](https://github.com/a2aproject/a2a-python/issues/955)) +* **client:** reorganize ClientFactory API ([#947](https://github.com/a2aproject/a2a-python/issues/947)) +* **server:** add build_user function to DefaultContextBuilder to allow A2A user creation customization ([#925](https://github.com/a2aproject/a2a-python/issues/925)) +* **client:** remove `ClientTaskManager` and `Consumers` from client ([#916](https://github.com/a2aproject/a2a-python/issues/916)) +* **server:** migrate from Application wrappers to Starlette route-based endpoints for rest ([#892](https://github.com/a2aproject/a2a-python/issues/892)) +* **server:** migrate from Application wrappers to Starlette route-based endpoints for jsonrpc ([#873](https://github.com/a2aproject/a2a-python/issues/873)) + +### Features + +* A2A Version Header validation on server side. ([#865](https://github.com/a2aproject/a2a-python/issues/865)) ([b261ceb](https://github.com/a2aproject/a2a-python/commit/b261ceb98bf46cc1e479fcdace52fef8371c8e58)) +* Add GetExtendedAgentCard Support to RequestHandlers ([#919](https://github.com/a2aproject/a2a-python/issues/919)) ([2159140](https://github.com/a2aproject/a2a-python/commit/2159140b1c24fe556a41accf97a6af7f54ec6701)) +* Add support for more Task Message and Artifact fields in the Vertex Task Store ([#908](https://github.com/a2aproject/a2a-python/issues/908)) ([5e0dcd7](https://github.com/a2aproject/a2a-python/commit/5e0dcd798fcba16a8092b0b4c2d3d8026ca287de)) +* Add support for more Task Message and Artifact fields in the Vertex Task Store ([#936](https://github.com/a2aproject/a2a-python/issues/936)) ([605fa49](https://github.com/a2aproject/a2a-python/commit/605fa4913ad23539a51a3ee1f5b9ca07f24e1d2d)) +* Create EventQueue interface and make tap() async. ([#914](https://github.com/a2aproject/a2a-python/issues/914)) ([9ccf99c](https://github.com/a2aproject/a2a-python/commit/9ccf99c63d4e556eadea064de6afa0b4fc4e19d6)), closes [#869](https://github.com/a2aproject/a2a-python/issues/869) +* EventQueue - unify implementation between python versions ([#877](https://github.com/a2aproject/a2a-python/issues/877)) ([7437b88](https://github.com/a2aproject/a2a-python/commit/7437b88328fc71ed07e8e50f22a2eb0df4bf4201)), closes [#869](https://github.com/a2aproject/a2a-python/issues/869) +* EventQueue is now a simple interface with single enqueue_event method. ([#944](https://github.com/a2aproject/a2a-python/issues/944)) ([f0e1d74](https://github.com/a2aproject/a2a-python/commit/f0e1d74802e78a4e9f4c22cbc85db104137e0cd2)) +* Implementation of DefaultRequestHandlerV2 ([#933](https://github.com/a2aproject/a2a-python/issues/933)) ([462eb3c](https://github.com/a2aproject/a2a-python/commit/462eb3cb7b6070c258f5672aa3b0aa59e913037c)), closes [#869](https://github.com/a2aproject/a2a-python/issues/869) +* InMemoryTaskStore creates a copy of Task by default to make it consistent with database task stores ([#887](https://github.com/a2aproject/a2a-python/issues/887)) ([8c65e84](https://github.com/a2aproject/a2a-python/commit/8c65e84fb844251ce1d8f04d26dbf465a89b9a29)), closes [#869](https://github.com/a2aproject/a2a-python/issues/869) +* merge metadata of new and old artifact when append=True ([#945](https://github.com/a2aproject/a2a-python/issues/945)) ([cc094aa](https://github.com/a2aproject/a2a-python/commit/cc094aa51caba8107b63982e9b79256f7c2d331a)) +* **server:** add async context manager support to EventQueue ([#743](https://github.com/a2aproject/a2a-python/issues/743)) ([f68b22f](https://github.com/a2aproject/a2a-python/commit/f68b22f0323ed4ff9267fabcf09c9d873baecc39)) +* **server:** validate presence according to `google.api.field_behavior` annotations ([#870](https://github.com/a2aproject/a2a-python/issues/870)) ([4586c3e](https://github.com/a2aproject/a2a-python/commit/4586c3ec0b507d64caa3ced72d68a34ec5b37a11)) +* Simplify ActiveTask.subscribe() ([#958](https://github.com/a2aproject/a2a-python/issues/958)) ([62e5e59](https://github.com/a2aproject/a2a-python/commit/62e5e59a30b11b9b493f7bf969aa13173ce51b9c)) +* Support AgentExectuor enqueue of a Task object. ([#960](https://github.com/a2aproject/a2a-python/issues/960)) ([12ce017](https://github.com/a2aproject/a2a-python/commit/12ce0179056db9d9ba2abdd559cb5a4bb5a20ddf)) +* Support Message-only simplified execution without creating Task ([#956](https://github.com/a2aproject/a2a-python/issues/956)) ([354fdfb](https://github.com/a2aproject/a2a-python/commit/354fdfb68dd0c7894daaac885a06dfed0ab839c8)) +* Unhandled exception in AgentExecutor marks task as failed ([#943](https://github.com/a2aproject/a2a-python/issues/943)) ([4fc6b54](https://github.com/a2aproject/a2a-python/commit/4fc6b54fd26cc83d810d81f923579a1cd4853b39)) + + +### Bug Fixes + +* Add `packaging` to base dependencies ([#897](https://github.com/a2aproject/a2a-python/issues/897)) ([7a9aec7](https://github.com/a2aproject/a2a-python/commit/7a9aec7779448faa85a828d1076bcc47cda7bdbb)) +* **client:** do not mutate SendMessageRequest in BaseClient.send_message ([#949](https://github.com/a2aproject/a2a-python/issues/949)) ([94537c3](https://github.com/a2aproject/a2a-python/commit/94537c382be4160332279a44d83254feeb0b8037)) +* fix `athrow()` RuntimeError on streaming responses ([#912](https://github.com/a2aproject/a2a-python/issues/912)) ([ca7edc3](https://github.com/a2aproject/a2a-python/commit/ca7edc3b670538ce0f051c49f2224173f186d3f4)) +* fix docstrings related to `CallContextBuilder` args in constructors and make ServerCallContext mandatory in `compat` folder ([#907](https://github.com/a2aproject/a2a-python/issues/907)) ([9cade9b](https://github.com/a2aproject/a2a-python/commit/9cade9bdadfb94f2f857ec2dc302a2c402e7f0ea)) +* fix error handling for gRPC and SSE streaming ([#879](https://github.com/a2aproject/a2a-python/issues/879)) ([2b323d0](https://github.com/a2aproject/a2a-python/commit/2b323d0b191279fb5f091199aa30865299d5fcf2)) +* fix JSONRPC error handling ([#957](https://github.com/a2aproject/a2a-python/issues/957)) ([6c807d5](https://github.com/a2aproject/a2a-python/commit/6c807d51c49ac294a6e3cbec34be101d4f91870d)) +* fix REST error handling ([#893](https://github.com/a2aproject/a2a-python/issues/893)) ([405be3f](https://github.com/a2aproject/a2a-python/commit/405be3fa3ef8c60f730452b956879beeaecc5957)) +* handle SSE errors occurred after stream started ([#894](https://github.com/a2aproject/a2a-python/issues/894)) ([3a68d8f](https://github.com/a2aproject/a2a-python/commit/3a68d8f916d96ae135748ee2b9b907f8dace4fa7)) +* remove the use of deprecated types from VertexTaskStore ([#889](https://github.com/a2aproject/a2a-python/issues/889)) ([6d49122](https://github.com/a2aproject/a2a-python/commit/6d49122238a5e7d497c5d002792732446071dcb2)) +* Remove unconditional SQLAlchemy dependency from SDK core ([#898](https://github.com/a2aproject/a2a-python/issues/898)) ([ab762f0](https://github.com/a2aproject/a2a-python/commit/ab762f0448911a9ac05b6e3fec0104615e0ec557)), closes [#883](https://github.com/a2aproject/a2a-python/issues/883) +* remove unused import and request for FastAPI in pyproject ([#934](https://github.com/a2aproject/a2a-python/issues/934)) ([fe5de77](https://github.com/a2aproject/a2a-python/commit/fe5de77a1d457958fe14fec61b0d8aa41c5ec300)) +* replace stale entry in a2a.types.__all__ with actual import name ([#902](https://github.com/a2aproject/a2a-python/issues/902)) ([05cd5e9](https://github.com/a2aproject/a2a-python/commit/05cd5e9b73b55d2863c58c13be0c7dd21d8124bb)) +* wrong method name for ExtendedAgentCard endpoint in JsonRpc compat version ([#931](https://github.com/a2aproject/a2a-python/issues/931)) ([5d22186](https://github.com/a2aproject/a2a-python/commit/5d22186b8ee0f64b744512cdbe7ab6176fa97c60)) + + +### Documentation + +* add Database Migration Documentation ([#864](https://github.com/a2aproject/a2a-python/issues/864)) ([fd12dff](https://github.com/a2aproject/a2a-python/commit/fd12dffa3a7aa93816c762a155ed9b505086b924)) + + +### Miscellaneous Chores + +* release 1.0.0-alpha.1 ([a61f6d4](https://github.com/a2aproject/a2a-python/commit/a61f6d4e2e7ce1616a35c3a2ede64a4c9067048a)) + + +### Code Refactoring + +* **client:** make ClientConfig.push_notification_config singular ([#955](https://github.com/a2aproject/a2a-python/issues/955)) ([be4c5ff](https://github.com/a2aproject/a2a-python/commit/be4c5ff17a2f58e20d5d333a5e8e7bfcaa58c6c0)) +* **client:** remove `ClientTaskManager` and `Consumers` from client ([#916](https://github.com/a2aproject/a2a-python/issues/916)) ([97058bb](https://github.com/a2aproject/a2a-python/commit/97058bb444ea663d77c3b62abcf2fd0c30a1a526)), closes [#734](https://github.com/a2aproject/a2a-python/issues/734) +* **client:** reorganize ClientFactory API ([#947](https://github.com/a2aproject/a2a-python/issues/947)) ([01b3b2c](https://github.com/a2aproject/a2a-python/commit/01b3b2c0e196b0aab4f1f0dc22a95c09c7ee914d)) +* **server:** add build_user function to DefaultContextBuilder to allow A2A user creation customization ([#925](https://github.com/a2aproject/a2a-python/issues/925)) ([2648c5e](https://github.com/a2aproject/a2a-python/commit/2648c5e50281ceb9795b10a726bd23670b363ae1)) +* **server:** migrate from Application wrappers to Starlette route-based endpoints for jsonrpc ([#873](https://github.com/a2aproject/a2a-python/issues/873)) ([734d062](https://github.com/a2aproject/a2a-python/commit/734d0621dc6170d10d0cdf9c074e5ae28531fc71)) +* **server:** migrate from Application wrappers to Starlette route-based endpoints for rest ([#892](https://github.com/a2aproject/a2a-python/issues/892)) ([4be2064](https://github.com/a2aproject/a2a-python/commit/4be2064b5d511e0b4617507ed0c376662688ebeb)) + +## 1.0.0-alpha.0 (2026-03-17) + + +### ⚠ BREAKING CHANGES + +* **spec**: upgrade SDK to A2A 1.0 spec and use proto-based types ([#572](https://github.com/a2aproject/a2a-python/issues/572), [#665](https://github.com/a2aproject/a2a-python/issues/665), [#804](https://github.com/a2aproject/a2a-python/issues/804), [#765](https://github.com/a2aproject/a2a-python/issues/765)) +* **client:** introduce ServiceParameters for extensions and include it in ClientCallContext ([#784](https://github.com/a2aproject/a2a-python/issues/784)) +* **client:** rename "callback" -> "push_notification_config" ([#749](https://github.com/a2aproject/a2a-python/issues/749)) +* **client:** transport agnostic interceptors ([#796](https://github.com/a2aproject/a2a-python/issues/796)) ([a910cbc](https://github.com/a2aproject/a2a-python/commit/a910cbcd48f6017c19bb4c87be3c62b7d7e9810d)) +* add `protocol_version` column to Task and PushNotificationConfig models and create a migration ([#789](https://github.com/a2aproject/a2a-python/issues/789)) ([2e2d431](https://github.com/a2aproject/a2a-python/commit/2e2d43190930612495720c372dd2d9921c0311f9)) +* **server:** implement `Resource Scoping` for tasks and push notifications ([#709](https://github.com/a2aproject/a2a-python/issues/709)) ([f0d4669](https://github.com/a2aproject/a2a-python/commit/f0d4669224841657341e7f773b427e2128ab0ed8)) + +### Features + +* add GetExtendedAgentCardRequest as input parameter to GetExtendedAgentCard method ([#767](https://github.com/a2aproject/a2a-python/issues/767)) ([13a092f](https://github.com/a2aproject/a2a-python/commit/13a092f5a5d7b2b2654c69a99dc09ed9d928ffe5)) +* add validation for the JSON-RPC version ([#808](https://github.com/a2aproject/a2a-python/issues/808)) ([6eb7e41](https://github.com/a2aproject/a2a-python/commit/6eb7e4155517be8ff0766c0a929fd7d7b4a52db5)) +* **client:** expose close() and async context manager support on abstract Client ([#719](https://github.com/a2aproject/a2a-python/issues/719)) ([e25ba7b](https://github.com/a2aproject/a2a-python/commit/e25ba7be57fe28ab101a9726972f7c8620468a52)) +* **compat:** AgentCard backward compatibility helpers and tests ([#760](https://github.com/a2aproject/a2a-python/issues/760)) ([81f3494](https://github.com/a2aproject/a2a-python/commit/81f349482fc748c93b073a9f2af715e7333b0dfb)) +* **compat:** GRPC client compatible with 0.3 server ([#779](https://github.com/a2aproject/a2a-python/issues/779)) ([0ebca93](https://github.com/a2aproject/a2a-python/commit/0ebca93670703490df1e536d57b4cd83595d0e51)) +* **compat:** GRPC server compatible with 0.3 client ([#772](https://github.com/a2aproject/a2a-python/issues/772)) ([80d827a](https://github.com/a2aproject/a2a-python/commit/80d827ae4ebb6515bf8dcb10e50ba27be8b6b41b)) +* **compat:** legacy v0.3 protocol models, conversion logic and utilities ([#754](https://github.com/a2aproject/a2a-python/issues/754)) ([26835ad](https://github.com/a2aproject/a2a-python/commit/26835ad3f6d256ff6b84858d690204da66854eb9)) +* **compat:** REST and JSONRPC clients compatible with 0.3 servers ([#798](https://github.com/a2aproject/a2a-python/issues/798)) ([08794f7](https://github.com/a2aproject/a2a-python/commit/08794f7bd05c223f8621d4b6924fc9a80d898a39)) +* **compat:** REST and JSONRPC servers compatible with 0.3 clients ([#795](https://github.com/a2aproject/a2a-python/issues/795)) ([9856054](https://github.com/a2aproject/a2a-python/commit/9856054f8398162b01e38b65b2e090adb95f1e8b)) +* **compat:** set a2a-version header to 1.0.0 ([#764](https://github.com/a2aproject/a2a-python/issues/764)) ([4cb68aa](https://github.com/a2aproject/a2a-python/commit/4cb68aa26a80a1121055d11f067824610a035ee6)) +* **compat:** unify v0.3 REST url prefix and expand cross-version tests ([#820](https://github.com/a2aproject/a2a-python/issues/820)) ([0925f0a](https://github.com/a2aproject/a2a-python/commit/0925f0aa27800df57ca766a1f7b0a36071e3752c)) +* database forward compatibility: make `owner` field optional ([#812](https://github.com/a2aproject/a2a-python/issues/812)) ([cc29d1f](https://github.com/a2aproject/a2a-python/commit/cc29d1f2fb1dbaeae80a08b783e3ba05bc4a757e)) +* handle tenant in Client ([#758](https://github.com/a2aproject/a2a-python/issues/758)) ([5b354e4](https://github.com/a2aproject/a2a-python/commit/5b354e403a717c3c6bf47a291bef028c8c6a9d94)) +* implement missing push notifications related methods ([#711](https://github.com/a2aproject/a2a-python/issues/711)) ([041f0f5](https://github.com/a2aproject/a2a-python/commit/041f0f53bcf5fc2e74545d653bfeeba8d2d85c79)) +* implement rich gRPC error details per A2A v1.0 spec ([#790](https://github.com/a2aproject/a2a-python/issues/790)) ([245eca3](https://github.com/a2aproject/a2a-python/commit/245eca30b70ccd1809031325dc9b86f23a9bac2a)) +* **rest:** add tenant support to rest ([#773](https://github.com/a2aproject/a2a-python/issues/773)) ([4771b5a](https://github.com/a2aproject/a2a-python/commit/4771b5aa1dbae51fdb5f7ff4324136d4db31e76f)) +* send task as a first subscribe event ([#716](https://github.com/a2aproject/a2a-python/issues/716)) ([e71ac62](https://github.com/a2aproject/a2a-python/commit/e71ac6266f506ec843d00409d606acb22fec5f78)) +* **server, grpc:** Implement tenant context propagation for gRPC requests. ([#781](https://github.com/a2aproject/a2a-python/issues/781)) ([164f919](https://github.com/a2aproject/a2a-python/commit/164f9197f101e3db5c487c4dede45b8729475a8c)) +* **server, json-rpc:** Implement tenant context propagation for JSON-RPC requests. ([#778](https://github.com/a2aproject/a2a-python/issues/778)) ([72a330d](https://github.com/a2aproject/a2a-python/commit/72a330d2c073ece51e093542c41ec171c667f312)) +* **server:** add v0.3 legacy compatibility for database models ([#783](https://github.com/a2aproject/a2a-python/issues/783)) ([08c491e](https://github.com/a2aproject/a2a-python/commit/08c491eb6c732f7a872e562cd0fbde01df791cca)) +* **spec:** add `tasks/list` method with filtering and pagination to the specification ([#511](https://github.com/a2aproject/a2a-python/issues/511)) ([d5818e5](https://github.com/a2aproject/a2a-python/commit/d5818e5233d9f0feeab3161cc3b1be3ae236d887)) +* use StreamResponse as push notifications payload ([#724](https://github.com/a2aproject/a2a-python/issues/724)) ([a149a09](https://github.com/a2aproject/a2a-python/commit/a149a0923c14480888c48156710413967dfebc36)) +* **rest:** update REST error handling to use `google.rpc.Status` ([#838](https://github.com/a2aproject/a2a-python/issues/838)) ([ea7d3ad](https://github.com/a2aproject/a2a-python/commit/ea7d3add16e137ea6c71272d845bdc9bfb5853c8)) + + +### Bug Fixes + +* add history length and page size validations ([#726](https://github.com/a2aproject/a2a-python/issues/726)) ([e67934b](https://github.com/a2aproject/a2a-python/commit/e67934b06442569a993455753ee4a360ac89b69f)) +* allign error codes with the latest spec ([#826](https://github.com/a2aproject/a2a-python/issues/826)) ([709b1ff](https://github.com/a2aproject/a2a-python/commit/709b1ff57b7604889da0c532a6b33954ee65491b)) +* **client:** align send_message signature with BaseClient ([#740](https://github.com/a2aproject/a2a-python/issues/740)) ([57cb529](https://github.com/a2aproject/a2a-python/commit/57cb52939ef9779eebd993a078cfffb854663e3e)) +* get_agent_card trailing slash when agent_card_path="" ([#799](https://github.com/a2aproject/a2a-python/issues/799)) ([#800](https://github.com/a2aproject/a2a-python/issues/800)) ([a55c97e](https://github.com/a2aproject/a2a-python/commit/a55c97e4d2031d74b57835710e07344484fb9fb6)) +* handle parsing error in REST ([#806](https://github.com/a2aproject/a2a-python/issues/806)) ([bbd09f2](https://github.com/a2aproject/a2a-python/commit/bbd09f232f556c527096eea5629688e29abb3f2f)) +* Improve error handling for Timeout exceptions on REST and JSON-RPC clients ([#690](https://github.com/a2aproject/a2a-python/issues/690)) ([2acd838](https://github.com/a2aproject/a2a-python/commit/2acd838796d44ab9bfe6ba8c8b4ea0c2571a59dc)) +* Improve streaming errors handling ([#576](https://github.com/a2aproject/a2a-python/issues/576)) ([7ea7475](https://github.com/a2aproject/a2a-python/commit/7ea7475091df2ee40d3035ef1bc34ee2f86524ee)) +* properly handle unset and zero history length ([#717](https://github.com/a2aproject/a2a-python/issues/717)) ([72a1007](https://github.com/a2aproject/a2a-python/commit/72a100797e513730dbeb80477c943b36cf79c957)) +* return entire history when history_length=0 ([#537](https://github.com/a2aproject/a2a-python/issues/537)) ([acdc0de](https://github.com/a2aproject/a2a-python/commit/acdc0de4fa03d34a6b287ab252ff51b19c3016b5)) +* return mandatory fields from list_tasks ([#710](https://github.com/a2aproject/a2a-python/issues/710)) ([6132053](https://github.com/a2aproject/a2a-python/commit/6132053976c4e8b2ce7cad9b87072fa8fb5a2cf0)) +* taskslist error on invalid page token and response serialization ([#814](https://github.com/a2aproject/a2a-python/issues/814)) ([a102d31](https://github.com/a2aproject/a2a-python/commit/a102d31abe8d72d18ec706f083855b7aad8bbbd4)) +* use correct REST path for Get Extended Agent Card operation ([#769](https://github.com/a2aproject/a2a-python/issues/769)) ([ced3f99](https://github.com/a2aproject/a2a-python/commit/ced3f998a9d0b97495ebded705422459aa8d7398)) +* Use POST method for REST endpoint /tasks/{id}:subscribe ([#843](https://github.com/a2aproject/a2a-python/issues/843)) ([a0827d0](https://github.com/a2aproject/a2a-python/commit/a0827d0d2887749c922e5cafbc897e465ba8fe17)) + ## [0.3.26](https://github.com/a2aproject/a2a-python/compare/v0.3.25...v0.3.26) (2026-04-09) diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md index 257e8a0cd..3ef339257 100644 --- a/CODE_OF_CONDUCT.md +++ b/CODE_OF_CONDUCT.md @@ -93,4 +93,4 @@ available at https://www.contributor-covenant.org/version/1/4/code-of-conduct.html Note: A version of this file is also available in the -[New Project repository](https://github.com/google/new-project/blob/master/docs/code-of-conduct.md). +[New Project repository](https://github.com/google/new-project/blob/main/docs/code-of-conduct.md). diff --git a/GEMINI.md b/GEMINI.md index aaab0bf66..e6bf43b65 100644 --- a/GEMINI.md +++ b/GEMINI.md @@ -8,7 +8,7 @@ - **Language**: Python 3.10+ - **Package Manager**: `uv` -- **Lead Transports**: FastAPI (REST/JSON-RPC), gRPC +- **Lead Transports**: Starlette (REST/JSON-RPC), gRPC - **Data Layer**: SQLAlchemy (SQL), Pydantic (Logic/Legacy), Protobuf (Modern Messaging) - **Key Directories**: - `/src`: Core implementation logic. @@ -23,3 +23,26 @@ 1. **Required Reading**: You MUST read the contents of @./docs/ai/coding_conventions.md and @./docs/ai/mandatory_checks.md at the very beginning of EVERY coding task. 2. **Initial Checklist**: Every `task.md` you create MUST include a section for **Mandatory Checks** from @./docs/ai/mandatory_checks.md. 3. **Verification Requirement**: You MUST run all mandatory checks before declaring any task finished. + +## 5. Mistake Reflection Protocol + +> [!NOTE] for Users: +> `docs/ai/ai_learnings.md` is a local-only file (excluded from git) meant to be +> read by the developer to improve AI assistant behavior on this project. Use its +> findings to improve the GEMINI.md setup. + +When you realise you have made a mistake — whether caught by the user, +by a tool, or by your own reasoning — you MUST: + +1. **Acknowledge the mistake explicitly** and explain what went wrong. +2. **Reflect on the root cause**: was it a missing check, a false assumption, skipped verification, or a gap in the workflow? +3. **Immediately append a new entry to `docs/ai/ai_learnings.md`** — this is not optional and does not require user confirmation. Do it before continuing, then update the user about the workflow change. + + **Entry format:** + - **Mistake**: What went wrong. + - **Root cause**: Why it happened. + - **Rule**: The concrete rule added to prevent recurrence. + +The goal is to treat every mistake as a signal that the workflow is +incomplete, and to improve it in place so the same mistake cannot +happen again. diff --git a/buf.compat.gen.yaml b/buf.compat.gen.yaml new file mode 100644 index 000000000..759cad2dd --- /dev/null +++ b/buf.compat.gen.yaml @@ -0,0 +1,12 @@ +# Protobuf generation for legacy v0.3 A2A protocol buffer modules. +--- +version: v2 +managed: + enabled: true +plugins: + - remote: buf.build/protocolbuffers/python:v29.3 + out: src/a2a/compat/v0_3 + - remote: buf.build/grpc/python + out: src/a2a/compat/v0_3 + - remote: buf.build/protocolbuffers/pyi + out: src/a2a/compat/v0_3 diff --git a/buf.gen.yaml b/buf.gen.yaml index c70bf9e77..d7937469c 100644 --- a/buf.gen.yaml +++ b/buf.gen.yaml @@ -2,8 +2,8 @@ version: v2 inputs: - git_repo: https://github.com/a2aproject/A2A.git - ref: main - subdir: specification/grpc + ref: v1.0.0 + subdir: specification managed: enabled: true # Python Generation @@ -21,11 +21,11 @@ plugins: # Generate python protobuf related code # Generates *_pb2.py files, one for each .proto - remote: buf.build/protocolbuffers/python:v29.3 - out: src/a2a/grpc + out: src/a2a/types # Generate python service code. # Generates *_pb2_grpc.py - remote: buf.build/grpc/python - out: src/a2a/grpc + out: src/a2a/types # Generates *_pb2.pyi files. - remote: buf.build/protocolbuffers/pyi - out: src/a2a/grpc + out: src/a2a/types diff --git a/docs/migrations/v1_0/database/README.md b/docs/migrations/v1_0/database/README.md new file mode 100644 index 000000000..6cde621d3 --- /dev/null +++ b/docs/migrations/v1_0/database/README.md @@ -0,0 +1,22 @@ +# Database Migration Guide: v0.3 to v1.0 + +The A2A SDK v1.0 introduces significant updates to the database persistence layer, including a new schema for tracking task ownership and protocol versions. This guide provides the necessary steps to migrate your database from v0.3 to the v1.0 persistence model without data loss. + +--- + +## ⚡ Choose Your Migration Strategy + +Depending on your application's availability requirements, choose one of the following paths: + +| Strategy | Downtime | Complexity | Best For | +| :--- | :--- | :--- | :--- | +| **[Simple Migration](simple_migration.md)** | Short (Restart) | Low | Single-instance apps, non-critical services. | +| **[Zero Downtime Migration](zero_downtime.md)** | None | Medium | Multi-instance, high-availability production environments. | + +--- + +## 🏗️ Technical Overview + +The v1.0 database migration involves: +1. **Schema Updates**: Adding the `protocol_version`, `owner`, and `last_updated` columns to the `tasks` table, and the `protocol_version` and `owner` columns to the `push_notification_configs` table. +2. **Storage Model**: Transitioning from Pydantic-based JSON to Protobuf-based JSON serialization for better interoperability and performance. diff --git a/docs/migrations/v1_0/database/simple_migration.md b/docs/migrations/v1_0/database/simple_migration.md new file mode 100644 index 000000000..82561f398 --- /dev/null +++ b/docs/migrations/v1_0/database/simple_migration.md @@ -0,0 +1,80 @@ +# Simple Migration: v0.3 to v1.0 + +This guide is for users who can afford a short period of downtime during the migration from A2A protocol v0.3 to v1.0. This is the recommended path for single-instance applications or non-critical services. + +--- + +> [!WARNING] +> **Safety First:** +> Before proceeding, ensure you have a backup of your database. + +--- + +## 🛠 Prerequisites + +### Install Migration Tools +The migration CLI is not included in the base package. Install the `db-cli` extra: + +```bash +uv add "a2a-sdk[db-cli]" +# OR +pip install "a2a-sdk[db-cli]" +``` + +--- + +## 🚀 Migration Steps + +### Step 1: Apply Schema Updates + +Run the `a2a-db` migration tool to update your tables. This adds new columns (`owner`, `protocol_version`, `last_updated`) while leaving existing v0.3 data intact. + +```bash +# Run migration against your target database +uv run a2a-db --database-url "your-database-url" +``` + +> [!NOTE] +> +>For more details on the CLI migration tool, including flags, see the [A2A SDK Database Migrations README](../../../../src/a2a/migrations/README.md). + +> [!NOTE] +> +> The v1.0 database stores are designed to be backward compatible by default. After this step, your new v1.0 code will be able to read existing v0.3 entries from the database using a built-in legacy parser. + +### Step 2: Verify the Migration + +Confirm the schema is at the correct version: + +```bash +uv run a2a-db current +``` +The output should show the latest revision ID (e.g., `38ce57e08137`). + +### Step 3: Update Your Application Code + +Upgrade your application to use the v1.0 SDK. + +--- + +## ↩️ Rollback Strategy + +If your application fails to start or encounters errors after the migration: + +1. **Revert Application Code**: Revert your application code to use the v0.3 SDK. + + > [!NOTE] + > Older SDKs are compatible with the new schema (as new columns are nullable). If something breaks, rolling back the application code is usually sufficient. + +2. **Revert Schema (Fallback)**: If you encounter database issues, use the `downgrade` command to step back to the v0.3 structure. + ```bash + uv run a2a-db downgrade -1 + ``` +3. **Restart**: Resume operations using the v0.3 SDK. + + +--- + +## 🧩 Resources +- **[Zero Downtime Migration](zero_downtime.md)**: If you cannot stop your application. +- **[a2a-db CLI](../../../../src/a2a/migrations/README.md)**: The primary tool for executing schema migrations. diff --git a/docs/migrations/v1_0/database/zero_downtime.md b/docs/migrations/v1_0/database/zero_downtime.md new file mode 100644 index 000000000..026ec88c1 --- /dev/null +++ b/docs/migrations/v1_0/database/zero_downtime.md @@ -0,0 +1,132 @@ +# Zero Downtime Migration: v0.3 to v1.0 + +This guide outlines the strategy for migrating your Agent application from A2A protocol v0.3 to v1.0 without service interruption, even when running multiple distributed instances sharing a single database. + +--- + +> [!WARNING] +> **Safety First:** +> Before proceeding, ensure you have a backup of your database. + +--- + +## 🛠 Prerequisites + +### Install Migration Tools +The migration CLI is not included in the base package. Install the `db-cli` extra: + +```bash +uv add "a2a-sdk[db-cli]" +# OR +pip install "a2a-sdk[db-cli]" +``` + +--- + +## 🏗️ The 3-Step Strategy + +Zero-downtime migration requires an "Expand, Migrate, Contract" pattern. It means we first expand the schema, then migrate the code to coexist with the old format, and finally transition fully to the new v1.0 standards. + +### Step 1: Apply Schema Updates + +Run the `a2a-db` migration tool to update your tables. This adds new columns (`owner`, `protocol_version`, `last_updated`) while leaving existing v0.3 data intact. + +```bash +# Run migration against your target database +uv run a2a-db --database-url "your-database-url" +``` + +> [!NOTE] +> +>For more details on the CLI migration tool, including flags, see the [A2A SDK Database Migrations README](../../../../src/a2a/migrations/README.md). + +> [!NOTE] +> All new columns are nullable. Your existing v0.3 code will continue to work normally after this step is completed. +> +> The v1.0 database stores are designed to be backward compatible by default. After this step, your new v1.0 code will be able to read existing v0.3 entries from the database using a built-in legacy parser. + +#### ✅ How to Verify +Confirm the schema is at the correct version: + +```bash +uv run a2a-db current +``` +The output should show the latest revision ID (e.g., `38ce57e08137`). + +### Step 2: Rolling Deployment in Compatibility Mode + +In this step, you deploy the v1.0 SDK code but configure it to **write** data in the legacy v0.3 format. This ensures that any v0.3 instances still running in your cluster can read data produced by the new v1.0 instances. + +#### Update Initialization Code +Enable the v0.3 conversion utilities in your application entry point (e.g., `main.py`). + +```python +from a2a.server.tasks import DatabaseTaskStore, DatabasePushNotificationConfigStore +from a2a.compat.v0_3.model_conversions import ( + core_to_compat_task_model, + core_to_compat_push_notification_config_model, +) + +# Initialize stores with compatibility conversion +# The '... # other' represents your existing configuration (engine, table_name, etc.) +task_store = DatabaseTaskStore( + ... # other arguments + core_to_model_conversion=core_to_compat_task_model +) + +config_store = DatabasePushNotificationConfigStore( + ... # other arguments + core_to_model_conversion=core_to_compat_push_notification_config_model +) +``` + +#### Perform a Rolling Restart +Deploy the new code by restarting your instances one by one. + +#### ✅ How to Verify +Verify that v1.0 instances are successfully writing to the database. In the `tasks` and `push_notification_configs` tables, new rows created during this phase should have `protocol_version` set to `0.3`. + +### Step 3: Transition to v1.0 Mode + +Once **100%** of your application instances are running v1.0 code (with compatibility mode enabled), you can switch to the v1.0 write format. + +> [!CAUTION] +> **CRITICAL PRE-REQUISITE**: Do NOT start Step 3 until you have confirmed that no v0.3 instances remain. Old v0.3 code cannot parse the new v1.0 native database entries. + +#### Disable Compatibility Logic +Remove the `core_to_model_conversion` arguments from your Store constructors. + +```python +# Revert to native v1.0 write behavior +task_store = DatabaseTaskStore(engine=engine, ...) +config_store = DatabasePushNotificationConfigStore(engine=engine, ...) +``` + +#### Perform a Final Rolling Restart + +Restart your instances again. + +#### ✅ How to Verify +Inspect the `tasks` and `push_notification_configs` tables. New entries should now show `protocol_version` as `1.0`. + +--- + +## 🛠️ Why it Works + +The A2A `DatabaseStore` classes follow a version-aware read/write pattern: + +1. **Write Logic**: If `core_to_model_conversion` is provided, it is used. Otherwise, it defaults to the v1.0 Protobuf JSON format. +2. **Read Logic**: The store automatically inspects the `protocol_version` column for every row. + * If `NULL` or `0.3`, it uses the internal **v0.3 legacy parser**. + * If `1.0`, it uses the modern **Protobuf parser**. + +This allows v1.0 instances to read *all* existing data regardless of when it was written. + +--- + +## 🧩 Resources +- **[a2a-db CLI](../../../../src/a2a/migrations/README.md)**: The primary tool for executing schema migrations. +- **[Compatibility Conversions](../../../../src/a2a/compat/v0_3/model_conversions.py)**: Source for model conversion functions `core_to_compat_task_model` and `core_to_compat_push_notification_config_model` used in Step 2. +- **[Task Store Implementation](../../../../src/a2a/server/tasks/database_task_store.py)**: The `DatabaseTaskStore` which handles the version-aware read/write logic. +- **[Push Notification Store Implementation](../../../../src/a2a/server/tasks/database_push_notification_config_store.py)**: The `DatabasePushNotificationConfigStore` which handles the version-aware read/write logic. + diff --git a/itk/README.md b/itk/README.md new file mode 100644 index 000000000..9a82d0469 --- /dev/null +++ b/itk/README.md @@ -0,0 +1,74 @@ +# Running ITK Tests Locally + +This directory contains scripts to run Integration Test Kit (ITK) tests locally using Podman. + +## Prerequisites + +### 1. Install Podman + +Run the following commands to install Podman and its components: + +```bash +sudo apt update && sudo apt install -y podman podman-docker podman-compose +``` + +### 2. Configure SubUIDs/SubGIDs + +For rootless Podman to function correctly, you need to ensure subuids and subgids are configured for your user. + +If they are not already configured, you can add them using (replace `$USER` with your username if needed): + +```bash +sudo usermod --add-subuids 100000-165535 --add-subgids 100000-165535 $USER +``` + +After adding subuids or if you encounter permission issues, run: + +```bash +podman system migrate +``` + +## Running Tests + +### 1. Set Environment Variable + +You must set the `A2A_SAMPLES_REVISION` environment variable to specify which revision of the `a2a-samples` repository to use for testing. This can be a branch name, tag, or commit hash. + +Example: +``` +export A2A_SAMPLES_REVISION=itk-v.015-alpha +``` + +### 2. Execute Tests + +Run the test script from this directory: + +```bash +./run_itk.sh +``` + +The script will: +- Clone `a2a-samples` (if not already present). +- Checkout the specified revision. +- Build the ITK service Docker image. +- Run the tests and output results. + +## Debugging + +To enable debug logging and persist logs for inspection: + +1. Set the `ITK_LOG_LEVEL` environment variable to `DEBUG`: + + ```bash + export ITK_LOG_LEVEL=DEBUG + ``` +2. Run the test script: + ```bash + ./run_itk.sh + ``` + +When run in `DEBUG` mode: +- The `logs/` directory will be created in this directory (if it doesn't exist). +- The `logs/` directory will be mounted to the container. +- The test execution will produce detailed logs in `logs/` (e.g., `agent_current.log`). +- The `logs/` directory will **not** be removed during cleanup. diff --git a/src/a2a/grpc/__init__.py b/itk/__init__.py similarity index 100% rename from src/a2a/grpc/__init__.py rename to itk/__init__.py diff --git a/itk/main.py b/itk/main.py new file mode 100644 index 000000000..6792c540a --- /dev/null +++ b/itk/main.py @@ -0,0 +1,388 @@ +import argparse # noqa: I001 +import asyncio +import base64 +import logging +import os +import uuid + +import grpc +import httpx +import uvicorn + +from fastapi import FastAPI + +from pyproto import instruction_pb2 + +from a2a.client import ClientConfig, create_client +from a2a.compat.v0_3 import a2a_v0_3_pb2_grpc +from a2a.compat.v0_3.grpc_handler import CompatGrpcHandler +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler, GrpcHandler +from a2a.server.tasks import TaskUpdater +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import a2a_pb2_grpc +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Message, + Part, + SendMessageRequest, + Task, + TaskState, + TaskStatus, +) +from a2a.utils import TransportProtocol + + +log_level = os.environ.get('ITK_LOG_LEVEL', 'INFO').upper() +logging.basicConfig(level=log_level) +logger = logging.getLogger(__name__) + + +def extract_instruction( + message: Message | None, +) -> instruction_pb2.Instruction | None: + """Extracts an Instruction proto from an A2A Message.""" + if not message or not message.parts: + return None + + for part in message.parts: + # 1. Handle binary protobuf part (media_type or filename) + if ( + part.media_type == 'application/x-protobuf' + or part.filename == 'instruction.bin' + ): + try: + inst = instruction_pb2.Instruction() + if part.raw: + inst.ParseFromString(part.raw) + elif part.text: + # Some clients might send it as base64 in text part + raw = base64.b64decode(part.text) + inst.ParseFromString(raw) + except Exception: + logger.debug( + 'Failed to parse instruction from binary part', + exc_info=True, + ) + continue + else: + return inst + + # 2. Handle base64 encoded instruction in any text part + if part.text: + try: + raw = base64.b64decode(part.text) + inst = instruction_pb2.Instruction() + inst.ParseFromString(raw) + except Exception: + logger.debug( + 'Failed to parse instruction from text part', exc_info=True + ) + continue + else: + return inst + return None + + +def wrap_instruction_to_request(inst: instruction_pb2.Instruction) -> Message: + """Wraps an Instruction proto into an A2A Message.""" + inst_bytes = inst.SerializeToString() + return Message( + role='ROLE_USER', + message_id=str(uuid.uuid4()), + parts=[ + Part( + raw=inst_bytes, + media_type='application/x-protobuf', + filename='instruction.bin', + ) + ], + ) + + +async def handle_call_agent(call: instruction_pb2.CallAgent) -> list[str]: + """Handles the CallAgent instruction by invoking another agent.""" + logger.info('Calling agent %s via %s', call.agent_card_uri, call.transport) + + # Mapping transport string to TransportProtocol enum + transport_map = { + 'JSONRPC': TransportProtocol.JSONRPC, + 'HTTP+JSON': TransportProtocol.HTTP_JSON, + 'HTTP_JSON': TransportProtocol.HTTP_JSON, + 'REST': TransportProtocol.HTTP_JSON, + 'GRPC': TransportProtocol.GRPC, + } + + selected_transport = transport_map.get(call.transport.upper()) + if selected_transport is None: + raise ValueError(f'Unsupported transport: {call.transport}') + + config = ClientConfig() + config.httpx_client = httpx.AsyncClient(timeout=30.0) + config.grpc_channel_factory = grpc.aio.insecure_channel + config.supported_protocol_bindings = [selected_transport] + config.streaming = call.streaming or ( + selected_transport == TransportProtocol.GRPC + ) + + try: + client = await create_client(call.agent_card_uri, client_config=config) + + # Wrap nested instruction + async with client: + nested_msg = wrap_instruction_to_request(call.instruction) + request = SendMessageRequest(message=nested_msg) + + results: list[str] = [] + async for event in client.send_message(request): + # Event is StreamResponse + logger.info('Event: %s', event) + stream_resp = event + + message = None + if stream_resp.HasField('message'): + message = stream_resp.message + elif stream_resp.HasField( + 'task' + ) and stream_resp.task.status.HasField('message'): + message = stream_resp.task.status.message + elif stream_resp.HasField( + 'status_update' + ) and stream_resp.status_update.status.HasField('message'): + message = stream_resp.status_update.status.message + + if message: + results.extend( + part.text for part in message.parts if part.text + ) + + except Exception as e: + logger.exception('Failed to call outbound agent') + raise RuntimeError( + f'Outbound call to {call.agent_card_uri} failed: {e!s}' + ) from e + else: + return results + + +async def handle_instruction(inst: instruction_pb2.Instruction) -> list[str]: + """Recursively handles instructions.""" + if inst.HasField('call_agent'): + return await handle_call_agent(inst.call_agent) + if inst.HasField('return_response'): + return [inst.return_response.response] + if inst.HasField('steps'): + all_results = [] + for step in inst.steps.instructions: + results = await handle_instruction(step) + all_results.extend(results) + return all_results + raise ValueError('Unknown instruction type') + + +class V10AgentExecutor(AgentExecutor): + """Executor for ITK v10 agent tasks.""" + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Executes a task instruction.""" + logger.info('Executing task %s', context.task_id) + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + + # Explicitly create the task by sending it to the queue + task = Task( + id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + history=[context.message] if context.message else [], + ) + async with task_updater._lock: # noqa: SLF001 + await event_queue.enqueue_event(task) + + await task_updater.update_status(TaskState.TASK_STATE_WORKING) + + instruction = extract_instruction(context.message) + if not instruction: + error_msg = 'No valid instruction found in request' + logger.error(error_msg) + await task_updater.update_status( + TaskState.TASK_STATE_FAILED, + message=task_updater.new_agent_message([Part(text=error_msg)]), + ) + return + + try: + logger.info('Instruction: %s', instruction) + results = await handle_instruction(instruction) + response_text = '\n'.join(results) + logger.info('Response: %s', response_text) + await task_updater.update_status( + TaskState.TASK_STATE_COMPLETED, + message=task_updater.new_agent_message( + [Part(text=response_text)] + ), + ) + logger.info('Task %s completed', context.task_id) + except Exception as e: + logger.exception('Error during instruction handling') + await task_updater.update_status( + TaskState.TASK_STATE_FAILED, + message=task_updater.new_agent_message([Part(text=str(e))]), + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Cancels a task.""" + logger.info('Cancel requested for task %s', context.task_id) + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + await task_updater.update_status(TaskState.TASK_STATE_CANCELED) + + +async def main_async(http_port: int, grpc_port: int) -> None: + """Starts the Agent with HTTP and gRPC interfaces.""" + interfaces = [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url=f'127.0.0.1:{grpc_port}', + protocol_version='1.0', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url=f'127.0.0.1:{grpc_port}', + protocol_version='0.3', + ), + ] + + interfaces.append( + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url=f'http://127.0.0.1:{http_port}/jsonrpc/', + protocol_version='1.0', + ) + ) + interfaces.append( + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url=f'http://127.0.0.1:{http_port}/jsonrpc/', + protocol_version='0.3', + ) + ) + interfaces.append( + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url=f'http://127.0.0.1:{http_port}/rest/', + protocol_version='1.0', + ) + ) + interfaces.append( + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url=f'http://127.0.0.1:{http_port}/rest/', + protocol_version='0.3', + ) + ) + + agent_card = AgentCard( + name='ITK v10 Agent', + description='Python agent using SDK 1.0.', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=interfaces, + ) + + task_store = InMemoryTaskStore() + handler = DefaultRequestHandler( + agent_executor=V10AgentExecutor(), + task_store=task_store, + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + ) + + handler_extended = DefaultRequestHandler( + agent_executor=V10AgentExecutor(), + task_store=task_store, + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + extended_agent_card=agent_card, + ) + + app = FastAPI() + + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/.well-known/agent-card.json' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler_extended, + rpc_url='/', + enable_v0_3_compat=True, + ) + app.mount( + '/jsonrpc', + FastAPI(routes=jsonrpc_routes + agent_card_routes), + ) + + rest_routes = create_rest_routes( + request_handler=handler, + enable_v0_3_compat=True, + ) + app.mount('/rest', FastAPI(routes=rest_routes + agent_card_routes)) + + server = grpc.aio.server() + + compat_servicer = CompatGrpcHandler(handler) + a2a_v0_3_pb2_grpc.add_A2AServiceServicer_to_server(compat_servicer, server) + servicer = GrpcHandler(handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + + server.add_insecure_port(f'127.0.0.1:{grpc_port}') + await server.start() + + logger.info( + 'Starting ITK v10 Agent on HTTP port %s and gRPC port %s', + http_port, + grpc_port, + ) + + uvicorn_log_level = os.environ.get('ITK_LOG_LEVEL', 'INFO').lower() + config = uvicorn.Config( + app, host='127.0.0.1', port=http_port, log_level=uvicorn_log_level + ) + uvicorn_server = uvicorn.Server(config) + + await uvicorn_server.serve() + + +def main() -> None: + """Main entry point for the agent.""" + parser = argparse.ArgumentParser() + parser.add_argument('--httpPort', type=int, default=10102) + parser.add_argument('--grpcPort', type=int, default=11002) + args = parser.parse_args() + + asyncio.run(main_async(args.httpPort, args.grpcPort)) + + +if __name__ == '__main__': + main() diff --git a/itk/pyproject.toml b/itk/pyproject.toml new file mode 100644 index 000000000..e2c141a0e --- /dev/null +++ b/itk/pyproject.toml @@ -0,0 +1,21 @@ +[project] +name = "itk-python-v10-agent" +version = "0.1.0" +description = "ITK Python v1.0 Agent" +dependencies = [ + "a2a-sdk[sqlite,grpc,http-server]", + "fastapi", + "uvicorn", + "grpcio", + "grpcio-tools", + "protobuf", + "sse-starlette", + "httpx-sse", + "packaging", +] + +[tool.uv] +package = false + +[tool.uv.sources] +a2a-sdk = { path = ".." } diff --git a/itk/run_itk.sh b/itk/run_itk.sh new file mode 100755 index 000000000..2d9371c14 --- /dev/null +++ b/itk/run_itk.sh @@ -0,0 +1,182 @@ +#!/bin/bash +set -ex + +# Set default log level +export ITK_LOG_LEVEL="${ITK_LOG_LEVEL:-INFO}" + +# Initialize default exit code +RESULT=1 + +# Cleanup function to be called on exit +cleanup() { + set +x + echo "Cleaning up artifacts..." + docker stop itk-service > /dev/null 2>&1 || true + docker rm itk-service > /dev/null 2>&1 || true + docker rmi itk_service > /dev/null 2>&1 || true + rm -rf a2a-samples > /dev/null 2>&1 || true + rm -rf pyproto > /dev/null 2>&1 || true + rm -f instruction.proto > /dev/null 2>&1 || true + echo "Done. Final exit code: $RESULT" +} + +# Register cleanup function to run on script exit +trap cleanup EXIT + +# 1. Pull a2a-samples and checkout revision +: "${A2A_SAMPLES_REVISION:?A2A_SAMPLES_REVISION environment variable must be set}" + +if [ ! -d "a2a-samples" ]; then + git clone https://github.com/a2aproject/a2a-samples.git a2a-samples +fi +cd a2a-samples +git fetch origin +git checkout "$A2A_SAMPLES_REVISION" + +# Only pull if it's a branch (not a detached HEAD) +if git symbolic-ref -q HEAD > /dev/null; then + git pull origin "$A2A_SAMPLES_REVISION" +fi +cd .. + +# 2. Copy instruction.proto from a2a-samples +cp a2a-samples/itk/protos/instruction.proto ./instruction.proto + +# 3. Build pyproto library +mkdir -p pyproto +touch pyproto/__init__.py +uv run --with grpcio-tools python -m grpc_tools.protoc \ + -I. \ + --python_out=pyproto \ + --grpc_python_out=pyproto \ + instruction.proto + +# Fix imports in generated file +sed -i 's/^import instruction_pb2 as instruction__pb2/from . import instruction_pb2 as instruction__pb2/' pyproto/instruction_pb2_grpc.py + +# 4. Build jit itk_service docker image from root of a2a-samples/itk +# We run docker build from the itk directory inside a2a-samples +docker build -t itk_service a2a-samples/itk + +# 5. Start docker service +# Mounting a2a-python as repo and itk as current agent +A2A_PYTHON_ROOT=$(cd .. && pwd) +ITK_DIR=$(pwd) + +# Stop existing container if any +docker rm -f itk-service || true + +# Create logs directory if debug +if [ "${ITK_LOG_LEVEL^^}" = "DEBUG" ]; then + mkdir -p "$ITK_DIR/logs" +fi + +DOCKER_MOUNT_LOGS="" +if [ "${ITK_LOG_LEVEL^^}" = "DEBUG" ]; then + DOCKER_MOUNT_LOGS="-v $ITK_DIR/logs:/app/logs" +fi + +docker run -d --name itk-service \ + -v "$A2A_PYTHON_ROOT:/app/agents/repo" \ + -v "$ITK_DIR:/app/agents/repo/itk" \ + $DOCKER_MOUNT_LOGS \ + -e ITK_LOG_LEVEL="$ITK_LOG_LEVEL" \ + -p 8000:8000 \ + itk_service + +# 5.1. Fix dubious ownership for git (needed for uv-dynamic-versioning) +docker exec -u root itk-service git config --system --add safe.directory /app/agents/repo +docker exec -u root itk-service git config --system --add safe.directory /app/agents/repo/itk +docker exec -u root itk-service git config --system core.multiPackIndex false + +# 6. Verify service is up and send post request +MAX_RETRIES=30 +echo "Waiting for ITK service to start on 127.0.0.1:8000..." +set +e +for i in $(seq 1 $MAX_RETRIES); do + if curl -s http://127.0.0.1:8000/ > /dev/null; then + echo "Service is up!" + break + fi + echo "Still waiting... ($i/$MAX_RETRIES)" + sleep 2 +done + +# If we reached the end of the loop without success +if ! curl -s http://127.0.0.1:8000/ > /dev/null; then + echo "Error: ITK service failed to start on port 8000" + docker logs itk-service + exit 1 +fi + +echo "ITK Service is up! Sending compatibility test request..." +RESPONSE=$(curl -s -X POST http://127.0.0.1:8000/run \ + -H "Content-Type: application/json" \ + -d '{ + "tests": [ + { + "name": "Star Topology (Full) - JSONRPC & GRPC", + "sdks": ["current", "python_v10", "python_v03", "go_v10", "go_v03"], + "traversal": "euler", + "edges": ["0->1", "0->2", "0->3", "0->4", "1->0", "2->0", "3->0", "4->0"], + "protocols": ["jsonrpc", "grpc"] + }, + { + "name": "Star Topology (No Go v03) - HTTP_JSON", + "sdks": ["current", "python_v10", "python_v03", "go_v10"], + "traversal": "euler", + "edges": ["0->1", "0->2", "0->3", "1->0", "2->0", "3->0"], + "protocols": ["http_json"] + }, + { + "name": "Star Topology (Full) - JSONRPC & GRPC (Streaming)", + "sdks": ["current", "python_v10", "python_v03", "go_v10", "go_v03"], + "traversal": "euler", + "edges": ["0->1", "0->2", "0->3", "0->4", "1->0", "2->0", "3->0", "4->0"], + "protocols": ["jsonrpc", "grpc"], + "streaming": true + }, + { + "name": "Star Topology (No Go v03) - HTTP_JSON (Streaming)", + "sdks": ["current", "python_v10", "python_v03", "go_v10"], + "traversal": "euler", + "edges": ["0->1", "0->2", "0->3", "1->0", "2->0", "3->0"], + "protocols": ["http_json"], + "streaming": true + } + ] + }') + +echo "--------------------------------------------------------" +echo "ITK TEST RESULTS:" +echo "--------------------------------------------------------" +echo "$RESPONSE" | python3 -c " +import sys, json +try: + data = json.load(sys.stdin) + all_passed = data.get('all_passed', False) + results = data.get('results', {}) + for test, passed in results.items(): + status = 'PASSED' if passed else 'FAILED' + print(f'{test}: {status}') + print('--------------------------------------------------------') + print(f'OVERALL STATUS: {\"PASSED\" if all_passed else \"FAILED\"}') + if not all_passed: + sys.exit(1) +except Exception as e: + print(f'Error parsing results: {e}') + print(f'Raw response: {data if \"data\" in locals() else \"no data\"}') + sys.exit(1) +" +RESULT=$? +set -e + +if [ $RESULT -ne 0 ]; then + echo "Tests failed. Container logs:" + docker logs itk-service +fi +echo "--------------------------------------------------------" + +# Final exit result will be captured by trap cleanup +exit $RESULT + diff --git a/pyproject.toml b/pyproject.toml index c71a970d2..724749865 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -13,6 +13,10 @@ dependencies = [ "pydantic>=2.11.3", "protobuf>=5.29.5", "google-api-core>=1.26.0", + "json-rpc>=1.15.0", + "googleapis-common-protos>=1.70.0", + "culsans>=0.11.0 ; python_full_version < '3.13'", + "packaging>=24.0", ] classifiers = [ @@ -23,20 +27,22 @@ classifiers = [ "Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.12", "Programming Language :: Python :: 3.13", + "Programming Language :: Python :: 3.14", "Operating System :: OS Independent", "Topic :: Software Development :: Libraries :: Python Modules", "License :: OSI Approved :: Apache Software License", ] [project.optional-dependencies] -http-server = ["fastapi>=0.115.2", "sse-starlette", "starlette"] +http-server = ["sse-starlette", "starlette"] encryption = ["cryptography>=43.0.0"] -grpc = ["grpcio>=1.60", "grpcio-tools>=1.60", "grpcio_reflection>=1.7.0"] +grpc = ["grpcio>=1.60", "grpcio-tools>=1.60", "grpcio-status>=1.60", "grpcio_reflection>=1.7.0"] telemetry = ["opentelemetry-api>=1.33.0", "opentelemetry-sdk>=1.33.0"] postgresql = ["sqlalchemy[asyncio,postgresql-asyncpg]>=2.0.0"] mysql = ["sqlalchemy[asyncio,aiomysql]>=2.0.0"] signing = ["PyJWT>=2.0.0"] sqlite = ["sqlalchemy[asyncio,aiosqlite]>=2.0.0"] +db-cli = ["alembic>=1.14.0"] vertex = ["google-cloud-aiplatform>=1.140.0"] sql = ["a2a-sdk[postgresql,mysql,sqlite]"] @@ -48,6 +54,7 @@ all = [ "a2a-sdk[grpc]", "a2a-sdk[telemetry]", "a2a-sdk[signing]", + "a2a-sdk[db-cli]", "a2a-sdk[vertex]", ] @@ -61,6 +68,7 @@ documentation = "https://a2a-protocol.org/latest/sdk/python/" requires = ["hatchling", "uv-dynamic-versioning"] build-backend = "hatchling.build" + [tool.hatch.version] source = "uv-dynamic-versioning" @@ -79,6 +87,16 @@ markers = [ "asyncio: mark a test as a coroutine that should be run by pytest-asyncio", "xdist_group: mark a test to run in a specific sequential group for isolation", ] +filterwarnings = [ + # SQLAlchemy warning about duplicate class registration - this is a known limitation + # of the dynamic model creation pattern used in models.py for custom table names + "ignore:This declarative base already contains a class with the same class name:sqlalchemy.exc.SAWarning", + # ResourceWarnings from asyncio event loop/socket cleanup during garbage collection + # These appear intermittently between tests due to pytest-asyncio and sse-starlette timing + "ignore:unclosed event loop:ResourceWarning", + "ignore:unclosed transport:ResourceWarning", + "ignore:unclosed =0.30.0", + "fastapi>=0.115.2", "mypy>=1.15.0", + "PyJWT>=2.0.0", "pytest>=8.3.5", "pytest-asyncio>=0.26.0", "pytest-cov>=6.1.1", @@ -105,6 +124,7 @@ dev = [ "trio", "uvicorn>=0.35.0", "pytest-timeout>=2.4.0", + "pyright", "a2a-sdk[all]", ] @@ -119,7 +139,7 @@ a2a-sdk = { workspace = true } [tool.mypy] plugins = ["pydantic.mypy"] -exclude = ["src/a2a/grpc/"] +exclude = ["src/a2a/types/a2a_pb2\\.py", "src/a2a/types/a2a_pb2_grpc\\.py"] disable_error_code = [ "import-not-found", "annotation-unchecked", @@ -139,10 +159,12 @@ exclude = [ "**/node_modules", "**/venv", "**/.venv", - "src/a2a/grpc/", + "src/a2a/types", + "src/a2a/compat/v0_3/*_pb2*.py", + "src/a2a/compat/v0_3/proto_utils.py", ] -reportMissingImports = "none" -reportMissingModuleSource = "none" +venvPath = "." +venv = ".venv" [tool.coverage.run] branch = true @@ -150,7 +172,9 @@ omit = [ "*/tests/*", "*/site-packages/*", "*/__init__.py", - "src/a2a/grpc/*", + "src/a2a/types/a2a_pb2.py", + "src/a2a/types/a2a_pb2_grpc.py", + "src/a2a/compat/*/*_pb2*.py", ] [tool.coverage.report] @@ -176,6 +200,8 @@ indent-width = 4 # Google Style Guide §3.4: 4 spaces target-version = "py310" # Minimum Python version [tool.ruff.lint] +preview = true +explicit-preview-rules = true ignore = [ "COM812", # Trailing comma missing. "FBT001", # Boolean positional arg in function definition @@ -262,7 +288,12 @@ exclude = [ "node_modules", "venv", "*/migrations/*", - "src/a2a/grpc/**", + "src/a2a/types/a2a_pb2.py", + "src/a2a/types/a2a_pb2.pyi", + "src/a2a/types/a2a_pb2_grpc.py", + "src/a2a/compat/v0_3/*_pb2.py", + "src/a2a/compat/v0_3/*_pb2.pyi", + "src/a2a/compat/v0_3/*_pb2_grpc.py", "tests/**", ] @@ -316,9 +347,28 @@ inline-quotes = "single" [tool.ruff.format] exclude = [ - "src/a2a/grpc/**", + "src/a2a/types/a2a_pb2.py", + "src/a2a/types/a2a_pb2.pyi", + "src/a2a/types/a2a_pb2_grpc.py", + "src/a2a/compat/v0_3/*_pb2.py", + "src/a2a/compat/v0_3/*_pb2.pyi", + "src/a2a/compat/v0_3/*_pb2_grpc.py", ] docstring-code-format = true docstring-code-line-length = "dynamic" quote-style = "single" indent-style = "space" + + +[tool.alembic] + +# path to migration scripts. +script_location = "src/a2a/migrations" + +# additional paths to be prepended to sys.path. defaults to the current working directory. +prepend_sys_path = [ + "src" +] + +[project.scripts] +a2a-db = "a2a.a2a_db_cli:run_migrations" diff --git a/release-please-config.json b/release-please-config.json new file mode 100644 index 000000000..063b8435a --- /dev/null +++ b/release-please-config.json @@ -0,0 +1,9 @@ +{ + "release-type": "python", + "prerelease": true, + "prerelease-type": "alpha", + "versioning": "prerelease", + "packages": { + ".": {} + } +} diff --git a/samples/README.md b/samples/README.md new file mode 100644 index 000000000..e61264955 --- /dev/null +++ b/samples/README.md @@ -0,0 +1,58 @@ +# A2A Python SDK — Samples + +This directory contains runnable examples demonstrating how to build and interact with an A2A-compliant agent using the Python SDK. + +## Contents + +| File | Role | Description | +|---|---|---| +| `hello_world_agent.py` | **Server** | A2A agent server | +| `cli.py` | **Client** | Interactive terminal client | + +The samples are designed to work together out of the box: the agent listens on `http://127.0.0.1:41241`, which is the default URL used by the client. +--- + +## `hello_world_agent.py` — Agent Server + +Implements an A2A agent that responds to simple greeting messages (e.g., "hello", "how are you", "bye") with text replies, simulating a 1-second processing delay. + +Demonstrates: +- Subclassing `AgentExecutor` and implementing `execute()` / `cancel()` +- Publishing streaming status updates and artifacts via `TaskUpdater` +- Exposing all three transports in both protocol versions (v1.0 and v0.3 compat) simultaneously: + - **JSON-RPC** (v1.0 and v0.3) at `http://127.0.0.1:41241/a2a/jsonrpc` + - **HTTP+JSON (REST)** (v1.0 and v0.3) at `http://127.0.0.1:41241/a2a/rest` + - **gRPC v1.0** on port `50051` + - **gRPC v0.3 (compat)** on port `50052` +- Serving the agent card at `http://127.0.0.1:41241/.well-known/agent-card.json` + +**Run:** + +```bash +uv run python samples/hello_world_agent.py +``` + +--- + +## `cli.py` — Client + +An interactive terminal client with full visibility into the streaming event flow. Each `TaskStatusUpdate` and `TaskArtifactUpdate` event is printed as it arrives. + +Features: +- Transport selection via `--transport` flag (`JSONRPC`, `HTTP+JSON`, `GRPC`) +- Session management (`context_id` persisted across messages, `task_id` per task) +- Graceful error handling for HTTP and gRPC failures + +**Run:** + +```bash +# Connect to the local hello_world_agent (default): +uv run python samples/cli.py + +# Connect to a different URL, using gRPC: +uv run python samples/cli.py --url http://192.168.1.10:41241 --transport GRPC +``` + +Then type a message like `hello` and press Enter. + +Type `/quit` or `/exit` to stop, or press `Ctrl+C`. diff --git a/samples/__init__.py b/samples/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/samples/cli.py b/samples/cli.py new file mode 100644 index 000000000..beff26aa9 --- /dev/null +++ b/samples/cli.py @@ -0,0 +1,135 @@ +import argparse +import asyncio +import os +import signal +import uuid + +from typing import Any + +import grpc +import httpx + +from a2a.client import A2ACardResolver, ClientConfig, create_client +from a2a.helpers import get_artifact_text, get_message_text +from a2a.helpers.agent_card import display_agent_card +from a2a.types import Message, Part, Role, SendMessageRequest, TaskState + + +async def _handle_stream( + stream: Any, current_task_id: str | None +) -> str | None: + async for event in stream: + if event.HasField('message'): + print('Message:', get_message_text(event.message, delimiter=' ')) + return None + + if not current_task_id: + if event.HasField('task'): + current_task_id = event.task.id + print('--- Task Started ---') + print(f'Task [state={TaskState.Name(event.task.status.state)}]') + else: + raise ValueError(f'Unexpected first event: {event}') + + if event.HasField('status_update'): + state_name = TaskState.Name(event.status_update.status.state) + message_text = ( + ': ' + + get_message_text( + event.status_update.status.message, delimiter=' ' + ) + if event.status_update.status.HasField('message') + else '' + ) + print(f'TaskStatusUpdate [state={state_name}]{message_text}') + if state_name in ( + 'TASK_STATE_COMPLETED', + 'TASK_STATE_FAILED', + 'TASK_STATE_CANCELED', + 'TASK_STATE_REJECTED', + ): + current_task_id = None + print('--- Task Finished ---') + elif event.HasField('artifact_update'): + print( + f'TaskArtifactUpdate [name={event.artifact_update.artifact.name}]:', + get_artifact_text( + event.artifact_update.artifact, delimiter=' ' + ), + ) + return current_task_id + + +async def main() -> None: + """Run the A2A terminal client.""" + parser = argparse.ArgumentParser(description='A2A Terminal Client') + parser.add_argument( + '--url', default='http://127.0.0.1:41241', help='Agent base URL' + ) + parser.add_argument( + '--transport', + default=None, + help='Preferred transport (JSONRPC, HTTP+JSON, GRPC)', + ) + args = parser.parse_args() + + config = ClientConfig( + grpc_channel_factory=grpc.aio.insecure_channel, + ) + if args.transport: + config.supported_protocol_bindings = [args.transport] + + print( + f'Connecting to {args.url} (preferred transport: {args.transport or "Any"})' + ) + + async with httpx.AsyncClient() as httpx_client: + resolver = A2ACardResolver(httpx_client, args.url) + card = await resolver.get_agent_card() + print('\n✓ Agent Card Found:') + display_agent_card(card) + + client = await create_client(card, client_config=config) + + actual_transport = getattr(client, '_transport', client) + print(f' Picked Transport: {actual_transport.__class__.__name__}') + + print('\nConnected! Send a message or type /quit to exit.') + + current_task_id = None + current_context_id = str(uuid.uuid4()) + + while True: + try: + loop = asyncio.get_running_loop() + user_input = await loop.run_in_executor(None, input, 'You: ') + except KeyboardInterrupt: + break + + if user_input.lower() in ('/quit', '/exit'): + break + if not user_input.strip(): + continue + + message = Message( + role=Role.ROLE_USER, + message_id=str(uuid.uuid4()), + parts=[Part(text=user_input)], + task_id=current_task_id, + context_id=current_context_id, + ) + + request = SendMessageRequest(message=message) + + try: + stream = client.send_message(request) + current_task_id = await _handle_stream(stream, current_task_id) + except (httpx.RequestError, grpc.RpcError) as e: + print(f'Error communicating with agent: {e}') + + await client.close() + + +if __name__ == '__main__': + signal.signal(signal.SIGINT, lambda sig, frame: os._exit(0)) + asyncio.run(main()) diff --git a/samples/hello_world_agent.py b/samples/hello_world_agent.py new file mode 100644 index 000000000..a6e589ac0 --- /dev/null +++ b/samples/hello_world_agent.py @@ -0,0 +1,275 @@ +import argparse +import asyncio +import contextlib +import logging + +import grpc +import uvicorn + +from fastapi import FastAPI + +from a2a.compat.v0_3 import a2a_v0_3_pb2_grpc +from a2a.compat.v0_3.grpc_handler import CompatGrpcHandler +from a2a.server.agent_execution.agent_executor import AgentExecutor +from a2a.server.agent_execution.context import RequestContext +from a2a.server.events.event_queue import EventQueue +from a2a.server.request_handlers import DefaultRequestHandler, GrpcHandler +from a2a.server.routes import ( + create_agent_card_routes, + create_jsonrpc_routes, + create_rest_routes, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.server.tasks.task_updater import TaskUpdater +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + AgentProvider, + AgentSkill, + Part, + Task, + TaskState, + TaskStatus, + a2a_pb2_grpc, +) + + +logger = logging.getLogger(__name__) + + +class SampleAgentExecutor(AgentExecutor): + """Sample agent executor logic similar to the a2a-js sample.""" + + def __init__(self) -> None: + self.running_tasks: set[str] = set() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Cancels a task.""" + task_id = context.task_id + if task_id in self.running_tasks: + self.running_tasks.remove(task_id) + + updater = TaskUpdater( + event_queue=event_queue, + task_id=task_id or '', + context_id=context.context_id or '', + ) + await updater.cancel() + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + """Executes a task inline.""" + user_message = context.message + task_id = context.task_id + context_id = context.context_id + + if not user_message or not task_id or not context_id: + return + + self.running_tasks.add(task_id) + + logger.info( + '[SampleAgentExecutor] Processing message %s for task %s (context: %s)', + user_message.message_id, + task_id, + context_id, + ) + + await event_queue.enqueue_event( + Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + history=[user_message], + ) + ) + + updater = TaskUpdater( + event_queue=event_queue, + task_id=task_id, + context_id=context_id, + ) + + working_message = updater.new_agent_message( + parts=[Part(text='Processing your question...')] + ) + await updater.start_work(message=working_message) + + query = context.get_user_input() + + agent_reply_text = self._parse_input(query) + await asyncio.sleep(1) + + if task_id not in self.running_tasks: + return + + await updater.add_artifact( + parts=[Part(text=agent_reply_text)], + name='response', + last_chunk=True, + ) + await updater.complete() + + logger.info( + '[SampleAgentExecutor] Task %s finished with state: completed', + task_id, + ) + + def _parse_input(self, query: str) -> str: + if not query: + return 'Hello! Please provide a message for me to respond to.' + + ql = query.lower() + if 'hello' in ql or 'hi' in ql: + return 'Hello World! Nice to meet you!' + if 'how are you' in ql: + return ( + "I'm doing great! Thanks for asking. How can I help you today?" + ) + if 'goodbye' in ql or 'bye' in ql: + return 'Goodbye! Have a wonderful day!' + return f"Hello World! You said: '{query}'. Thanks for your message!" + + +async def serve( + host: str = '127.0.0.1', + port: int = 41241, + grpc_port: int = 50051, + compat_grpc_port: int = 50052, +) -> None: + """Run the Sample Agent server with mounted JSON-RPC, HTTP+JSON and gRPC transports.""" + agent_card = AgentCard( + name='Sample Agent', + description='A sample agent to test the stream functionality.', + provider=AgentProvider( + organization='A2A Samples', url='https://example.com' + ), + version='1.0.0', + capabilities=AgentCapabilities( + streaming=True, push_notifications=False + ), + default_input_modes=['text'], + default_output_modes=['text', 'task-status'], + skills=[ + AgentSkill( + id='sample_agent', + name='Sample Agent', + description='Say hi.', + tags=['sample'], + examples=['hi'], + input_modes=['text'], + output_modes=['text', 'task-status'], + ) + ], + supported_interfaces=[ + AgentInterface( + protocol_binding='GRPC', + protocol_version='1.0', + url=f'{host}:{grpc_port}', + ), + AgentInterface( + protocol_binding='GRPC', + protocol_version='0.3', + url=f'{host}:{compat_grpc_port}', + ), + AgentInterface( + protocol_binding='JSONRPC', + protocol_version='1.0', + url=f'http://{host}:{port}/a2a/jsonrpc', + ), + AgentInterface( + protocol_binding='JSONRPC', + protocol_version='0.3', + url=f'http://{host}:{port}/a2a/jsonrpc', + ), + AgentInterface( + protocol_binding='HTTP+JSON', + protocol_version='1.0', + url=f'http://{host}:{port}/a2a/rest', + ), + AgentInterface( + protocol_binding='HTTP+JSON', + protocol_version='0.3', + url=f'http://{host}:{port}/a2a/rest', + ), + ], + ) + + task_store = InMemoryTaskStore() + request_handler = DefaultRequestHandler( + agent_executor=SampleAgentExecutor(), + task_store=task_store, + agent_card=agent_card, + ) + + rest_routes = create_rest_routes( + request_handler=request_handler, + path_prefix='/a2a/rest', + enable_v0_3_compat=True, + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=request_handler, + rpc_url='/a2a/jsonrpc', + enable_v0_3_compat=True, + ) + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, + ) + app = FastAPI() + app.routes.extend(jsonrpc_routes) + app.routes.extend(agent_card_routes) + app.routes.extend(rest_routes) + + grpc_server = grpc.aio.server() + grpc_server.add_insecure_port(f'{host}:{grpc_port}') + servicer = GrpcHandler(request_handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, grpc_server) + + compat_grpc_server = grpc.aio.server() + compat_grpc_server.add_insecure_port(f'{host}:{compat_grpc_port}') + compat_servicer = CompatGrpcHandler(request_handler) + a2a_v0_3_pb2_grpc.add_A2AServiceServicer_to_server( + compat_servicer, compat_grpc_server + ) + + config = uvicorn.Config(app, host=host, port=port) + uvicorn_server = uvicorn.Server(config) + + logger.info('Starting Sample Agent servers:') + logger.info(' - HTTP on http://%s:%s', host, port) + logger.info(' - gRPC on %s:%s', host, grpc_port) + logger.info(' - gRPC (v0.3 compat) on %s:%s', host, compat_grpc_port) + logger.info( + 'Agent Card available at http://%s:%s/.well-known/agent-card.json', + host, + port, + ) + + await asyncio.gather( + grpc_server.start(), + compat_grpc_server.start(), + uvicorn_server.serve(), + ) + + +if __name__ == '__main__': + logging.basicConfig(level=logging.INFO) + parser = argparse.ArgumentParser(description='Sample A2A agent server') + parser.add_argument('--host', default='127.0.0.1') + parser.add_argument('--port', type=int, default=41241) + parser.add_argument('--grpc-port', type=int, default=50051) + parser.add_argument('--compat-grpc-port', type=int, default=50052) + args = parser.parse_args() + with contextlib.suppress(KeyboardInterrupt): + asyncio.run( + serve( + host=args.host, + port=args.port, + grpc_port=args.grpc_port, + compat_grpc_port=args.compat_grpc_port, + ) + ) diff --git a/scripts/checkout_experimental_types.sh b/scripts/checkout_experimental_types.sh deleted file mode 100755 index a598afaff..000000000 --- a/scripts/checkout_experimental_types.sh +++ /dev/null @@ -1,98 +0,0 @@ -#!/bin/bash - -# Exit immediately if a command exits with a non-zero status. -# Treat unset variables as an error. -set -euo pipefail - -A2A_SPEC_REPO="https://github.com/a2aproject/A2A.git" # URL for the A2A spec repo. -A2A_SPEC_BRANCH="main" # Name of the branch with experimental changes. -FEATURE_BRANCH="experimental-types" # Name of the feature branch to create. -ROOT_DIR=$(git rev-parse --show-toplevel) - -usage() { - cat <&2 - usage - exit 1 - ;; - esac -done - - -TMP_WORK_DIR=$(mktemp -d) -echo "Created a temporary working directory: $TMP_WORK_DIR" -trap 'rm -rf -- "$TMP_WORK_DIR"' EXIT -cd $TMP_WORK_DIR - -echo "Cloning the \"$A2A_SPEC_REPO\" repository..." -git clone $A2A_SPEC_REPO spec_repo -cd spec_repo - -echo "Checking out the \"$A2A_SPEC_BRANCH\" branch..." -git checkout "$A2A_SPEC_BRANCH" - -echo "Invoking the generate_types.sh script..." -GENERATED_FILE="$ROOT_DIR/src/a2a/types.py" -$ROOT_DIR/scripts/generate_types.sh "$GENERATED_FILE" --input-file "$TMP_WORK_DIR/spec_repo/specification/json/a2a.json" - - -echo "Running buf generate..." -cd "$ROOT_DIR" -buf generate -uv run "$ROOT_DIR/scripts/grpc_gen_post_processor.py" - - -echo "Committing generated types file to the \"$FEATURE_BRANCH\" branch..." -git checkout -b "$FEATURE_BRANCH" -git add "$GENERATED_FILE" "$ROOT_DIR/src/a2a/grpc" -git commit -m "Experimental types" diff --git a/scripts/gen_proto.sh b/scripts/gen_proto.sh new file mode 100755 index 000000000..34ff96ae0 --- /dev/null +++ b/scripts/gen_proto.sh @@ -0,0 +1,28 @@ +#!/bin/bash +set -e + +# Run buf generate to regenerate protobuf code and OpenAPI spec +npx --yes @bufbuild/buf generate + +# The OpenAPI generator produces a file named like 'a2a.swagger.json' or similar. +# We need it to be 'a2a.json' for the A2A SDK. +# Find the generated json file in the output directory +generated_json=$(find src/a2a/types -name "*.swagger.json" -print -quit) + +if [ -n "$generated_json" ]; then + echo "Renaming $generated_json to src/a2a/types/a2a.json" + mv "$generated_json" src/a2a/types/a2a.json +else + echo "Warning: No Swagger JSON generated." +fi + +# Fix imports in generated grpc file +echo "Fixing imports in src/a2a/types/a2a_pb2_grpc.py" +sed 's/import a2a_pb2 as a2a__pb2/from . import a2a_pb2 as a2a__pb2/g' src/a2a/types/a2a_pb2_grpc.py > src/a2a/types/a2a_pb2_grpc.py.tmp && mv src/a2a/types/a2a_pb2_grpc.py.tmp src/a2a/types/a2a_pb2_grpc.py + +# Download legacy v0.3 compatibility protobuf code +echo "Downloading legacy v0.3 proto file..." +# Commit hash was selected as a2a.proto version from 0.3 branch with latests fixes. +curl -o src/a2a/compat/v0_3/a2a_v0_3.proto https://raw.githubusercontent.com/a2aproject/A2A/b3b266d127dde3d1000ec103b252d1de81289e83/specification/grpc/a2a.proto + + diff --git a/scripts/gen_proto_compat.sh b/scripts/gen_proto_compat.sh new file mode 100755 index 000000000..c85d2efe2 --- /dev/null +++ b/scripts/gen_proto_compat.sh @@ -0,0 +1,10 @@ +#!/bin/bash +set -e + +# Generate legacy v0.3 compatibility protobuf code +echo "Generating legacy v0.3 compatibility protobuf code" +npx --yes @bufbuild/buf generate src/a2a/compat/v0_3 --template buf.compat.gen.yaml + +# Fix imports in legacy generated grpc file +echo "Fixing imports in src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py" +sed 's/import a2a_v0_3_pb2 as a2a__v0__3__pb2/from . import a2a_v0_3_pb2 as a2a__v0__3__pb2/g' src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py > src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py.tmp && mv src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py.tmp src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py diff --git a/scripts/generate_types.sh b/scripts/generate_types.sh deleted file mode 100755 index 6c01cff57..000000000 --- a/scripts/generate_types.sh +++ /dev/null @@ -1,134 +0,0 @@ -#!/bin/bash - -# Exit immediately if a command exits with a non-zero status. -# Treat unset variables as an error. -set -euo pipefail - -# A2A specification version to use -# Can be overridden via environment variable: A2A_SPEC_VERSION=v1.2.0 ./generate_types.sh -# Or via command-line flag: ./generate_types.sh --version v1.2.0 output.py -# Use a specific git tag, branch name, or commit SHA -# Examples: "v1.0.0", "v1.2.0", "main", "abc123def" -A2A_SPEC_VERSION="${A2A_SPEC_VERSION:-v0.3.0}" - -# Build URL based on version format -# Tags use /refs/tags/, branches use /refs/heads/, commits use direct ref -build_remote_url() { - local version="$1" - local base_url="https://raw.githubusercontent.com/a2aproject/A2A" - local spec_path="specification/json/a2a.json" - local url_part - - if [[ "$version" =~ ^v[0-9]+\.[0-9]+\.[0-9]+$ ]]; then - # Looks like a version tag (v1.0.0, v1.2.3) - url_part="refs/tags/${version}" - elif [[ "$version" =~ ^[0-9a-f]{7,40}$ ]]; then - # Looks like a commit SHA (7+ hex chars) - url_part="${version}" - else - # Assume it's a branch name (main, develop, etc.) - url_part="refs/heads/${version}" - fi - echo "${base_url}/${url_part}/${spec_path}" -} - -REMOTE_URL=$(build_remote_url "$A2A_SPEC_VERSION") - -GENERATED_FILE="" -INPUT_FILE="" - -# Parse command-line arguments -while [[ $# -gt 0 ]]; do - case "$1" in - --input-file) - INPUT_FILE="$2" - shift 2 - ;; - --version) - A2A_SPEC_VERSION="$2" - REMOTE_URL=$(build_remote_url "$A2A_SPEC_VERSION") - shift 2 - ;; - *) - GENERATED_FILE="$1" - shift 1 - ;; - esac -done - -if [ -z "$GENERATED_FILE" ]; then - cat >&2 <] [--version ] -Options: - --input-file Use a local JSON schema file instead of fetching from remote - --version Specify A2A spec version (default: v0.3.0) - Can be a git tag (v1.0.0), branch (main), or commit SHA -Environment variables: - A2A_SPEC_VERSION Override default spec version -Examples: - $0 src/a2a/types.py - $0 --version v1.2.0 src/a2a/types.py - $0 --input-file local/a2a.json src/a2a/types.py - A2A_SPEC_VERSION=main $0 src/a2a/types.py -EOF - exit 1 -fi - -echo "Running datamodel-codegen..." -declare -a source_args -if [ -n "$INPUT_FILE" ]; then - echo " - Source File: $INPUT_FILE" - if [ ! -f "$INPUT_FILE" ]; then - echo "Error: Input file does not exist: $INPUT_FILE" >&2 - exit 1 - fi - source_args=("--input" "$INPUT_FILE") -else - echo " - A2A Spec Version: $A2A_SPEC_VERSION" - echo " - Source URL: $REMOTE_URL" - - # Validate that the remote URL is accessible - echo " - Validating remote URL..." - if ! curl --fail --silent --head "$REMOTE_URL" >/dev/null 2>&1; then - cat >&2 < None: - """Post processor for the generated code.""" - dir_path = Path(src_folder) - print(dir_path) - if not dir_path.is_dir(): - print('Source folder not found') - sys.exit(1) - - grpc_pattern = '**/*_pb2_grpc.py' - files = dir_path.glob(grpc_pattern) - - for file in files: - print(f'Processing {file}') - try: - with file.open('r', encoding='utf-8') as f: - src_content = f.read() - - # Change import a2a_pb2 as a2a__pb2 - import_pattern = r'^import (\w+_pb2) as (\w+__pb2)$' - # to from . import a2a_pb2 as a2a__pb2 - replacement_pattern = r'from . import \1 as \2' - - fixed_src_content = re.sub( - import_pattern, - replacement_pattern, - src_content, - flags=re.MULTILINE, - ) - - if fixed_src_content != src_content: - with file.open('w', encoding='utf-8') as f: - f.write(fixed_src_content) - print('Imports fixed') - else: - print('No changes needed') - - except Exception as e: # noqa: BLE001 - print(f'Error processing file {file}: {e}') - sys.exit(1) - - -if __name__ == '__main__': - process_generated_code() diff --git a/scripts/lint.sh b/scripts/lint.sh new file mode 100755 index 000000000..5fd7c2177 --- /dev/null +++ b/scripts/lint.sh @@ -0,0 +1,60 @@ +#!/bin/bash +# Local replica of .github/workflows/linter.yaml (excluding jscpd copy-paste check) + +# ANSI color codes for premium output +RED='\033[0;31m' +GREEN='\033[0;32m' +YELLOW='\033[1;33m' +BLUE='\033[0;34m' +BOLD='\033[1m' +NC='\033[0m' # No Color + +FAILED=0 + +echo -e "${BLUE}${BOLD}=== A2A Python Fixed-and-Lint Suite ===${NC}" +echo -e "Fixing formatting and linting issues, then verifying types...\n" + +# 1. Ruff Linter (with fix) +echo -e "${YELLOW}${BOLD}--- [1/4] Running Ruff Linter (fix) ---${NC}" +if uv run ruff check --fix; then + echo -e "${GREEN}✓ Ruff Linter passed (and fixed what it could)${NC}" +else + echo -e "${RED}✗ Ruff Linter failed${NC}" + FAILED=1 +fi + +# 2. Ruff Formatter +echo -e "\n${YELLOW}${BOLD}--- [2/4] Running Ruff Formatter (apply) ---${NC}" +if uv run ruff format; then + echo -e "${GREEN}✓ Ruff Formatter applied${NC}" +else + echo -e "${RED}✗ Ruff Formatter failed${NC}" + FAILED=1 +fi + +# 3. MyPy Type Checker +echo -e "\n${YELLOW}${BOLD}--- [3/4] Running MyPy Type Checker ---${NC}" +if uv run mypy src; then + echo -e "${GREEN}✓ MyPy passed${NC}" +else + echo -e "${RED}✗ MyPy failed${NC}" + FAILED=1 +fi + +# 4. Pyright Type Checker +echo -e "\n${YELLOW}${BOLD}--- [4/4] Running Pyright ---${NC}" +if uv run pyright; then + echo -e "${GREEN}✓ Pyright passed${NC}" +else + echo -e "${RED}✗ Pyright failed${NC}" + FAILED=1 +fi + +echo -e "\n${BLUE}${BOLD}=========================================${NC}" +if [ $FAILED -eq 0 ]; then + echo -e "${GREEN}${BOLD}SUCCESS: All linting and formatting tasks complete!${NC}" + exit 0 +else + echo -e "${RED}${BOLD}FAILURE: One or more steps failed.${NC}" + exit 1 +fi diff --git a/scripts/test_minimal_install.py b/scripts/test_minimal_install.py new file mode 100755 index 000000000..84e3ee3fc --- /dev/null +++ b/scripts/test_minimal_install.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python3 +"""Smoke test for minimal (base-only) installation of a2a-sdk. + +This script verifies that all core public API modules can be imported +when only the base dependencies are installed (no optional extras). + +It is designed to run WITHOUT pytest or any dev dependencies -- just +a clean venv with `pip install a2a-sdk`. + +Usage: + python scripts/test_minimal_install.py + +Exit codes: + 0 - All core imports succeeded + 1 - One or more core imports failed +""" + +from __future__ import annotations + +import importlib +import sys + + +# Core modules that MUST be importable with only base dependencies. +# These are the public API surface that every user gets with +# `pip install a2a-sdk` (no extras). +# +# Do NOT add modules here that require optional extras (grpc, +# http-server, sql, signing, telemetry, vertex, etc.). +# Those modules are expected to fail without their extras installed +# and should use try/except ImportError guards internally. +CORE_MODULES = [ + 'a2a', + 'a2a.client', + 'a2a.client.auth', + 'a2a.client.base_client', + 'a2a.client.card_resolver', + 'a2a.client.client', + 'a2a.client.client_factory', + 'a2a.client.errors', + 'a2a.client.interceptors', + 'a2a.client.optionals', + 'a2a.client.transports', + 'a2a.server', + 'a2a.server.agent_execution', + 'a2a.server.context', + 'a2a.server.events', + 'a2a.server.request_handlers', + 'a2a.server.tasks', + 'a2a.types', + 'a2a.utils', + 'a2a.utils.constants', + 'a2a.utils.error_handlers', + 'a2a.utils.version_validator', + 'a2a.utils.proto_utils', + 'a2a.utils.task', + 'a2a.helpers.agent_card', + 'a2a.helpers.proto_helpers', +] + + +def main() -> int: + failures: list[str] = [] + successes: list[str] = [] + + for module_name in CORE_MODULES: + try: + importlib.import_module(module_name) + successes.append(module_name) + except Exception as e: # noqa: BLE001, PERF203 + failures.append(f'{module_name}: {e}') + + print(f'Tested {len(CORE_MODULES)} core modules') + print(f' Passed: {len(successes)}') + print(f' Failed: {len(failures)}') + + if failures: + print('\nFAILED imports:') + for failure in failures: + print(f' - {failure}') + return 1 + + print('\nAll core modules imported successfully.') + return 0 + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/src/a2a/a2a_db_cli.py b/src/a2a/a2a_db_cli.py new file mode 100644 index 000000000..1da69a7be --- /dev/null +++ b/src/a2a/a2a_db_cli.py @@ -0,0 +1,164 @@ +import argparse +import logging +import os + +from importlib.resources import files + + +try: + from alembic import command + from alembic.config import Config + +except ImportError as e: + raise ImportError( + "CLI requires Alembic. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + + +def _add_shared_args( + parser: argparse.ArgumentParser, is_sub: bool = False +) -> None: + """Add common arguments to the given parser.""" + prefix = 'sub_' if is_sub else '' + parser.add_argument( + '--database-url', + dest=f'{prefix}database_url', + help='Database URL to use for the migrations. If not set, the DATABASE_URL environment variable will be used.', + ) + parser.add_argument( + '--tasks-table', + dest=f'{prefix}tasks_table', + help='Custom tasks table to update. If not set, the default is "tasks".', + ) + parser.add_argument( + '--push-notification-configs-table', + dest=f'{prefix}push_notification_configs_table', + help='Custom push notification configs table to update. If not set, the default is "push_notification_configs".', + ) + parser.add_argument( + '-v', + '--verbose', + dest=f'{prefix}verbose', + help='Enable verbose output (sets sqlalchemy.engine logging to INFO)', + action='store_true', + ) + parser.add_argument( + '--sql', + dest=f'{prefix}sql', + help='Run migrations in sql mode (generate SQL instead of executing)', + action='store_true', + ) + + +def create_parser() -> argparse.ArgumentParser: + """Create the argument parser for the migration tool.""" + parser = argparse.ArgumentParser(description='A2A Database Migration Tool') + + # Global options + parser.add_argument( + '--add_columns_owner_last_updated-default-owner', + dest='owner', + help="Value for the 'owner' column (used in specific migrations). If not set defaults to 'legacy_v03_no_user_info'", + ) + _add_shared_args(parser) + + subparsers = parser.add_subparsers(dest='cmd', help='Migration command') + + # Upgrade command + up_parser = subparsers.add_parser( + 'upgrade', help='Upgrade to a later version' + ) + up_parser.add_argument( + 'revision', + nargs='?', + default='head', + help='Revision target (default: head)', + ) + up_parser.add_argument( + '--add_columns_owner_last_updated-default-owner', + dest='sub_owner', + help="Value for the 'owner' column (used in specific migrations). If not set defaults to 'legacy_v03_no_user_info'", + ) + _add_shared_args(up_parser, is_sub=True) + + # Downgrade command + down_parser = subparsers.add_parser( + 'downgrade', help='Revert to a previous version' + ) + down_parser.add_argument( + 'revision', + nargs='?', + default='base', + help='Revision target (e.g., -1, base or a specific ID)', + ) + _add_shared_args(down_parser, is_sub=True) + + # Current command + current_parser = subparsers.add_parser( + 'current', help='Display the current revision for a database' + ) + _add_shared_args(current_parser, is_sub=True) + + return parser + + +def run_migrations() -> None: + """CLI tool to manage database migrations.""" + # Configure logging to show INFO messages + logging.basicConfig(level=logging.INFO, format='%(levelname)s %(message)s') + + parser = create_parser() + args = parser.parse_args() + + # Default to upgrade head if no command is provided + if not args.cmd: + args.cmd = 'upgrade' + args.revision = 'head' + + # Locate the bundled alembic.ini + ini_path = files('a2a').joinpath('alembic.ini') + cfg = Config(str(ini_path)) + + # Dynamically set the script location + migrations_path = files('a2a').joinpath('migrations') + cfg.set_main_option('script_location', str(migrations_path)) + + # Consolidate owner, db_url, tables, verbose and sql values + owner = args.owner or getattr(args, 'sub_owner', None) + db_url = args.database_url or getattr(args, 'sub_database_url', None) + task_table = args.tasks_table or getattr(args, 'sub_tasks_table', None) + push_notification_configs_table = ( + args.push_notification_configs_table + or getattr(args, 'sub_push_notification_configs_table', None) + ) + + verbose = args.verbose or getattr(args, 'sub_verbose', False) + sql = args.sql or getattr(args, 'sub_sql', False) + + # Pass custom arguments to the migration context + if owner: + cfg.set_main_option( + 'add_columns_owner_last_updated_default_owner', owner + ) + if db_url: + os.environ['DATABASE_URL'] = db_url + if task_table: + cfg.set_main_option('tasks_table', task_table) + if push_notification_configs_table: + cfg.set_main_option( + 'push_notification_configs_table', push_notification_configs_table + ) + if verbose: + cfg.set_main_option('verbose', 'true') + + # Execute the requested command + if args.cmd == 'upgrade': + logging.info('Upgrading database to %s', args.revision) + command.upgrade(cfg, args.revision, sql=sql) + elif args.cmd == 'downgrade': + logging.info('Downgrading database to %s', args.revision) + command.downgrade(cfg, args.revision, sql=sql) + elif args.cmd == 'current': + command.current(cfg, verbose=verbose) + + logging.info('Done.') diff --git a/src/a2a/alembic.ini b/src/a2a/alembic.ini new file mode 100644 index 000000000..f46511c00 --- /dev/null +++ b/src/a2a/alembic.ini @@ -0,0 +1,35 @@ +# A generic, single database configuration. + +[loggers] +keys = root,sqlalchemy,alembic + +[handlers] +keys = console + +[formatters] +keys = generic + +[logger_root] +level = INFO +handlers = console +qualname = + +[logger_sqlalchemy] +level = WARNING +handlers = +qualname = sqlalchemy.engine + +[logger_alembic] +level = WARNING +handlers = +qualname = alembic + +[handler_console] +class = StreamHandler +args = (sys.stderr,) +level = NOTSET +formatter = generic + +[formatter_generic] +format = %(levelname)-5.5s [%(name)s] %(message)s +datefmt = %H:%M:%S diff --git a/src/a2a/client/__init__.py b/src/a2a/client/__init__.py index 4fccd0810..d33c09481 100644 --- a/src/a2a/client/__init__.py +++ b/src/a2a/client/__init__.py @@ -1,7 +1,5 @@ """Client-side components for interacting with an A2A agent.""" -import logging - from a2a.client.auth import ( AuthInterceptor, CredentialService, @@ -9,59 +7,38 @@ ) from a2a.client.base_client import BaseClient from a2a.client.card_resolver import A2ACardResolver -from a2a.client.client import Client, ClientConfig, ClientEvent, Consumer -from a2a.client.client_factory import ClientFactory, minimal_agent_card +from a2a.client.client import ( + Client, + ClientCallContext, + ClientConfig, +) +from a2a.client.client_factory import ( + ClientFactory, + create_client, + minimal_agent_card, +) from a2a.client.errors import ( A2AClientError, - A2AClientHTTPError, - A2AClientJSONError, A2AClientTimeoutError, + AgentCardResolutionError, ) -from a2a.client.helpers import create_text_message_object -from a2a.client.legacy import A2AClient -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor - - -logger = logging.getLogger(__name__) - -try: - from a2a.client.legacy_grpc import A2AGrpcClient # type: ignore -except ImportError as e: - _original_error = e - logger.debug( - 'A2AGrpcClient not loaded. This is expected if gRPC dependencies are not installed. Error: %s', - _original_error, - ) - - class A2AGrpcClient: # type: ignore - """Placeholder for A2AGrpcClient when dependencies are not installed.""" - - def __init__(self, *args, **kwargs): - raise ImportError( - 'To use A2AGrpcClient, its dependencies must be installed. ' - 'You can install them with \'pip install "a2a-sdk[grpc]"\'' - ) from _original_error +from a2a.client.interceptors import ClientCallInterceptor __all__ = [ 'A2ACardResolver', - 'A2AClient', 'A2AClientError', - 'A2AClientHTTPError', - 'A2AClientJSONError', 'A2AClientTimeoutError', - 'A2AGrpcClient', + 'AgentCardResolutionError', 'AuthInterceptor', 'BaseClient', 'Client', 'ClientCallContext', 'ClientCallInterceptor', 'ClientConfig', - 'ClientEvent', 'ClientFactory', - 'Consumer', 'CredentialService', 'InMemoryContextCredentialStore', - 'create_text_message_object', + 'create_client', 'minimal_agent_card', ] diff --git a/src/a2a/client/auth/credentials.py b/src/a2a/client/auth/credentials.py index 11f323709..e3d74e4af 100644 --- a/src/a2a/client/auth/credentials.py +++ b/src/a2a/client/auth/credentials.py @@ -1,6 +1,6 @@ from abc import ABC, abstractmethod -from a2a.client.middleware import ClientCallContext +from a2a.client.client import ClientCallContext class CredentialService(ABC): diff --git a/src/a2a/client/auth/interceptor.py b/src/a2a/client/auth/interceptor.py index 65c971921..973c91cd7 100644 --- a/src/a2a/client/auth/interceptor.py +++ b/src/a2a/client/auth/interceptor.py @@ -1,15 +1,11 @@ import logging # noqa: I001 -from typing import Any from a2a.client.auth.credentials import CredentialService -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor -from a2a.types import ( - AgentCard, - APIKeySecurityScheme, - HTTPAuthSecurityScheme, - In, - OAuth2SecurityScheme, - OpenIdConnectSecurityScheme, +from a2a.client.client import ClientCallContext +from a2a.client.interceptors import ( + AfterArgs, + BeforeArgs, + ClientCallInterceptor, ) logger = logging.getLogger(__name__) @@ -24,75 +20,77 @@ class AuthInterceptor(ClientCallInterceptor): def __init__(self, credential_service: CredentialService): self._credential_service = credential_service - async def intercept( - self, - method_name: str, - request_payload: dict[str, Any], - http_kwargs: dict[str, Any], - agent_card: AgentCard | None, - context: ClientCallContext | None, - ) -> tuple[dict[str, Any], dict[str, Any]]: + async def before(self, args: BeforeArgs) -> None: """Applies authentication headers to the request if credentials are available.""" + agent_card = args.agent_card + + # Proto3 repeated fields (security) and maps (security_schemes) do not track presence. + # HasField() raises ValueError for them. + # We check for truthiness to see if they are non-empty. if ( - agent_card is None - or agent_card.security is None - or agent_card.security_schemes is None + not agent_card.security_requirements + or not agent_card.security_schemes ): - return request_payload, http_kwargs + return - for requirement in agent_card.security: - for scheme_name in requirement: + for requirement in agent_card.security_requirements: + for scheme_name in requirement.schemes: credential = await self._credential_service.get_credentials( - scheme_name, context + scheme_name, args.context ) if credential and scheme_name in agent_card.security_schemes: - scheme_def_union = agent_card.security_schemes.get( - scheme_name - ) - if not scheme_def_union: - continue - scheme_def = scheme_def_union.root - - headers = http_kwargs.get('headers', {}) - - match scheme_def: - # Case 1a: HTTP Bearer scheme with an if guard - case HTTPAuthSecurityScheme() if ( - scheme_def.scheme.lower() == 'bearer' - ): - headers['Authorization'] = f'Bearer {credential}' - logger.debug( - "Added Bearer token for scheme '%s' (type: %s).", - scheme_name, - scheme_def.type, - ) - http_kwargs['headers'] = headers - return request_payload, http_kwargs - - # Case 1b: OAuth2 and OIDC schemes, which are implicitly Bearer - case ( - OAuth2SecurityScheme() - | OpenIdConnectSecurityScheme() - ): - headers['Authorization'] = f'Bearer {credential}' - logger.debug( - "Added Bearer token for scheme '%s' (type: %s).", - scheme_name, - scheme_def.type, - ) - http_kwargs['headers'] = headers - return request_payload, http_kwargs - - # Case 2: API Key in Header - case APIKeySecurityScheme(in_=In.header): - headers[scheme_def.name] = credential - logger.debug( - "Added API Key Header for scheme '%s'.", - scheme_name, - ) - http_kwargs['headers'] = headers - return request_payload, http_kwargs + scheme = agent_card.security_schemes[scheme_name] + + if args.context is None: + args.context = ClientCallContext() + + if args.context.service_parameters is None: + args.context.service_parameters = {} + + # HTTP Bearer authentication + if ( + scheme.HasField('http_auth_security_scheme') + and scheme.http_auth_security_scheme.scheme.lower() + == 'bearer' + ): + args.context.service_parameters['Authorization'] = ( + f'Bearer {credential}' + ) + logger.debug( + "Added Bearer token for scheme '%s'.", + scheme_name, + ) + return + + # OAuth2 and OIDC schemes are implicitly Bearer + if scheme.HasField( + 'oauth2_security_scheme' + ) or scheme.HasField('open_id_connect_security_scheme'): + args.context.service_parameters['Authorization'] = ( + f'Bearer {credential}' + ) + logger.debug( + "Added Bearer token for scheme '%s'.", + scheme_name, + ) + return + + # API Key in Header + if ( + scheme.HasField('api_key_security_scheme') + and scheme.api_key_security_scheme.location.lower() + == 'header' + ): + args.context.service_parameters[ + scheme.api_key_security_scheme.name + ] = credential + logger.debug( + "Added API Key Header for scheme '%s'.", + scheme_name, + ) + return # Note: Other cases like API keys in query/cookie are not handled and will be skipped. - return request_payload, http_kwargs + async def after(self, args: AfterArgs) -> None: + """Invoked after the method is executed.""" diff --git a/src/a2a/client/base_client.py b/src/a2a/client/base_client.py index 09b2891d6..763f23fb5 100644 --- a/src/a2a/client/base_client.py +++ b/src/a2a/client/base_client.py @@ -1,32 +1,33 @@ -from collections.abc import AsyncIterator, Callable -from types import TracebackType +from collections.abc import AsyncGenerator, AsyncIterator, Awaitable, Callable from typing import Any -from typing_extensions import Self - from a2a.client.client import ( Client, ClientCallContext, ClientConfig, - ClientEvent, - Consumer, ) -from a2a.client.client_task_manager import ClientTaskManager -from a2a.client.errors import A2AClientInvalidStateError -from a2a.client.middleware import ClientCallInterceptor +from a2a.client.interceptors import ( + AfterArgs, + BeforeArgs, + ClientCallInterceptor, +) from a2a.client.transports.base import ClientTransport -from a2a.types import ( +from a2a.types.a2a_pb2 import ( AgentCard, - GetTaskPushNotificationConfigParams, - Message, - MessageSendConfiguration, - MessageSendParams, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskStatusUpdateEvent, ) @@ -38,36 +39,20 @@ def __init__( card: AgentCard, config: ClientConfig, transport: ClientTransport, - consumers: list[Consumer], - middleware: list[ClientCallInterceptor], + interceptors: list[ClientCallInterceptor], ): - super().__init__(consumers, middleware) + super().__init__(interceptors) self._card = card self._config = config self._transport = transport - - async def __aenter__(self) -> Self: - """Enters the async context manager, returning the client itself.""" - return self - - async def __aexit__( - self, - exc_type: type[BaseException] | None, - exc_val: BaseException | None, - exc_tb: TracebackType | None, - ) -> None: - """Exits the async context manager, ensuring close() is called.""" - await self.close() + self._interceptors = interceptors async def send_message( self, - request: Message, + request: SendMessageRequest, *, - configuration: MessageSendConfiguration | None = None, context: ClientCallContext | None = None, - request_metadata: dict[str, Any] | None = None, - extensions: list[str] | None = None, - ) -> AsyncIterator[ClientEvent | Message]: + ) -> AsyncIterator[StreamResponse]: """Sends a message to the agent. This method handles both streaming and non-streaming (polling) interactions @@ -76,184 +61,260 @@ async def send_message( Args: request: The message to send to the agent. - configuration: Optional per-call overrides for message sending behavior. - context: The client call context. - request_metadata: Extensions Metadata attached to the request. - extensions: List of extensions to be activated. + context: Optional client call context. Yields: - An async iterator of `ClientEvent` or a final `Message` response. + An async iterator of `StreamResponse` """ - base_config = MessageSendConfiguration( - accepted_output_modes=self._config.accepted_output_modes, - blocking=not self._config.polling, - push_notification_config=( - self._config.push_notification_configs[0] - if self._config.push_notification_configs - else None - ), - ) - if configuration is not None: - update_data = configuration.model_dump( - exclude_unset=True, - by_alias=False, - ) - config = base_config.model_copy(update=update_data) - else: - config = base_config - - params = MessageSendParams( - message=request, configuration=config, metadata=request_metadata - ) - + self._apply_client_config(request) if not self._config.streaming or not self._card.capabilities.streaming: - response = await self._transport.send_message( - params, context=context, extensions=extensions + response = await self._execute_with_interceptors( + input_data=request, + method='send_message', + context=context, + transport_call=lambda req, ctx: self._transport.send_message( + req, context=ctx + ), ) - result = ( - (response, None) if isinstance(response, Task) else response - ) - await self.consume(result, self._card) - yield result - return - - tracker = ClientTaskManager() - stream = self._transport.send_message_streaming( - params, context=context, extensions=extensions - ) - first_event = await anext(stream) - # The response from a server may be either exactly one Message or a - # series of Task updates. Separate out the first message for special - # case handling, which allows us to simplify further stream processing. - if isinstance(first_event, Message): - await self.consume(first_event, self._card) - yield first_event + # In non-streaming case we convert to a StreamResponse so that the + # client always sees the same iterator. + stream_response = StreamResponse() + if response.HasField('task'): + stream_response.task.CopyFrom(response.task) + elif response.HasField('message'): + stream_response.message.CopyFrom(response.message) + else: + raise ValueError('Response has neither task nor message') + + yield stream_response return - yield await self._process_response(tracker, first_event) + async for event in self._execute_stream_with_interceptors( + input_data=request, + method='send_message_streaming', + context=context, + transport_call=lambda req, ctx: ( + self._transport.send_message_streaming(req, context=ctx) + ), + ): + yield event - async for event in stream: - yield await self._process_response(tracker, event) + def _apply_client_config(self, request: SendMessageRequest) -> None: + request.configuration.return_immediately |= self._config.polling + if ( + not request.configuration.HasField('task_push_notification_config') + and self._config.push_notification_config + ): + request.configuration.task_push_notification_config.CopyFrom( + self._config.push_notification_config + ) + if ( + not request.configuration.accepted_output_modes + and self._config.accepted_output_modes + ): + request.configuration.accepted_output_modes.extend( + self._config.accepted_output_modes + ) - async def _process_response( + async def _process_stream( self, - tracker: ClientTaskManager, - event: Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent, - ) -> ClientEvent: - if isinstance(event, Message): - raise A2AClientInvalidStateError( - 'received a streamed Message from server after first response; this is not supported' + stream: AsyncIterator[StreamResponse], + before_args: BeforeArgs, + ) -> AsyncGenerator[StreamResponse, None]: + async for stream_response in stream: + after_args = AfterArgs( + result=stream_response, + method=before_args.method, + agent_card=self._card, + context=before_args.context, ) - await tracker.process(event) - task = tracker.get_task_or_raise() - update = None if isinstance(event, Task) else event - client_event = (task, update) - await self.consume(client_event, self._card) - return client_event + await self._intercept_after(after_args) + yield after_args.result + if after_args.result.HasField('message'): + return async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task. Args: - request: The `TaskQueryParams` object specifying the task ID. - context: The client call context. - extensions: List of extensions to be activated. + request: The `GetTaskRequest` object specifying the task ID. + context: Optional client call context. Returns: A `Task` object representing the current state of the task. """ - return await self._transport.get_task( - request, context=context, extensions=extensions + return await self._execute_with_interceptors( + input_data=request, + method='get_task', + context=context, + transport_call=lambda req, ctx: self._transport.get_task( + req, context=ctx + ), + ) + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + return await self._execute_with_interceptors( + input_data=request, + method='list_tasks', + context=context, + transport_call=lambda req, ctx: self._transport.list_tasks( + req, context=ctx + ), ) async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task. Args: - request: The `TaskIdParams` object specifying the task ID. - context: The client call context. - extensions: List of extensions to be activated. + request: The `CancelTaskRequest` object specifying the task ID. + context: Optional client call context. Returns: A `Task` object containing the updated task status. """ - return await self._transport.cancel_task( - request, context=context, extensions=extensions + return await self._execute_with_interceptors( + input_data=request, + method='cancel_task', + context=context, + transport_call=lambda req, ctx: self._transport.cancel_task( + req, context=ctx + ), ) - async def set_task_callback( + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task. Args: request: The `TaskPushNotificationConfig` object with the new configuration. - context: The client call context. - extensions: List of extensions to be activated. + context: Optional client call context. Returns: The created or updated `TaskPushNotificationConfig` object. """ - return await self._transport.set_task_callback( - request, context=context, extensions=extensions + return await self._execute_with_interceptors( + input_data=request, + method='create_task_push_notification_config', + context=context, + transport_call=lambda req, ctx: ( + self._transport.create_task_push_notification_config( + req, context=ctx + ) + ), ) - async def get_task_callback( + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task. Args: request: The `GetTaskPushNotificationConfigParams` object specifying the task. - context: The client call context. - extensions: List of extensions to be activated. + context: Optional client call context. Returns: A `TaskPushNotificationConfig` object containing the configuration. """ - return await self._transport.get_task_callback( - request, context=context, extensions=extensions + return await self._execute_with_interceptors( + input_data=request, + method='get_task_push_notification_config', + context=context, + transport_call=lambda req, ctx: ( + self._transport.get_task_push_notification_config( + req, context=ctx + ) + ), + ) + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task. + + Args: + request: The `ListTaskPushNotificationConfigsRequest` object specifying the request. + context: Optional client call context. + + Returns: + A `ListTaskPushNotificationConfigsResponse` object. + """ + return await self._execute_with_interceptors( + input_data=request, + method='list_task_push_notification_configs', + context=context, + transport_call=lambda req, ctx: ( + self._transport.list_task_push_notification_configs( + req, context=ctx + ) + ), ) - async def resubscribe( + async def delete_task_push_notification_config( self, - request: TaskIdParams, + request: DeleteTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncIterator[ClientEvent]: + ) -> None: + """Deletes the push notification configuration for a specific task. + + Args: + request: The `DeleteTaskPushNotificationConfigRequest` object specifying the request. + context: Optional client call context. + """ + return await self._execute_with_interceptors( + input_data=request, + method='delete_task_push_notification_config', + context=context, + transport_call=lambda req, ctx: ( + self._transport.delete_task_push_notification_config( + req, context=ctx + ) + ), + ) + + async def subscribe( + self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncIterator[StreamResponse]: """Resubscribes to a task's event stream. This is only available if both the client and server support streaming. Args: request: Parameters to identify the task to resubscribe to. - context: The client call context. - extensions: List of extensions to be activated. + context: Optional client call context. Yields: - An async iterator of `ClientEvent` objects. + An async iterator of `StreamResponse` objects. Raises: NotImplementedError: If streaming is not supported by the client or server. @@ -263,20 +324,21 @@ async def resubscribe( 'client and/or server do not support resubscription.' ) - tracker = ClientTaskManager() - # Note: resubscribe can only be called on an existing task. As such, - # we should never see Message updates, despite the typing of the service - # definition indicating it may be possible. - async for event in self._transport.resubscribe( - request, context=context, extensions=extensions + async for event in self._execute_stream_with_interceptors( + input_data=request, + method='subscribe', + context=context, + transport_call=lambda req, ctx: self._transport.subscribe( + req, context=ctx + ), ): - yield await self._process_response(tracker, event) + yield event - async def get_card( + async def get_extended_agent_card( self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: """Retrieves the agent's card. @@ -285,21 +347,136 @@ async def get_card( client's internal state with the new card. Args: - context: The client call context. - extensions: List of extensions to be activated. + request: The `GetExtendedAgentCardRequest` object specifying the request. + context: Optional client call context. signature_verifier: A callable used to verify the agent card's signatures. Returns: The `AgentCard` for the agent. """ - card = await self._transport.get_card( + card = await self._execute_with_interceptors( + input_data=request, + method='get_extended_agent_card', context=context, - extensions=extensions, - signature_verifier=signature_verifier, + transport_call=lambda req, ctx: ( + self._transport.get_extended_agent_card(req, context=ctx) + ), ) + if signature_verifier: + signature_verifier(card) + self._card = card return card async def close(self) -> None: """Closes the underlying transport.""" await self._transport.close() + + async def _execute_with_interceptors( + self, + input_data: Any, + method: str, + context: ClientCallContext | None, + transport_call: Callable[ + [Any, ClientCallContext | None], Awaitable[Any] + ], + ) -> Any: + before_args = BeforeArgs( + input=input_data, + method=method, + agent_card=self._card, + context=context, + ) + before_result = await self._intercept_before(before_args) + + if before_result is not None: + early_after_args = AfterArgs( + result=before_result['early_return'], + method=method, + agent_card=self._card, + context=before_args.context, + ) + await self._intercept_after( + early_after_args, + before_result['executed'], + ) + return early_after_args.result + + result = await transport_call(before_args.input, before_args.context) + + after_args = AfterArgs( + result=result, + method=method, + agent_card=self._card, + context=before_args.context, + ) + await self._intercept_after(after_args) + + return after_args.result + + async def _execute_stream_with_interceptors( + self, + input_data: Any, + method: str, + context: ClientCallContext | None, + transport_call: Callable[ + [Any, ClientCallContext | None], AsyncIterator[StreamResponse] + ], + ) -> AsyncIterator[StreamResponse]: + + before_args = BeforeArgs( + input=input_data, + method=method, + agent_card=self._card, + context=context, + ) + before_result = await self._intercept_before(before_args) + + if before_result is not None: + after_args = AfterArgs( + result=before_result['early_return'], + method=method, + agent_card=self._card, + context=before_args.context, + ) + await self._intercept_after(after_args, before_result['executed']) + + yield after_args.result + return + + stream = transport_call(before_args.input, before_args.context) + + async for client_event in self._process_stream(stream, before_args): + yield client_event + + async def _intercept_before( + self, + args: BeforeArgs, + ) -> dict[str, Any] | None: + if not self._interceptors: + return None + executed: list[ClientCallInterceptor] = [] + for interceptor in self._interceptors: + await interceptor.before(args) + executed.append(interceptor) + if args.early_return: + return { + 'early_return': args.early_return, + 'executed': executed, + } + return None + + async def _intercept_after( + self, + args: AfterArgs, + interceptors: list[ClientCallInterceptor] | None = None, + ) -> None: + interceptors_to_use = ( + interceptors if interceptors is not None else self._interceptors + ) + + reversed_interceptors = list(reversed(interceptors_to_use)) + for interceptor in reversed_interceptors: + await interceptor.after(args) + if args.early_return: + return diff --git a/src/a2a/client/card_resolver.py b/src/a2a/client/card_resolver.py index adb3c5aee..815916014 100644 --- a/src/a2a/client/card_resolver.py +++ b/src/a2a/client/card_resolver.py @@ -6,13 +6,10 @@ import httpx -from pydantic import ValidationError +from google.protobuf.json_format import ParseDict, ParseError -from a2a.client.errors import ( - A2AClientHTTPError, - A2AClientJSONError, -) -from a2a.types import ( +from a2a.client.errors import AgentCardResolutionError +from a2a.types.a2a_pb2 import ( AgentCard, ) from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH @@ -21,6 +18,111 @@ logger = logging.getLogger(__name__) +def parse_agent_card(agent_card_data: dict[str, Any]) -> AgentCard: + """Parse AgentCard JSON dictionary and handle backward compatibility.""" + _handle_extended_card_compatibility(agent_card_data) + _handle_connection_fields_compatibility(agent_card_data) + _handle_security_compatibility(agent_card_data) + + return ParseDict(agent_card_data, AgentCard(), ignore_unknown_fields=True) + + +def _handle_extended_card_compatibility( + agent_card_data: dict[str, Any], +) -> None: + """Map legacy supportsAuthenticatedExtendedCard to capabilities.""" + if agent_card_data.pop('supportsAuthenticatedExtendedCard', None): + capabilities = agent_card_data.setdefault('capabilities', {}) + if 'extendedAgentCard' not in capabilities: + capabilities['extendedAgentCard'] = True + + +def _handle_connection_fields_compatibility( + agent_card_data: dict[str, Any], +) -> None: + """Map legacy connection and transport fields to supportedInterfaces.""" + main_url = agent_card_data.pop('url', None) + main_transport = agent_card_data.pop('preferredTransport', 'JSONRPC') + version = agent_card_data.pop('protocolVersion', '0.3.0') + additional_interfaces = ( + agent_card_data.pop('additionalInterfaces', None) or [] + ) + + if 'supportedInterfaces' not in agent_card_data and main_url: + supported_interfaces = [] + supported_interfaces.append( + { + 'url': main_url, + 'protocolBinding': main_transport, + 'protocolVersion': version, + } + ) + supported_interfaces.extend( + { + 'url': iface.get('url'), + 'protocolBinding': iface.get('transport'), + 'protocolVersion': version, + } + for iface in additional_interfaces + ) + agent_card_data['supportedInterfaces'] = supported_interfaces + + +def _map_legacy_security( + sec_list: list[dict[str, list[str]]], +) -> list[dict[str, Any]]: + """Convert a legacy security requirement list into the 1.0.0 Protobuf format.""" + return [ + { + 'schemes': { + scheme_name: {'list': scopes} + for scheme_name, scopes in sec_dict.items() + } + } + for sec_dict in sec_list + ] + + +def _handle_security_compatibility(agent_card_data: dict[str, Any]) -> None: + """Map legacy security requirements and schemas to their 1.0.0 Protobuf equivalents.""" + legacy_security = agent_card_data.pop('security', None) + if ( + 'securityRequirements' not in agent_card_data + and legacy_security is not None + ): + agent_card_data['securityRequirements'] = _map_legacy_security( + legacy_security + ) + + for skill in agent_card_data.get('skills', []): + legacy_skill_sec = skill.pop('security', None) + if 'securityRequirements' not in skill and legacy_skill_sec is not None: + skill['securityRequirements'] = _map_legacy_security( + legacy_skill_sec + ) + + security_schemes = agent_card_data.get('securitySchemes', {}) + if security_schemes: + type_mapping = { + 'apiKey': 'apiKeySecurityScheme', + 'http': 'httpAuthSecurityScheme', + 'oauth2': 'oauth2SecurityScheme', + 'openIdConnect': 'openIdConnectSecurityScheme', + 'mutualTLS': 'mtlsSecurityScheme', + } + for scheme in security_schemes.values(): + scheme_type = scheme.pop('type', None) + if scheme_type in type_mapping: + # Map legacy 'in' to modern 'location' + if scheme_type == 'apiKey' and 'in' in scheme: + scheme['location'] = scheme.pop('in') + + mapped_name = type_mapping[scheme_type] + new_scheme_wrapper = {mapped_name: scheme.copy()} + scheme.clear() + scheme.update(new_scheme_wrapper) + + class A2ACardResolver: """Agent Card resolver.""" @@ -64,9 +166,9 @@ async def get_agent_card( An `AgentCard` object representing the agent's capabilities. Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON - or validated against the AgentCard schema. + AgentCardResolutionError: If an HTTP error occurs during the request, if the + response body cannot be decoded as JSON, or if it cannot be + validated against the AgentCard schema. """ if not relative_card_path: # Use the default public agent card path configured during initialization @@ -74,7 +176,9 @@ async def get_agent_card( else: path_segment = relative_card_path.lstrip('/') - target_url = f'{self.base_url}/{path_segment}' + target_url = ( + f'{self.base_url}/{path_segment}' if path_segment else self.base_url + ) try: response = await self.httpx_client.get( @@ -88,26 +192,25 @@ async def get_agent_card( target_url, agent_card_data, ) - agent_card = AgentCard.model_validate(agent_card_data) + agent_card = parse_agent_card(agent_card_data) if signature_verifier: signature_verifier(agent_card) except httpx.HTTPStatusError as e: - raise A2AClientHTTPError( - e.response.status_code, - f'Failed to fetch agent card from {target_url}: {e}', + raise AgentCardResolutionError( + f'Failed to fetch agent card from {target_url} (HTTP {e.response.status_code}): {e}', + status_code=e.response.status_code, ) from e except json.JSONDecodeError as e: - raise A2AClientJSONError( + raise AgentCardResolutionError( f'Failed to parse JSON for agent card from {target_url}: {e}' ) from e except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, + raise AgentCardResolutionError( f'Network communication error fetching agent card from {target_url}: {e}', ) from e - except ValidationError as e: # Pydantic validation error - raise A2AClientJSONError( - f'Failed to validate agent card structure from {target_url}: {e.json()}' + except ParseError as e: + raise AgentCardResolutionError( + f'Failed to validate agent card structure from {target_url}: {e}' ) from e return agent_card diff --git a/src/a2a/client/client.py b/src/a2a/client/client.py index 286641a79..3fbf4f287 100644 --- a/src/a2a/client/client.py +++ b/src/a2a/client/client.py @@ -2,25 +2,34 @@ import logging from abc import ABC, abstractmethod -from collections.abc import AsyncIterator, Callable, Coroutine +from collections.abc import AsyncIterator, Callable, MutableMapping +from types import TracebackType from typing import Any import httpx -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from pydantic import BaseModel, Field +from typing_extensions import Self + +from a2a.client.interceptors import ClientCallInterceptor from a2a.client.optionals import Channel -from a2a.types import ( +from a2a.client.service_parameters import ServiceParameters +from a2a.types.a2a_pb2 import ( AgentCard, - GetTaskPushNotificationConfigParams, - Message, - PushNotificationConfig, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskStatusUpdateEvent, - TransportProtocol, ) @@ -45,7 +54,7 @@ class ClientConfig: grpc_channel_factory: Callable[[str], Channel] | None = None """Generates a grpc connection channel for a given url.""" - supported_transports: list[TransportProtocol | str] = dataclasses.field( + supported_protocol_bindings: list[str] = dataclasses.field( default_factory=list ) """Ordered list of transports for connecting to agent @@ -62,23 +71,20 @@ class ClientConfig: accepted_output_modes: list[str] = dataclasses.field(default_factory=list) """The set of accepted output modes for the client.""" - push_notification_configs: list[PushNotificationConfig] = dataclasses.field( - default_factory=list - ) - """Push notification callbacks to use for every request.""" + push_notification_config: TaskPushNotificationConfig | None = None + """Push notification configuration to use for every request.""" + - extensions: list[str] = dataclasses.field(default_factory=list) - """A list of extension URIs the client supports.""" +class ClientCallContext(BaseModel): + """A context passed with each client call, allowing for call-specific. + configuration and data passing. Such as authentication details or + request deadlines. + """ -UpdateEvent = TaskStatusUpdateEvent | TaskArtifactUpdateEvent | None -# Alias for emitted events from client -ClientEvent = tuple[Task, UpdateEvent] -# Alias for an event consuming callback. It takes either a (task, update) pair -# or a message as well as the agent card for the agent this came from. -Consumer = Callable[ - [ClientEvent | Message, AgentCard], Coroutine[None, Any, Any] -] + state: MutableMapping[str, Any] = Field(default_factory=dict) + timeout: float | None = None + service_parameters: ServiceParameters | None = None class Client(ABC): @@ -91,38 +97,40 @@ class Client(ABC): def __init__( self, - consumers: list[Consumer] | None = None, - middleware: list[ClientCallInterceptor] | None = None, + interceptors: list[ClientCallInterceptor] | None = None, ): - """Initializes the client with consumers and middleware. + """Initializes the client with interceptors. Args: - consumers: A list of callables to process events from the agent. - middleware: A list of interceptors to process requests and responses. + interceptors: A list of interceptors to process requests and responses. """ - if middleware is None: - middleware = [] - if consumers is None: - consumers = [] - self._consumers = consumers - self._middleware = middleware + self._interceptors = interceptors or [] + + async def __aenter__(self) -> Self: + """Enters the async context manager.""" + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exits the async context manager and closes the client.""" + await self.close() @abstractmethod async def send_message( self, - request: Message, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - request_metadata: dict[str, Any] | None = None, - extensions: list[str] | None = None, - ) -> AsyncIterator[ClientEvent | Message]: + ) -> AsyncIterator[StreamResponse]: """Sends a message to the server. This will automatically use the streaming or non-streaming approach as supported by the server and the client config. Client will - aggregate update events and return an iterator of (`Task`,`Update`) - pairs, or a `Message`. Client will also send these values to any - configured `Consumer`s in the client. + aggregate update events and return an iterator of `StreamResponse`. """ return yield @@ -130,82 +138,91 @@ async def send_message( @abstractmethod async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task.""" + @abstractmethod + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + @abstractmethod async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task.""" @abstractmethod - async def set_task_callback( + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task.""" @abstractmethod - async def get_task_callback( + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task.""" @abstractmethod - async def resubscribe( + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + + @abstractmethod + async def delete_task_push_notification_config( self, - request: TaskIdParams, + request: DeleteTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncIterator[ClientEvent]: + ) -> None: + """Deletes the push notification configuration for a specific task.""" + + @abstractmethod + async def subscribe( + self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncIterator[StreamResponse]: """Resubscribes to a task's event stream.""" return yield @abstractmethod - async def get_card( + async def get_extended_agent_card( self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: """Retrieves the agent's card.""" - async def add_event_consumer(self, consumer: Consumer) -> None: - """Attaches additional consumers to the `Client`.""" - self._consumers.append(consumer) - - async def add_request_middleware( - self, middleware: ClientCallInterceptor - ) -> None: - """Attaches additional middleware to the `Client`.""" - self._middleware.append(middleware) + async def add_interceptor(self, interceptor: ClientCallInterceptor) -> None: + """Attaches additional interceptors to the `Client`.""" + self._interceptors.append(interceptor) - async def consume( - self, - event: tuple[Task, UpdateEvent] | Message | None, - card: AgentCard, - ) -> None: - """Processes the event via all the registered `Consumer`s.""" - if not event: - return - for c in self._consumers: - await c(event, card) + @abstractmethod + async def close(self) -> None: + """Closes the client and releases any underlying resources.""" diff --git a/src/a2a/client/client_factory.py b/src/a2a/client/client_factory.py index c3d5762eb..a59189ade 100644 --- a/src/a2a/client/client_factory.py +++ b/src/a2a/client/client_factory.py @@ -3,178 +3,297 @@ import logging from collections.abc import Callable -from typing import Any +from typing import TYPE_CHECKING, Any import httpx +from packaging.version import InvalidVersion, Version + from a2a.client.base_client import BaseClient from a2a.client.card_resolver import A2ACardResolver -from a2a.client.client import Client, ClientConfig, Consumer -from a2a.client.middleware import ClientCallInterceptor +from a2a.client.client import Client, ClientConfig from a2a.client.transports.base import ClientTransport from a2a.client.transports.jsonrpc import JsonRpcTransport from a2a.client.transports.rest import RestTransport -from a2a.types import ( +from a2a.client.transports.tenant_decorator import TenantTransportDecorator +from a2a.compat.v0_3.versions import is_legacy_version +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, AgentInterface, +) +from a2a.utils.constants import ( + PROTOCOL_VERSION_0_3, + PROTOCOL_VERSION_1_0, + PROTOCOL_VERSION_CURRENT, + VERSION_HEADER, TransportProtocol, ) +if TYPE_CHECKING: + from a2a.client.interceptors import ClientCallInterceptor + + try: from a2a.client.transports.grpc import GrpcTransport except ImportError: GrpcTransport = None # type: ignore # pyright: ignore +try: + from a2a.compat.v0_3.grpc_transport import CompatGrpcTransport +except ImportError: + CompatGrpcTransport = None # type: ignore # pyright: ignore + logger = logging.getLogger(__name__) TransportProducer = Callable[ - [AgentCard, str, ClientConfig, list[ClientCallInterceptor]], + [AgentCard, str, ClientConfig], ClientTransport, ] class ClientFactory: - """ClientFactory is used to generate the appropriate client for the agent. + """Factory for creating clients that communicate with A2A agents. - The factory is configured with a `ClientConfig` and optionally a list of - `Consumer`s to use for all generated `Client`s. The expected use is: + The factory is configured with a `ClientConfig` and optionally custom + transport producers registered via `register`. Example usage: - .. code-block:: python + factory = ClientFactory(config) + # Optionally register custom transport implementations + factory.register('my_custom_transport', custom_transport_producer) + # Create a client from an AgentCard + client = factory.create(card, interceptors) + # Or resolve an AgentCard from a URL and create a client + client = await factory.create_from_url('https://example.com') - factory = ClientFactory(config, consumers) - # Optionally register custom client implementations - factory.register('my_customer_transport', NewCustomTransportClient) - # Then with an agent card make a client with additional consumers and - # interceptors - client = factory.create(card, additional_consumers, interceptors) - - Now the client can be used consistently regardless of the transport. This + The client can be used consistently regardless of the transport. This aligns the client configuration with the server's capabilities. """ def __init__( self, - config: ClientConfig, - consumers: list[Consumer] | None = None, + config: ClientConfig | None = None, ): - if consumers is None: - consumers = [] + config = config or ClientConfig() + httpx_client = config.httpx_client or httpx.AsyncClient() + httpx_client.headers.setdefault( + VERSION_HEADER, PROTOCOL_VERSION_CURRENT + ) + self._config = config - self._consumers = consumers + self._httpx_client = httpx_client self._registry: dict[str, TransportProducer] = {} - self._register_defaults(config.supported_transports) + self._register_defaults(config.supported_protocol_bindings) - def _register_defaults( - self, supported: list[str | TransportProtocol] - ) -> None: + def _register_defaults(self, supported: list[str]) -> None: # Empty support list implies JSON-RPC only. - if TransportProtocol.jsonrpc in supported or not supported: - self.register( - TransportProtocol.jsonrpc, - lambda card, url, config, interceptors: JsonRpcTransport( - config.httpx_client or httpx.AsyncClient(), + + if TransportProtocol.JSONRPC in supported or not supported: + + def jsonrpc_transport_producer( + card: AgentCard, + url: str, + config: ClientConfig, + ) -> ClientTransport: + interface = ClientFactory._find_best_interface( + list(card.supported_interfaces), + protocol_bindings=[TransportProtocol.JSONRPC], + url=url, + ) + version = ( + interface.protocol_version + if interface + else PROTOCOL_VERSION_CURRENT + ) + + if is_legacy_version(version): + from a2a.compat.v0_3.jsonrpc_transport import ( # noqa: PLC0415 + CompatJsonRpcTransport, + ) + + return CompatJsonRpcTransport( + self._httpx_client, + card, + url, + ) + + return JsonRpcTransport( + self._httpx_client, card, url, - interceptors, - config.extensions or None, - ), - ) - if TransportProtocol.http_json in supported: + ) + self.register( - TransportProtocol.http_json, - lambda card, url, config, interceptors: RestTransport( - config.httpx_client or httpx.AsyncClient(), + TransportProtocol.JSONRPC, + jsonrpc_transport_producer, + ) + if TransportProtocol.HTTP_JSON in supported: + + def rest_transport_producer( + card: AgentCard, + url: str, + config: ClientConfig, + ) -> ClientTransport: + interface = ClientFactory._find_best_interface( + list(card.supported_interfaces), + protocol_bindings=[TransportProtocol.HTTP_JSON], + url=url, + ) + version = ( + interface.protocol_version + if interface + else PROTOCOL_VERSION_CURRENT + ) + + if is_legacy_version(version): + from a2a.compat.v0_3.rest_transport import ( # noqa: PLC0415 + CompatRestTransport, + ) + + return CompatRestTransport( + self._httpx_client, + card, + url, + ) + + return RestTransport( + self._httpx_client, card, url, - interceptors, - config.extensions or None, - ), + ) + + self.register( + TransportProtocol.HTTP_JSON, + rest_transport_producer, ) - if TransportProtocol.grpc in supported: + if TransportProtocol.GRPC in supported: if GrpcTransport is None: raise ImportError( 'To use GrpcClient, its dependencies must be installed. ' 'You can install them with \'pip install "a2a-sdk[grpc]"\'' ) + + _grpc_transport = GrpcTransport + + def grpc_transport_producer( + card: AgentCard, + url: str, + config: ClientConfig, + ) -> ClientTransport: + # The interface has already been selected and passed as `url`. + # We determine its version to use the appropriate transport implementation. + interface = ClientFactory._find_best_interface( + list(card.supported_interfaces), + protocol_bindings=[TransportProtocol.GRPC], + url=url, + ) + version = ( + interface.protocol_version + if interface + else PROTOCOL_VERSION_CURRENT + ) + + if ( + is_legacy_version(version) + and CompatGrpcTransport is not None + ): + return CompatGrpcTransport.create(card, url, config) + + return _grpc_transport.create(card, url, config) + self.register( - TransportProtocol.grpc, - GrpcTransport.create, + TransportProtocol.GRPC, + grpc_transport_producer, ) - @classmethod - async def connect( # noqa: PLR0913 - cls, - agent: str | AgentCard, - client_config: ClientConfig | None = None, - consumers: list[Consumer] | None = None, + @staticmethod + def _find_best_interface( + interfaces: list[AgentInterface], + protocol_bindings: list[str] | None = None, + url: str | None = None, + ) -> AgentInterface | None: + """Finds the best interface based on protocol version priorities.""" + candidates = [ + i + for i in interfaces + if ( + protocol_bindings is None + or i.protocol_binding in protocol_bindings + ) + and (url is None or i.url == url) + ] + + if not candidates: + return None + + # Prefer interface with version 1.0 + for i in candidates: + if i.protocol_version == PROTOCOL_VERSION_1_0: + return i + + best_gt_1_0 = None + best_ge_0_3 = None + best_no_version = None + + for i in candidates: + if not i.protocol_version: + if best_no_version is None: + best_no_version = i + continue + + try: + v = Version(i.protocol_version) + if best_gt_1_0 is None and v > Version(PROTOCOL_VERSION_1_0): + best_gt_1_0 = i + if best_ge_0_3 is None and v >= Version(PROTOCOL_VERSION_0_3): + best_ge_0_3 = i + except InvalidVersion: + pass + + return best_gt_1_0 or best_ge_0_3 or best_no_version + + async def create_from_url( + self, + url: str, interceptors: list[ClientCallInterceptor] | None = None, relative_card_path: str | None = None, resolver_http_kwargs: dict[str, Any] | None = None, - extra_transports: dict[str, TransportProducer] | None = None, - extensions: list[str] | None = None, signature_verifier: Callable[[AgentCard], None] | None = None, ) -> Client: - """Convenience method for constructing a client. + """Create a `Client` by resolving an `AgentCard` from a URL. - Constructs a client that connects to the specified agent. Note that - creating multiple clients via this method is less efficient than - constructing an instance of ClientFactory and reusing that. - - .. code-block:: python - - # This will search for an AgentCard at /.well-known/agent-card.json - my_agent_url = 'https://travel.agents.example.com' - client = await ClientFactory.connect(my_agent_url) + Resolves the agent card from the given URL using the factory's + configured httpx client, then creates a client via `create`. + If the agent card is already available, use `create` directly + instead. Args: - agent: The base URL of the agent, or the AgentCard to connect to. - client_config: The ClientConfig to use when connecting to the agent. - consumers: A list of `Consumer` methods to pass responses to. - interceptors: A list of interceptors to use for each request. These - are used for things like attaching credentials or http headers - to all outbound requests. - relative_card_path: If the agent field is a URL, this value is used as - the relative path when resolving the agent card. See - A2AAgentCardResolver.get_agent_card for more details. - resolver_http_kwargs: Dictionary of arguments to provide to the httpx - client when resolving the agent card. This value is provided to - A2AAgentCardResolver.get_agent_card as the http_kwargs parameter. - extra_transports: Additional transport protocols to enable when - constructing the client. - extensions: List of extensions to be activated. - signature_verifier: A callable used to verify the agent card's signatures. + url: The base URL of the agent. The agent card will be fetched + from `/.well-known/agent-card.json` by default. + interceptors: A list of interceptors to use for each request. + These are used for things like attaching credentials or http + headers to all outbound requests. + relative_card_path: The relative path when resolving the agent + card. See `A2ACardResolver.get_agent_card` for details. + resolver_http_kwargs: Dictionary of arguments to provide to the + httpx client when resolving the agent card. + signature_verifier: A callable used to verify the agent card's + signatures. Returns: A `Client` object. """ - client_config = client_config or ClientConfig() - if isinstance(agent, str): - if not client_config.httpx_client: - async with httpx.AsyncClient() as client: - resolver = A2ACardResolver(client, agent) - card = await resolver.get_agent_card( - relative_card_path=relative_card_path, - http_kwargs=resolver_http_kwargs, - signature_verifier=signature_verifier, - ) - else: - resolver = A2ACardResolver(client_config.httpx_client, agent) - card = await resolver.get_agent_card( - relative_card_path=relative_card_path, - http_kwargs=resolver_http_kwargs, - signature_verifier=signature_verifier, - ) - else: - card = agent - factory = cls(client_config) - for label, generator in (extra_transports or {}).items(): - factory.register(label, generator) - return factory.create(card, consumers, interceptors, extensions) + resolver = A2ACardResolver(self._httpx_client, url) + card = await resolver.get_agent_card( + relative_card_path=relative_card_path, + http_kwargs=resolver_http_kwargs, + signature_verifier=signature_verifier, + ) + return self.create(card, interceptors) def register(self, label: str, generator: TransportProducer) -> None: """Register a new transport producer for a given transport label.""" @@ -183,19 +302,15 @@ def register(self, label: str, generator: TransportProducer) -> None: def create( self, card: AgentCard, - consumers: list[Consumer] | None = None, interceptors: list[ClientCallInterceptor] | None = None, - extensions: list[str] | None = None, ) -> Client: """Create a new `Client` for the provided `AgentCard`. Args: card: An `AgentCard` defining the characteristics of the agent. - consumers: A list of `Consumer` methods to pass responses to. interceptors: A list of interceptors to use for each request. These are used for things like attaching credentials or http headers to all outbound requests. - extensions: List of extensions to be activated. Returns: A `Client` object. @@ -204,56 +319,93 @@ def create( If there is no valid matching of the client configuration with the server configuration, a `ValueError` is raised. """ - server_preferred = card.preferred_transport or TransportProtocol.jsonrpc - server_set = {server_preferred: card.url} - if card.additional_interfaces: - server_set.update( - {x.transport: x.url for x in card.additional_interfaces} - ) - client_set = self._config.supported_transports or [ - TransportProtocol.jsonrpc + client_set = self._config.supported_protocol_bindings or [ + TransportProtocol.JSONRPC ] transport_protocol = None - transport_url = None + selected_interface = None if self._config.use_client_preference: - for x in client_set: - if x in server_set: - transport_protocol = x - transport_url = server_set[x] + for protocol_binding in client_set: + selected_interface = ClientFactory._find_best_interface( + list(card.supported_interfaces), + protocol_bindings=[protocol_binding], + ) + if selected_interface: + transport_protocol = protocol_binding break else: - for x, url in server_set.items(): - if x in client_set: - transport_protocol = x - transport_url = url + for supported_interface in card.supported_interfaces: + if supported_interface.protocol_binding in client_set: + transport_protocol = supported_interface.protocol_binding + selected_interface = ClientFactory._find_best_interface( + list(card.supported_interfaces), + protocol_bindings=[transport_protocol], + ) break - if not transport_protocol or not transport_url: + if not transport_protocol or not selected_interface: raise ValueError('no compatible transports found.') if transport_protocol not in self._registry: raise ValueError(f'no client available for {transport_protocol}') - all_consumers = self._consumers.copy() - if consumers: - all_consumers.extend(consumers) - - all_extensions = self._config.extensions.copy() - if extensions: - all_extensions.extend(extensions) - self._config.extensions = all_extensions - transport = self._registry[transport_protocol]( - card, transport_url, self._config, interceptors or [] + card, selected_interface.url, self._config ) + if selected_interface.tenant: + transport = TenantTransportDecorator( + transport, selected_interface.tenant + ) + return BaseClient( card, self._config, transport, - all_consumers, interceptors or [], ) +async def create_client( # noqa: PLR0913 + agent: str | AgentCard, + client_config: ClientConfig | None = None, + interceptors: list[ClientCallInterceptor] | None = None, + relative_card_path: str | None = None, + resolver_http_kwargs: dict[str, Any] | None = None, + signature_verifier: Callable[[AgentCard], None] | None = None, +) -> Client: + """Create a `Client` for an agent from a URL or `AgentCard`. + + Convenience function that constructs a `ClientFactory` internally. + For reusing a factory across multiple agents or registering custom + transports, use `ClientFactory` directly instead. + + Args: + agent: The base URL of the agent, or an `AgentCard` to use + directly. + client_config: Optional `ClientConfig`. A default config is + created if not provided. + interceptors: A list of interceptors to use for each request. + relative_card_path: The relative path when resolving the agent + card. Only used when `agent` is a URL. + resolver_http_kwargs: Dictionary of arguments to provide to the + httpx client when resolving the agent card. + signature_verifier: A callable used to verify the agent card's + signatures. + + Returns: + A `Client` object. + """ + factory = ClientFactory(client_config) + if isinstance(agent, str): + return await factory.create_from_url( + agent, + interceptors=interceptors, + relative_card_path=relative_card_path, + resolver_http_kwargs=resolver_http_kwargs, + signature_verifier=signature_verifier, + ) + return factory.create(agent, interceptors) + + def minimal_agent_card( url: str, transports: list[str] | None = None ) -> AgentCard: @@ -268,15 +420,10 @@ def minimal_agent_card( if transports is None: transports = [] return AgentCard( - url=url, - preferred_transport=transports[0] if transports else None, - additional_interfaces=[ - AgentInterface(transport=t, url=url) for t in transports[1:] - ] - if len(transports) > 1 - else [], - supports_authenticated_extended_card=True, - capabilities=AgentCapabilities(), + supported_interfaces=[ + AgentInterface(protocol_binding=t, url=url) for t in transports + ], + capabilities=AgentCapabilities(extended_agent_card=True), default_input_modes=[], default_output_modes=[], description='', diff --git a/src/a2a/client/client_task_manager.py b/src/a2a/client/client_task_manager.py deleted file mode 100644 index 060983e13..000000000 --- a/src/a2a/client/client_task_manager.py +++ /dev/null @@ -1,192 +0,0 @@ -import logging - -from a2a.client.errors import ( - A2AClientInvalidArgsError, - A2AClientInvalidStateError, -) -from a2a.server.events.event_queue import Event -from a2a.types import ( - Message, - Task, - TaskArtifactUpdateEvent, - TaskState, - TaskStatus, - TaskStatusUpdateEvent, -) -from a2a.utils import append_artifact_to_task - - -logger = logging.getLogger(__name__) - - -class ClientTaskManager: - """Helps manage a task's lifecycle during execution of a request. - - Responsible for retrieving, saving, and updating the `Task` object based on - events received from the agent. - """ - - def __init__( - self, - ) -> None: - """Initializes the `ClientTaskManager`.""" - self._current_task: Task | None = None - self._task_id: str | None = None - self._context_id: str | None = None - - def get_task(self) -> Task | None: - """Retrieves the current task object, either from memory. - - If `task_id` is set, it returns `_current_task` otherwise None. - - Returns: - The `Task` object if found, otherwise `None`. - """ - if not self._task_id: - logger.debug('task_id is not set, cannot get task.') - return None - - return self._current_task - - def get_task_or_raise(self) -> Task: - """Retrieves the current task object. - - Returns: - The `Task` object. - - Raises: - A2AClientInvalidStateError: If there is no current known Task. - """ - if not (task := self.get_task()): - # Note: The source of this error is either from bad client usage - # or from the server sending invalid updates. It indicates that this - # task manager has not consumed any information about a task, yet - # the caller is attempting to retrieve the current state of the task - # it expects to be present. - raise A2AClientInvalidStateError('no current Task') - return task - - async def save_task_event( - self, event: Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ) -> Task | None: - """Processes a task-related event (Task, Status, Artifact) and saves the updated task state. - - Ensures task and context IDs match or are set from the event. - - Args: - event: The task-related event (`Task`, `TaskStatusUpdateEvent`, or `TaskArtifactUpdateEvent`). - - Returns: - The updated `Task` object after processing the event. - - Raises: - ClientError: If the task ID in the event conflicts with the TaskManager's ID - when the TaskManager's ID is already set. - """ - if isinstance(event, Task): - if self._current_task: - raise A2AClientInvalidArgsError( - 'Task is already set, create new manager for new tasks.' - ) - await self._save_task(event) - return event - task_id_from_event = ( - event.id if isinstance(event, Task) else event.task_id - ) - if not self._task_id: - self._task_id = task_id_from_event - if not self._context_id: - self._context_id = event.context_id - - logger.debug( - 'Processing save of task event of type %s for task_id: %s', - type(event).__name__, - task_id_from_event, - ) - - task = self._current_task - if not task: - task = Task( - status=TaskStatus(state=TaskState.unknown), - id=task_id_from_event, - context_id=self._context_id if self._context_id else '', - ) - if isinstance(event, TaskStatusUpdateEvent): - logger.debug( - 'Updating task %s status to: %s', - event.task_id, - event.status.state, - ) - if event.status.message: - if not task.history: - task.history = [event.status.message] - else: - task.history.append(event.status.message) - if event.metadata: - if not task.metadata: - task.metadata = {} - task.metadata.update(event.metadata) - task.status = event.status - else: - logger.debug('Appending artifact to task %s', task.id) - append_artifact_to_task(task, event) - self._current_task = task - return task - - async def process(self, event: Event) -> Event: - """Processes an event, updates the task state if applicable, stores it, and returns the event. - - If the event is task-related (`Task`, `TaskStatusUpdateEvent`, `TaskArtifactUpdateEvent`), - the internal task state is updated and persisted. - - Args: - event: The event object received from the agent. - - Returns: - The same event object that was processed. - """ - if isinstance( - event, Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ): - await self.save_task_event(event) - - return event - - async def _save_task(self, task: Task) -> None: - """Saves the given task to the `_current_task` and updated `_task_id` and `_context_id`. - - Args: - task: The `Task` object to save. - """ - logger.debug('Saving task with id: %s', task.id) - self._current_task = task - if not self._task_id: - logger.info('New task created with id: %s', task.id) - self._task_id = task.id - self._context_id = task.context_id - - def update_with_message(self, message: Message, task: Task) -> Task: - """Updates a task object adding a new message to its history. - - If the task has a message in its current status, that message is moved - to the history first. - - Args: - message: The new `Message` to add to the history. - task: The `Task` object to update. - - Returns: - The updated `Task` object (updated in-place). - """ - if task.status.message: - if task.history: - task.history.append(task.status.message) - else: - task.history = [task.status.message] - task.status.message = None - if task.history: - task.history.append(message) - else: - task.history = [message] - self._current_task = task - return task diff --git a/src/a2a/client/errors.py b/src/a2a/client/errors.py index 106a05d68..4d3802d11 100644 --- a/src/a2a/client/errors.py +++ b/src/a2a/client/errors.py @@ -1,115 +1,19 @@ """Custom exceptions for the A2A client.""" -from a2a.types import JSONRPCErrorResponse +from a2a.utils.errors import A2AError -class A2AClientError(Exception): +class A2AClientError(A2AError): """Base exception for A2A Client errors.""" -class A2AClientHTTPError(A2AClientError): - """Client exception for HTTP errors received from the server.""" +class AgentCardResolutionError(A2AClientError): + """Exception raised when an agent card cannot be resolved.""" - def __init__(self, status_code: int, message: str): - """Initializes the A2AClientHTTPError. - - Args: - status_code: The HTTP status code of the response. - message: A descriptive error message. - """ + def __init__(self, message: str, status_code: int | None = None) -> None: + super().__init__(message) self.status_code = status_code - self.message = message - super().__init__(f'HTTP Error {status_code}: {message}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing structured attributes.""" - return ( - f'{self.__class__.__name__}(' - f'status_code={self.status_code!r}, ' - f'message={self.message!r})' - ) - - -class A2AClientJSONError(A2AClientError): - """Client exception for JSON errors during response parsing or validation.""" - - def __init__(self, message: str): - """Initializes the A2AClientJSONError. - - Args: - message: A descriptive error message. - """ - self.message = message - super().__init__(f'JSON Error: {message}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing structured attributes.""" - return f'{self.__class__.__name__}(message={self.message!r})' class A2AClientTimeoutError(A2AClientError): - """Client exception for timeout errors during a request.""" - - def __init__(self, message: str): - """Initializes the A2AClientTimeoutError. - - Args: - message: A descriptive error message. - """ - self.message = message - super().__init__(f'Timeout Error: {message}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing structured attributes.""" - return f'{self.__class__.__name__}(message={self.message!r})' - - -class A2AClientInvalidArgsError(A2AClientError): - """Client exception for invalid arguments passed to a method.""" - - def __init__(self, message: str): - """Initializes the A2AClientInvalidArgsError. - - Args: - message: A descriptive error message. - """ - self.message = message - super().__init__(f'Invalid arguments error: {message}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing structured attributes.""" - return f'{self.__class__.__name__}(message={self.message!r})' - - -class A2AClientInvalidStateError(A2AClientError): - """Client exception for an invalid client state.""" - - def __init__(self, message: str): - """Initializes the A2AClientInvalidStateError. - - Args: - message: A descriptive error message. - """ - self.message = message - super().__init__(f'Invalid state error: {message}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing structured attributes.""" - return f'{self.__class__.__name__}(message={self.message!r})' - - -class A2AClientJSONRPCError(A2AClientError): - """Client exception for JSON-RPC errors returned by the server.""" - - def __init__(self, error: JSONRPCErrorResponse): - """Initializes the A2AClientJsonRPCError. - - Args: - error: The JSON-RPC error object. - """ - self.error = error.error - super().__init__(f'JSON-RPC Error {error.error}') - - def __repr__(self) -> str: - """Returns an unambiguous representation showing the JSON-RPC error object.""" - return f'{self.__class__.__name__}({self.error!r})' + """Exception for timeout errors during a request.""" diff --git a/src/a2a/client/helpers.py b/src/a2a/client/helpers.py deleted file mode 100644 index 930c71e6b..000000000 --- a/src/a2a/client/helpers.py +++ /dev/null @@ -1,22 +0,0 @@ -"""Helper functions for the A2A client.""" - -from uuid import uuid4 - -from a2a.types import Message, Part, Role, TextPart - - -def create_text_message_object( - role: Role = Role.user, content: str = '' -) -> Message: - """Create a Message object containing a single TextPart. - - Args: - role: The role of the message sender (user or agent). Defaults to Role.user. - content: The text content of the message. Defaults to an empty string. - - Returns: - A `Message` object with a new UUID message_id. - """ - return Message( - role=role, parts=[Part(TextPart(text=content))], message_id=str(uuid4()) - ) diff --git a/src/a2a/client/interceptors.py b/src/a2a/client/interceptors.py new file mode 100644 index 000000000..9903708f3 --- /dev/null +++ b/src/a2a/client/interceptors.py @@ -0,0 +1,51 @@ +from __future__ import annotations + +from abc import ABC, abstractmethod +from dataclasses import dataclass +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from a2a.client.client import ClientCallContext + +from a2a.types.a2a_pb2 import ( # noqa: TC001 + AgentCard, +) + + +@dataclass +class BeforeArgs: + """Arguments passed to the interceptor before a method call.""" + + input: Any + method: str + agent_card: AgentCard + context: ClientCallContext | None = None + early_return: Any | None = None + + +@dataclass +class AfterArgs: + """Arguments passed to the interceptor after a method call completes.""" + + result: Any + method: str + agent_card: AgentCard + context: ClientCallContext | None = None + early_return: bool = False + + +class ClientCallInterceptor(ABC): + """An abstract base class for client-side call interceptors. + + Interceptors can inspect and modify requests before they are sent, + which is ideal for concerns like authentication, logging, or tracing. + """ + + @abstractmethod + async def before(self, args: BeforeArgs) -> None: + """Invoked before transport method.""" + + @abstractmethod + async def after(self, args: AfterArgs) -> None: + """Invoked after transport method.""" diff --git a/src/a2a/client/legacy.py b/src/a2a/client/legacy.py deleted file mode 100644 index 4318543d6..000000000 --- a/src/a2a/client/legacy.py +++ /dev/null @@ -1,344 +0,0 @@ -"""Backwards compatibility layer for legacy A2A clients.""" - -import warnings - -from collections.abc import AsyncGenerator -from typing import Any - -import httpx - -from a2a.client.errors import A2AClientJSONRPCError -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor -from a2a.client.transports.jsonrpc import JsonRpcTransport -from a2a.types import ( - AgentCard, - CancelTaskRequest, - CancelTaskResponse, - CancelTaskSuccessResponse, - GetTaskPushNotificationConfigParams, - GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigSuccessResponse, - GetTaskRequest, - GetTaskResponse, - GetTaskSuccessResponse, - JSONRPCErrorResponse, - SendMessageRequest, - SendMessageResponse, - SendMessageSuccessResponse, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SendStreamingMessageSuccessResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse, - SetTaskPushNotificationConfigSuccessResponse, - TaskIdParams, - TaskResubscriptionRequest, -) - - -class A2AClient: - """[DEPRECATED] Backwards compatibility wrapper for the JSON-RPC client.""" - - def __init__( - self, - httpx_client: httpx.AsyncClient, - agent_card: AgentCard | None = None, - url: str | None = None, - interceptors: list[ClientCallInterceptor] | None = None, - ): - warnings.warn( - 'A2AClient is deprecated and will be removed in a future version. ' - 'Use ClientFactory to create a client with a JSON-RPC transport.', - DeprecationWarning, - stacklevel=2, - ) - self._transport = JsonRpcTransport( - httpx_client, agent_card, url, interceptors - ) - - async def send_message( - self, - request: SendMessageRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> SendMessageResponse: - """Sends a non-streaming message request to the agent. - - Args: - request: The `SendMessageRequest` object containing the message and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `SendMessageResponse` object containing the agent's response (Task or Message) or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - - try: - result = await self._transport.send_message( - request.params, context=context - ) - return SendMessageResponse( - root=SendMessageSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - except A2AClientJSONRPCError as e: - return SendMessageResponse(JSONRPCErrorResponse(error=e.error)) - - async def send_message_streaming( - self, - request: SendStreamingMessageRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> AsyncGenerator[SendStreamingMessageResponse, None]: - """Sends a streaming message request to the agent and yields responses as they arrive. - - This method uses Server-Sent Events (SSE) to receive a stream of updates from the agent. - - Args: - request: The `SendStreamingMessageRequest` object containing the message and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. A default `timeout=None` is set but can be overridden. - context: The client call context. - - Yields: - `SendStreamingMessageResponse` objects as they are received in the SSE stream. - These can be Task, Message, TaskStatusUpdateEvent, or TaskArtifactUpdateEvent. - - Raises: - A2AClientHTTPError: If an HTTP or SSE protocol error occurs during the request. - A2AClientJSONError: If an SSE event data cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - - async for result in self._transport.send_message_streaming( - request.params, context=context - ): - yield SendStreamingMessageResponse( - root=SendStreamingMessageSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - - async def get_task( - self, - request: GetTaskRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> GetTaskResponse: - """Retrieves the current state and history of a specific task. - - Args: - request: The `GetTaskRequest` object specifying the task ID and history length. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `GetTaskResponse` object containing the Task or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - try: - result = await self._transport.get_task( - request.params, context=context - ) - return GetTaskResponse( - root=GetTaskSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - except A2AClientJSONRPCError as e: - return GetTaskResponse(root=JSONRPCErrorResponse(error=e.error)) - - async def cancel_task( - self, - request: CancelTaskRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> CancelTaskResponse: - """Requests the agent to cancel a specific task. - - Args: - request: The `CancelTaskRequest` object specifying the task ID. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `CancelTaskResponse` object containing the updated Task with canceled status or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - try: - result = await self._transport.cancel_task( - request.params, context=context - ) - return CancelTaskResponse( - root=CancelTaskSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - except A2AClientJSONRPCError as e: - return CancelTaskResponse(JSONRPCErrorResponse(error=e.error)) - - async def set_task_callback( - self, - request: SetTaskPushNotificationConfigRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> SetTaskPushNotificationConfigResponse: - """Sets or updates the push notification configuration for a specific task. - - Args: - request: The `SetTaskPushNotificationConfigRequest` object specifying the task ID and configuration. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `SetTaskPushNotificationConfigResponse` object containing the confirmation or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - try: - result = await self._transport.set_task_callback( - request.params, context=context - ) - return SetTaskPushNotificationConfigResponse( - root=SetTaskPushNotificationConfigSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - except A2AClientJSONRPCError as e: - return SetTaskPushNotificationConfigResponse( - JSONRPCErrorResponse(error=e.error) - ) - - async def get_task_callback( - self, - request: GetTaskPushNotificationConfigRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> GetTaskPushNotificationConfigResponse: - """Retrieves the push notification configuration for a specific task. - - Args: - request: The `GetTaskPushNotificationConfigRequest` object specifying the task ID. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `GetTaskPushNotificationConfigResponse` object containing the configuration or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - params = request.params - if isinstance(params, TaskIdParams): - params = GetTaskPushNotificationConfigParams(id=request.params.id) - try: - result = await self._transport.get_task_callback( - params, context=context - ) - return GetTaskPushNotificationConfigResponse( - root=GetTaskPushNotificationConfigSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - except A2AClientJSONRPCError as e: - return GetTaskPushNotificationConfigResponse( - JSONRPCErrorResponse(error=e.error) - ) - - async def resubscribe( - self, - request: TaskResubscriptionRequest, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> AsyncGenerator[SendStreamingMessageResponse, None]: - """Reconnects to get task updates. - - This method uses Server-Sent Events (SSE) to receive a stream of updates from the agent. - - Args: - request: The `TaskResubscriptionRequest` object containing the task information to reconnect to. - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. A default `timeout=None` is set but can be overridden. - context: The client call context. - - Yields: - `SendStreamingMessageResponse` objects as they are received in the SSE stream. - These can be Task, Message, TaskStatusUpdateEvent, or TaskArtifactUpdateEvent. - - Raises: - A2AClientHTTPError: If an HTTP or SSE protocol error occurs during the request. - A2AClientJSONError: If an SSE event data cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - - async for result in self._transport.resubscribe( - request.params, context=context - ): - yield SendStreamingMessageResponse( - root=SendStreamingMessageSuccessResponse( - id=request.id, jsonrpc='2.0', result=result - ) - ) - - async def get_card( - self, - *, - http_kwargs: dict[str, Any] | None = None, - context: ClientCallContext | None = None, - ) -> AgentCard: - """Retrieves the authenticated card (if necessary) or the public one. - - Args: - http_kwargs: Optional dictionary of keyword arguments to pass to the - underlying httpx.post request. - context: The client call context. - - Returns: - A `AgentCard` object containing the card or an error. - - Raises: - A2AClientHTTPError: If an HTTP error occurs during the request. - A2AClientJSONError: If the response body cannot be decoded as JSON or validated. - """ - if not context and http_kwargs: - context = ClientCallContext(state={'http_kwargs': http_kwargs}) - return await self._transport.get_card(context=context) diff --git a/src/a2a/client/legacy_grpc.py b/src/a2a/client/legacy_grpc.py deleted file mode 100644 index 0b62b0096..000000000 --- a/src/a2a/client/legacy_grpc.py +++ /dev/null @@ -1,44 +0,0 @@ -"""Backwards compatibility layer for the legacy A2A gRPC client.""" - -import warnings - -from typing import TYPE_CHECKING - -from a2a.client.transports.grpc import GrpcTransport -from a2a.types import AgentCard - - -if TYPE_CHECKING: - from a2a.grpc.a2a_pb2_grpc import A2AServiceStub - - -class A2AGrpcClient(GrpcTransport): - """[DEPRECATED] Backwards compatibility wrapper for the gRPC client.""" - - def __init__( # pylint: disable=super-init-not-called - self, - grpc_stub: 'A2AServiceStub', - agent_card: AgentCard, - ): - warnings.warn( - 'A2AGrpcClient is deprecated and will be removed in a future version. ' - 'Use ClientFactory to create a client with a gRPC transport.', - DeprecationWarning, - stacklevel=2, - ) - # The old gRPC client accepted a stub directly. The new one accepts a - # channel and builds the stub itself. We just have a stub here, so we - # need to handle initialization ourselves. - self.stub = grpc_stub - self.agent_card = agent_card - self._needs_extended_card = ( - agent_card.supports_authenticated_extended_card - if agent_card - else True - ) - - class _NopChannel: - async def close(self) -> None: - pass - - self.channel = _NopChannel() diff --git a/src/a2a/client/middleware.py b/src/a2a/client/middleware.py deleted file mode 100644 index 73ada982f..000000000 --- a/src/a2a/client/middleware.py +++ /dev/null @@ -1,53 +0,0 @@ -from __future__ import annotations - -from abc import ABC, abstractmethod -from collections.abc import MutableMapping # noqa: TC003 -from typing import TYPE_CHECKING, Any - -from pydantic import BaseModel, Field - - -if TYPE_CHECKING: - from a2a.types import AgentCard - - -class ClientCallContext(BaseModel): - """A context passed with each client call, allowing for call-specific. - - configuration and data passing. Such as authentication details or - request deadlines. - """ - - state: MutableMapping[str, Any] = Field(default_factory=dict) - - -class ClientCallInterceptor(ABC): - """An abstract base class for client-side call interceptors. - - Interceptors can inspect and modify requests before they are sent, - which is ideal for concerns like authentication, logging, or tracing. - """ - - @abstractmethod - async def intercept( - self, - method_name: str, - request_payload: dict[str, Any], - http_kwargs: dict[str, Any], - agent_card: AgentCard | None, - context: ClientCallContext | None, - ) -> tuple[dict[str, Any], dict[str, Any]]: - """ - Intercepts a client call before the request is sent. - - Args: - method_name: The name of the RPC method (e.g., 'message/send'). - request_payload: The JSON RPC request payload dictionary. - http_kwargs: The keyword arguments for the httpx request. - agent_card: The AgentCard associated with the client. - context: The ClientCallContext for this specific call. - - Returns: - A tuple containing the (potentially modified) request_payload - and http_kwargs. - """ diff --git a/src/a2a/client/optionals.py b/src/a2a/client/optionals.py index f55f01862..9344a811d 100644 --- a/src/a2a/client/optionals.py +++ b/src/a2a/client/optionals.py @@ -3,14 +3,14 @@ # Attempt to import the optional module try: - from grpc.aio import Channel # pyright: ignore[reportAssignmentType] + from grpc.aio import Channel # type: ignore[reportMissingModuleSource] except ImportError: - # If grpc.aio is not available, define a dummy type for type checking. - # This dummy type will only be used by type checkers. + # If grpc.aio is not available, define a stub type for type checking. + # This stub type will only be used by type checkers. if TYPE_CHECKING: class Channel: # type: ignore[no-redef] - """Dummy class for type hinting when grpc.aio is not available.""" + """Stub class for type hinting when grpc.aio is not available.""" else: Channel = None # At runtime, pd will be None if the import failed. diff --git a/src/a2a/client/service_parameters.py b/src/a2a/client/service_parameters.py new file mode 100644 index 000000000..39fe79ce1 --- /dev/null +++ b/src/a2a/client/service_parameters.py @@ -0,0 +1,64 @@ +from collections.abc import Callable +from typing import TypeAlias + +from a2a.extensions.common import ( + HTTP_EXTENSION_HEADER, + get_requested_extensions, +) + + +ServiceParameters: TypeAlias = dict[str, str] +ServiceParametersUpdate: TypeAlias = Callable[[ServiceParameters], None] + + +class ServiceParametersFactory: + """Factory for creating ServiceParameters.""" + + @staticmethod + def create(updates: list[ServiceParametersUpdate]) -> ServiceParameters: + """Create ServiceParameters from a list of updates. + + Args: + updates: List of update functions to apply. + + Returns: + The created ServiceParameters dictionary. + """ + return ServiceParametersFactory.create_from(None, updates) + + @staticmethod + def create_from( + service_parameters: ServiceParameters | None, + updates: list[ServiceParametersUpdate], + ) -> ServiceParameters: + """Create new ServiceParameters from existing ones and apply updates. + + Args: + service_parameters: Optional existing ServiceParameters to start from. + updates: List of update functions to apply. + + Returns: + New ServiceParameters dictionary. + """ + result = service_parameters.copy() if service_parameters else {} + for update in updates: + update(result) + return result + + +def with_a2a_extensions(extensions: list[str]) -> ServiceParametersUpdate: + """Create a ServiceParametersUpdate that merges A2A extension URIs. + + Unions the supplied URIs with any already present in the A2A-Extensions + parameter, deduplicating and emitting them in sorted order. Repeated + calls accumulate rather than overwrite. + """ + + def update(parameters: ServiceParameters) -> None: + if not extensions: + return + existing = parameters.get(HTTP_EXTENSION_HEADER, '') + merged = sorted(get_requested_extensions([existing, *extensions])) + parameters[HTTP_EXTENSION_HEADER] = ','.join(merged) + + return update diff --git a/src/a2a/client/transports/base.py b/src/a2a/client/transports/base.py index 5bbf89adb..e46aae25e 100644 --- a/src/a2a/client/transports/base.py +++ b/src/a2a/client/transports/base.py @@ -1,21 +1,27 @@ from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator, Callable +from collections.abc import AsyncGenerator from types import TracebackType from typing_extensions import Self -from a2a.client.middleware import ClientCallContext -from a2a.types import ( +from a2a.client.client import ClientCallContext +from a2a.types.a2a_pb2 import ( AgentCard, - GetTaskPushNotificationConfigParams, - Message, - MessageSendParams, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskStatusUpdateEvent, ) @@ -38,23 +44,19 @@ async def __aexit__( @abstractmethod async def send_message( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> Task | Message: + ) -> SendMessageResponse: """Sends a non-streaming message request to the agent.""" @abstractmethod async def send_message_streaming( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Sends a streaming message request to the agent and yields responses as they arrive.""" return yield @@ -62,66 +64,85 @@ async def send_message_streaming( @abstractmethod async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task.""" + @abstractmethod + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + @abstractmethod async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task.""" @abstractmethod - async def set_task_callback( + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task.""" @abstractmethod - async def get_task_callback( + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task.""" @abstractmethod - async def resubscribe( + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + + @abstractmethod + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + + @abstractmethod + async def subscribe( self, - request: TaskIdParams, + request: SubscribeToTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Reconnects to get task updates.""" return yield @abstractmethod - async def get_card( + async def get_extended_agent_card( self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: - """Retrieves the AgentCard.""" + """Retrieves the Extended AgentCard.""" @abstractmethod async def close(self) -> None: diff --git a/src/a2a/client/transports/grpc.py b/src/a2a/client/transports/grpc.py index 085f8c970..24c4b5385 100644 --- a/src/a2a/client/transports/grpc.py +++ b/src/a2a/client/transports/grpc.py @@ -1,43 +1,113 @@ import logging from collections.abc import AsyncGenerator, Callable +from functools import wraps +from typing import Any, NoReturn, cast + +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError, A2AClientTimeoutError try: - import grpc + import grpc # type: ignore[reportMissingModuleSource] + + from grpc_status import rpc_status except ImportError as e: raise ImportError( - 'A2AGrpcClient requires grpcio and grpcio-tools to be installed. ' + 'A2AGrpcClient requires grpcio, grpcio-tools, and grpcio-status to be installed. ' 'Install with: ' "'pip install a2a-sdk[grpc]'" ) from e +from google.rpc import ( # type: ignore[reportMissingModuleSource] + error_details_pb2, +) + from a2a.client.client import ClientConfig -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor from a2a.client.optionals import Channel from a2a.client.transports.base import ClientTransport -from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.grpc import a2a_pb2, a2a_pb2_grpc -from a2a.types import ( +from a2a.types import a2a_pb2_grpc +from a2a.types.a2a_pb2 import ( AgentCard, - GetTaskPushNotificationConfigParams, - Message, - MessageSendParams, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskStatusUpdateEvent, ) -from a2a.utils import proto_utils +from a2a.utils.constants import PROTOCOL_VERSION_CURRENT, VERSION_HEADER +from a2a.utils.errors import A2A_REASON_TO_ERROR, A2AError +from a2a.utils.proto_utils import bad_request_to_validation_errors from a2a.utils.telemetry import SpanKind, trace_class logger = logging.getLogger(__name__) +def _map_grpc_error(e: grpc.aio.AioRpcError) -> NoReturn: + + if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED: + raise A2AClientTimeoutError('Client Request timed out') from e + + # Use grpc_status to cleanly extract the rich Status from the call + status = rpc_status.from_call(cast('grpc.Call', e)) + data = None + + if status is not None: + exception_cls: type[A2AError] | None = None + for detail in status.details: + if detail.Is(error_details_pb2.ErrorInfo.DESCRIPTOR): + error_info = error_details_pb2.ErrorInfo() + detail.Unpack(error_info) + if error_info.domain == 'a2a-protocol.org': + exception_cls = A2A_REASON_TO_ERROR.get(error_info.reason) + elif detail.Is(error_details_pb2.BadRequest.DESCRIPTOR): + bad_request = error_details_pb2.BadRequest() + detail.Unpack(bad_request) + data = {'errors': bad_request_to_validation_errors(bad_request)} + + if exception_cls: + raise exception_cls(status.message, data=data) from e + + raise A2AClientError(f'gRPC Error {e.code().name}: {e.details()}') from e + + +def _handle_grpc_exception(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return await func(*args, **kwargs) + except grpc.aio.AioRpcError as e: + _map_grpc_error(e) + + return wrapper + + +def _handle_grpc_stream_exception( + func: Callable[..., Any], +) -> Callable[..., Any]: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + async for item in func(*args, **kwargs): + yield item + except grpc.aio.AioRpcError as e: + _map_grpc_error(e) + + return wrapper + + @trace_class(kind=SpanKind.CLIENT) class GrpcTransport(ClientTransport): """A gRPC transport for the A2A client.""" @@ -46,30 +116,11 @@ def __init__( self, channel: Channel, agent_card: AgentCard | None, - extensions: list[str] | None = None, ): """Initializes the GrpcTransport.""" self.agent_card = agent_card self.channel = channel self.stub = a2a_pb2_grpc.A2AServiceStub(channel) - self._needs_extended_card = ( - agent_card.supports_authenticated_extended_card - if agent_card - else True - ) - self.extensions = extensions - - def _get_grpc_metadata( - self, - extensions: list[str] | None = None, - ) -> list[tuple[str, str]] | None: - """Creates gRPC metadata for extensions.""" - extensions_to_use = extensions or self.extensions - if extensions_to_use: - return [ - (HTTP_EXTENSION_HEADER.lower(), ','.join(extensions_to_use)) - ] - return None @classmethod def create( @@ -77,174 +128,221 @@ def create( card: AgentCard, url: str, config: ClientConfig, - interceptors: list[ClientCallInterceptor], ) -> 'GrpcTransport': """Creates a gRPC transport for the A2A client.""" if config.grpc_channel_factory is None: raise ValueError('grpc_channel_factory is required when using gRPC') - return cls(config.grpc_channel_factory(url), card, config.extensions) + return cls(config.grpc_channel_factory(url), card) + @_handle_grpc_exception async def send_message( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> Task | Message: + ) -> SendMessageResponse: """Sends a non-streaming message request to the agent.""" - response = await self.stub.SendMessage( - a2a_pb2.SendMessageRequest( - request=proto_utils.ToProto.message(request.message), - configuration=proto_utils.ToProto.message_send_configuration( - request.configuration - ), - metadata=proto_utils.ToProto.metadata(request.metadata), - ), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.SendMessage, + request, + context, ) - if response.HasField('task'): - return proto_utils.FromProto.task(response.task) - return proto_utils.FromProto.message(response.msg) + @_handle_grpc_stream_exception async def send_message_streaming( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Sends a streaming message request to the agent and yields responses as they arrive.""" - stream = self.stub.SendStreamingMessage( - a2a_pb2.SendMessageRequest( - request=proto_utils.ToProto.message(request.message), - configuration=proto_utils.ToProto.message_send_configuration( - request.configuration - ), - metadata=proto_utils.ToProto.metadata(request.metadata), - ), - metadata=self._get_grpc_metadata(extensions), - ) - while True: - response = await stream.read() - if response == grpc.aio.EOF: # pyright: ignore[reportAttributeAccessIssue] - break - yield proto_utils.FromProto.stream_response(response) + async for response in self._call_grpc_stream( + self.stub.SendStreamingMessage, + request, + context, + ): + yield response - async def resubscribe( + @_handle_grpc_stream_exception + async def subscribe( self, - request: TaskIdParams, + request: SubscribeToTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Reconnects to get task updates.""" - stream = self.stub.TaskSubscription( - a2a_pb2.TaskSubscriptionRequest(name=f'tasks/{request.id}'), - metadata=self._get_grpc_metadata(extensions), - ) - while True: - response = await stream.read() - if response == grpc.aio.EOF: # pyright: ignore[reportAttributeAccessIssue] - break - yield proto_utils.FromProto.stream_response(response) + async for response in self._call_grpc_stream( + self.stub.SubscribeToTask, + request, + context, + ): + yield response + @_handle_grpc_exception async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task.""" - task = await self.stub.GetTask( - a2a_pb2.GetTaskRequest( - name=f'tasks/{request.id}', - history_length=request.history_length, - ), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.GetTask, + request, + context, ) - return proto_utils.FromProto.task(task) + @_handle_grpc_exception + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + return await self._call_grpc( + self.stub.ListTasks, + request, + context, + ) + + @_handle_grpc_exception async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task.""" - task = await self.stub.CancelTask( - a2a_pb2.CancelTaskRequest(name=f'tasks/{request.id}'), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.CancelTask, + request, + context, ) - return proto_utils.FromProto.task(task) - async def set_task_callback( + @_handle_grpc_exception + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task.""" - config = await self.stub.CreateTaskPushNotificationConfig( - a2a_pb2.CreateTaskPushNotificationConfigRequest( - parent=f'tasks/{request.task_id}', - config_id=request.push_notification_config.id, - config=proto_utils.ToProto.task_push_notification_config( - request - ), - ), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.CreateTaskPushNotificationConfig, + request, + context, ) - return proto_utils.FromProto.task_push_notification_config(config) - async def get_task_callback( + @_handle_grpc_exception + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task.""" - config = await self.stub.GetTaskPushNotificationConfig( - a2a_pb2.GetTaskPushNotificationConfigRequest( - name=f'tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', - ), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.GetTaskPushNotificationConfig, + request, + context, + ) + + @_handle_grpc_exception + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + return await self._call_grpc( + self.stub.ListTaskPushNotificationConfigs, + request, + context, + ) + + @_handle_grpc_exception + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + await self._call_grpc( + self.stub.DeleteTaskPushNotificationConfig, + request, + context, ) - return proto_utils.FromProto.task_push_notification_config(config) - async def get_card( + @_handle_grpc_exception + async def get_extended_agent_card( self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: """Retrieves the agent's card.""" card = self.agent_card - if card and not self._needs_extended_card: + if card and not card.capabilities.extended_agent_card: return card - if card is None and not self._needs_extended_card: - raise ValueError('Agent card is not available.') - card_pb = await self.stub.GetAgentCard( - a2a_pb2.GetAgentCardRequest(), - metadata=self._get_grpc_metadata(extensions), + return await self._call_grpc( + self.stub.GetExtendedAgentCard, + request, + context, ) - card = proto_utils.FromProto.agent_card(card_pb) - if signature_verifier: - signature_verifier(card) - - self.agent_card = card - self._needs_extended_card = False - return card async def close(self) -> None: """Closes the gRPC channel.""" await self.channel.close() + + def _get_grpc_metadata( + self, context: ClientCallContext | None + ) -> list[tuple[str, str]]: + metadata = [(VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT)] + if context and context.service_parameters: + for key, value in context.service_parameters.items(): + metadata.append((key.lower(), value)) + return metadata + + def _get_grpc_timeout( + self, context: ClientCallContext | None + ) -> float | None: + return context.timeout if context else None + + async def _call_grpc( + self, + method: Callable[..., Any], + request: Any, + context: ClientCallContext | None, + **kwargs: Any, + ) -> Any: + + return await method( + request, + metadata=self._get_grpc_metadata(context), + timeout=self._get_grpc_timeout(context), + **kwargs, + ) + + async def _call_grpc_stream( + self, + method: Callable[..., Any], + request: Any, + context: ClientCallContext | None, + **kwargs: Any, + ) -> AsyncGenerator[StreamResponse]: + + stream = method( + request, + metadata=self._get_grpc_metadata(context), + timeout=self._get_grpc_timeout(context), + **kwargs, + ) + while True: + response = await stream.read() + if response == grpc.aio.EOF: # pyright: ignore[reportAttributeAccessIssue] + break + yield response diff --git a/src/a2a/client/transports/http_helpers.py b/src/a2a/client/transports/http_helpers.py new file mode 100644 index 000000000..0a73ed83c --- /dev/null +++ b/src/a2a/client/transports/http_helpers.py @@ -0,0 +1,155 @@ +import json + +from collections.abc import AsyncGenerator, Callable, Iterator +from contextlib import contextmanager +from typing import Any, NoReturn + +import httpx + +from httpx_sse import EventSource, SSEError + +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError, A2AClientTimeoutError + + +def _default_sse_error_handler(sse_data: str) -> NoReturn: + raise A2AClientError(f'SSE stream error event received: {sse_data}') + + +@contextmanager +def handle_http_exceptions( + status_error_handler: Callable[[httpx.HTTPStatusError], NoReturn] + | None = None, +) -> Iterator[None]: + """Handles common HTTP exceptions for REST and JSON-RPC transports. + + Args: + status_error_handler: Optional handler for `httpx.HTTPStatusError`. + If provided, this handler should raise an appropriate domain-specific exception. + If not provided, a default `A2AClientError` will be raised. + """ + try: + yield + except httpx.TimeoutException as e: + raise A2AClientTimeoutError('Client Request timed out') from e + except httpx.HTTPStatusError as e: + if status_error_handler: + status_error_handler(e) + raise A2AClientError(f'HTTP Error {e.response.status_code}: {e}') from e + except SSEError as e: + raise A2AClientError( + f'Invalid SSE response or protocol error: {e}' + ) from e + except httpx.RequestError as e: + raise A2AClientError(f'Network communication error: {e}') from e + except json.JSONDecodeError as e: + raise A2AClientError(f'JSON Decode Error: {e}') from e + + +def get_http_args(context: ClientCallContext | None) -> dict[str, Any]: + """Extracts HTTP arguments from the client call context.""" + http_kwargs: dict[str, Any] = {} + if context and context.service_parameters: + http_kwargs['headers'] = context.service_parameters.copy() + if context and context.timeout is not None: + http_kwargs['timeout'] = httpx.Timeout(context.timeout) + return http_kwargs + + +async def send_http_request( + httpx_client: httpx.AsyncClient, + request: httpx.Request, + status_error_handler: Callable[[httpx.HTTPStatusError], NoReturn] + | None = None, +) -> dict[str, Any]: + """Sends an HTTP request and parses the JSON response, handling common exceptions.""" + with handle_http_exceptions(status_error_handler): + response = await httpx_client.send(request) + response.raise_for_status() + return response.json() + + +async def send_http_stream_request( + httpx_client: httpx.AsyncClient, + method: str, + url: str, + status_error_handler: Callable[[httpx.HTTPStatusError], NoReturn] + | None = None, + sse_error_handler: Callable[[str], NoReturn] = _default_sse_error_handler, + **kwargs: Any, +) -> AsyncGenerator[str]: + """Sends a streaming HTTP request, yielding SSE data strings and handling exceptions. + + Args: + httpx_client: The async HTTP client. + method: The HTTP method (e.g. 'POST', 'GET'). + url: The URL to send the request to. + status_error_handler: Handler for HTTP status errors. Should raise an + appropriate domain-specific exception. + sse_error_handler: Handler for SSE error events. Called with the + raw SSE data string when an ``event: error`` SSE event is received. + Should raise an appropriate domain-specific exception. + **kwargs: Additional keyword arguments forwarded to ``aconnect_sse``. + """ + with handle_http_exceptions(status_error_handler): + async with _SSEEventSource( + httpx_client, method, url, **kwargs + ) as event_source: + try: + event_source.response.raise_for_status() + except httpx.HTTPStatusError as e: + # Read upfront streaming error content immediately, otherwise lower-level handlers + # (e.g. response.json()) crash with 'ResponseNotRead' Access errors. + await event_source.response.aread() + raise e + + # If the response is not a stream, read it standardly (e.g., upfront JSON-RPC error payload) + if 'text/event-stream' not in event_source.response.headers.get( + 'content-type', '' + ): + content = await event_source.response.aread() + yield content.decode('utf-8') + return + + async for sse in event_source.aiter_sse(): + if not sse.data: + continue + if sse.event == 'error': + sse_error_handler(sse.data) + yield sse.data + + +class _SSEEventSource: + """Class-based replacement for ``httpx_sse.aconnect_sse``. + + ``aconnect_sse`` is an ``@asynccontextmanager`` whose internal async + generator gets tracked by the event loop. When the enclosing async + generator is abandoned, the event loop's generator cleanup collides + with the cascading cleanup — see https://bugs.python.org/issue38559. + + Plain ``__aenter__``/``__aexit__`` coroutines avoid this entirely. + """ + + def __init__( + self, + client: httpx.AsyncClient, + method: str, + url: str, + **kwargs: Any, + ) -> None: + headers = httpx.Headers(kwargs.pop('headers', None)) + headers.setdefault('Accept', 'text/event-stream') + headers.setdefault('Cache-Control', 'no-store') + self._request = client.build_request( + method, url, headers=headers, **kwargs + ) + self._client = client + self._response: httpx.Response | None = None + + async def __aenter__(self) -> EventSource: + self._response = await self._client.send(self._request, stream=True) + return EventSource(self._response) + + async def __aexit__(self, *args: object) -> None: + if self._response is not None: + await self._response.aclose() diff --git a/src/a2a/client/transports/jsonrpc.py b/src/a2a/client/transports/jsonrpc.py index bfa0b9517..252ea439d 100644 --- a/src/a2a/client/transports/jsonrpc.py +++ b/src/a2a/client/transports/jsonrpc.py @@ -1,57 +1,50 @@ -import json import logging -from collections.abc import AsyncGenerator, Callable -from typing import Any +from collections.abc import AsyncGenerator +from typing import Any, NoReturn from uuid import uuid4 import httpx -from httpx_sse import SSEError, aconnect_sse +from google.protobuf import json_format +from jsonrpc.jsonrpc2 import JSONRPC20Request, JSONRPC20Response -from a2a.client.card_resolver import A2ACardResolver -from a2a.client.errors import ( - A2AClientHTTPError, - A2AClientJSONError, - A2AClientJSONRPCError, - A2AClientTimeoutError, -) -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError from a2a.client.transports.base import ClientTransport -from a2a.extensions.common import update_extension_header -from a2a.types import ( +from a2a.client.transports.http_helpers import ( + get_http_args, + send_http_request, + send_http_stream_request, +) +from a2a.types.a2a_pb2 import ( AgentCard, CancelTaskRequest, - CancelTaskResponse, - GetAuthenticatedExtendedCardRequest, - GetAuthenticatedExtendedCardResponse, - GetTaskPushNotificationConfigParams, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, GetTaskRequest, - GetTaskResponse, - JSONRPCErrorResponse, - Message, - MessageSendParams, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, SendMessageRequest, SendMessageResponse, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskResubscriptionRequest, - TaskStatusUpdateEvent, ) +from a2a.utils.errors import JSON_RPC_ERROR_CODE_MAP from a2a.utils.telemetry import SpanKind, trace_class logger = logging.getLogger(__name__) +_JSON_RPC_ERROR_CODE_TO_A2A_ERROR = { + code: error_type for error_type, code in JSON_RPC_ERROR_CODE_MAP.items() +} + @trace_class(kind=SpanKind.CLIENT) class JsonRpcTransport(ClientTransport): @@ -60,378 +53,318 @@ class JsonRpcTransport(ClientTransport): def __init__( self, httpx_client: httpx.AsyncClient, - agent_card: AgentCard | None = None, - url: str | None = None, - interceptors: list[ClientCallInterceptor] | None = None, - extensions: list[str] | None = None, + agent_card: AgentCard, + url: str, ): """Initializes the JsonRpcTransport.""" - if url: - self.url = url - elif agent_card: - self.url = agent_card.url - else: - raise ValueError('Must provide either agent_card or url') - + self.url = url self.httpx_client = httpx_client self.agent_card = agent_card - self.interceptors = interceptors or [] - self._needs_extended_card = ( - agent_card.supports_authenticated_extended_card - if agent_card - else True - ) - self.extensions = extensions - - async def _apply_interceptors( - self, - method_name: str, - request_payload: dict[str, Any], - http_kwargs: dict[str, Any] | None, - context: ClientCallContext | None, - ) -> tuple[dict[str, Any], dict[str, Any]]: - final_http_kwargs = http_kwargs or {} - final_request_payload = request_payload - - for interceptor in self.interceptors: - ( - final_request_payload, - final_http_kwargs, - ) = await interceptor.intercept( - method_name, - final_request_payload, - final_http_kwargs, - self.agent_card, - context, - ) - return final_request_payload, final_http_kwargs - - def _get_http_args( - self, context: ClientCallContext | None - ) -> dict[str, Any] | None: - return context.state.get('http_kwargs') if context else None async def send_message( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> Task | Message: + ) -> SendMessageResponse: """Sends a non-streaming message request to the agent.""" - rpc_request = SendMessageRequest(params=request, id=str(uuid4())) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + rpc_request = JSONRPC20Request( + method='SendMessage', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - payload, modified_kwargs = await self._apply_interceptors( - 'message/send', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: SendMessageResponse = json_format.ParseDict( + json_rpc_response.result, SendMessageResponse() ) - response_data = await self._send_request(payload, modified_kwargs) - response = SendMessageResponse.model_validate(response_data) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - return response.root.result + return response async def send_message_streaming( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Sends a streaming message request to the agent and yields responses as they arrive.""" - rpc_request = SendStreamingMessageRequest( - params=request, id=str(uuid4()) + rpc_request = JSONRPC20Request( + method='SendStreamingMessage', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) - payload, modified_kwargs = await self._apply_interceptors( - 'message/stream', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, + async for event in self._send_stream_request( + dict(rpc_request.data), context, - ) - modified_kwargs.setdefault( - 'timeout', self.httpx_client.timeout.as_dict().get('read', None) - ) - headers = dict(self.httpx_client.headers.items()) - headers.update(modified_kwargs.get('headers', {})) - modified_kwargs['headers'] = headers - - async with aconnect_sse( - self.httpx_client, - 'POST', - self.url, - json=payload, - **modified_kwargs, - ) as event_source: - try: - event_source.response.raise_for_status() - async for sse in event_source.aiter_sse(): - if not sse.data: - continue - response = SendStreamingMessageResponse.model_validate( - json.loads(sse.data) - ) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - yield response.root.result - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError(e.response.status_code, str(e)) from e - except SSEError as e: - raise A2AClientHTTPError( - 400, f'Invalid SSE response or protocol error: {e}' - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def _send_request( - self, - rpc_request_payload: dict[str, Any], - http_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - try: - response = await self.httpx_client.post( - self.url, json=rpc_request_payload, **(http_kwargs or {}) - ) - response.raise_for_status() - return response.json() - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError(e.response.status_code, str(e)) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e + ): + yield event async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task.""" - rpc_request = GetTaskRequest(params=request, id=str(uuid4())) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + rpc_request = JSONRPC20Request( + method='GetTask', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - payload, modified_kwargs = await self._apply_interceptors( - 'tasks/get', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: Task = json_format.ParseDict(json_rpc_response.result, Task()) + return response + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + rpc_request = JSONRPC20Request( + method='ListTasks', + params=json_format.MessageToDict(request), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context ) - response_data = await self._send_request(payload, modified_kwargs) - response = GetTaskResponse.model_validate(response_data) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - return response.root.result + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: ListTasksResponse = json_format.ParseDict( + json_rpc_response.result, ListTasksResponse() + ) + return response async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task.""" - rpc_request = CancelTaskRequest(params=request, id=str(uuid4())) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + rpc_request = JSONRPC20Request( + method='CancelTask', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - payload, modified_kwargs = await self._apply_interceptors( - 'tasks/cancel', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + response_data = await self._send_request( + dict(rpc_request.data), context ) - response_data = await self._send_request(payload, modified_kwargs) - response = CancelTaskResponse.model_validate(response_data) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - return response.root.result + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: Task = json_format.ParseDict(json_rpc_response.result, Task()) + return response - async def set_task_callback( + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task.""" - rpc_request = SetTaskPushNotificationConfigRequest( - params=request, id=str(uuid4()) + rpc_request = JSONRPC20Request( + method='CreateTaskPushNotificationConfig', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) - payload, modified_kwargs = await self._apply_interceptors( - 'tasks/pushNotificationConfig/set', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + response_data = await self._send_request( + dict(rpc_request.data), context ) - response_data = await self._send_request(payload, modified_kwargs) - response = SetTaskPushNotificationConfigResponse.model_validate( - response_data + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: TaskPushNotificationConfig = json_format.ParseDict( + json_rpc_response.result, TaskPushNotificationConfig() ) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - return response.root.result + return response - async def get_task_callback( + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task.""" - rpc_request = GetTaskPushNotificationConfigRequest( - params=request, id=str(uuid4()) + rpc_request = JSONRPC20Request( + method='GetTaskPushNotificationConfig', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + response_data = await self._send_request( + dict(rpc_request.data), context ) - payload, modified_kwargs = await self._apply_interceptors( - 'tasks/pushNotificationConfig/get', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: TaskPushNotificationConfig = json_format.ParseDict( + json_rpc_response.result, TaskPushNotificationConfig() ) - response_data = await self._send_request(payload, modified_kwargs) - response = GetTaskPushNotificationConfigResponse.model_validate( - response_data + return response + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + rpc_request = JSONRPC20Request( + method='ListTaskPushNotificationConfigs', + params=json_format.MessageToDict(request), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: ListTaskPushNotificationConfigsResponse = ( + json_format.ParseDict( + json_rpc_response.result, + ListTaskPushNotificationConfigsResponse(), + ) + ) + return response + + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + rpc_request = JSONRPC20Request( + method='DeleteTaskPushNotificationConfig', + params=json_format.MessageToDict(request), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context ) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - return response.root.result + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) - async def resubscribe( + async def subscribe( self, - request: TaskIdParams, + request: SubscribeToTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Task | Message | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ]: + ) -> AsyncGenerator[StreamResponse]: """Reconnects to get task updates.""" - rpc_request = TaskResubscriptionRequest(params=request, id=str(uuid4())) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + rpc_request = JSONRPC20Request( + method='SubscribeToTask', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) - payload, modified_kwargs = await self._apply_interceptors( - 'tasks/resubscribe', - rpc_request.model_dump(mode='json', exclude_none=True), - modified_kwargs, + async for event in self._send_stream_request( + dict(rpc_request.data), context, - ) - modified_kwargs.setdefault('timeout', None) + ): + yield event - async with aconnect_sse( - self.httpx_client, - 'POST', - self.url, - json=payload, - **modified_kwargs, - ) as event_source: - try: - async for sse in event_source.aiter_sse(): - response = SendStreamingMessageResponse.model_validate_json( - sse.data - ) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - yield response.root.result - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except SSEError as e: - raise A2AClientHTTPError( - 400, f'Invalid SSE response or protocol error: {e}' - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def get_card( + async def get_extended_agent_card( self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: """Retrieves the agent's card.""" - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) card = self.agent_card - - if not card: - resolver = A2ACardResolver(self.httpx_client, self.url) - card = await resolver.get_agent_card( - http_kwargs=modified_kwargs, - signature_verifier=signature_verifier, - ) - self._needs_extended_card = ( - card.supports_authenticated_extended_card - ) - self.agent_card = card - - if not self._needs_extended_card: + if not card.capabilities.extended_agent_card: return card - request = GetAuthenticatedExtendedCardRequest(id=str(uuid4())) - payload, modified_kwargs = await self._apply_interceptors( - request.method, - request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + rpc_request = JSONRPC20Request( + method='GetExtendedAgentCard', + params=json_format.MessageToDict(request), + _id=str(uuid4()), ) response_data = await self._send_request( - payload, - modified_kwargs, + dict(rpc_request.data), + context, ) - response = GetAuthenticatedExtendedCardResponse.model_validate( - response_data + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + # Validate type of the response + if not isinstance(json_rpc_response.result, dict): + raise A2AClientError( + f'Invalid response type: {type(json_rpc_response.result)}' + ) + response: AgentCard = json_format.ParseDict( + json_rpc_response.result, AgentCard() ) - if isinstance(response.root, JSONRPCErrorResponse): - raise A2AClientJSONRPCError(response.root) - card = response.root.result - if signature_verifier: - signature_verifier(card) - self.agent_card = card - self._needs_extended_card = False - return card + return response async def close(self) -> None: """Closes the httpx client.""" await self.httpx_client.aclose() + + def _create_jsonrpc_error(self, error_dict: dict[str, Any]) -> Exception: + """Creates the appropriate A2AError from a JSON-RPC error dictionary.""" + code = error_dict.get('code') + message = error_dict.get('message', str(error_dict)) + data = error_dict.get('data') + + if isinstance(code, int) and code in _JSON_RPC_ERROR_CODE_TO_A2A_ERROR: + return _JSON_RPC_ERROR_CODE_TO_A2A_ERROR[code](message, data=data) + + # Fallback to general A2AClientError + return A2AClientError(f'JSON-RPC Error {code}: {message}') + + async def _send_request( + self, + payload: dict[str, Any], + context: ClientCallContext | None = None, + ) -> dict[str, Any]: + http_kwargs = get_http_args(context) + + request = self.httpx_client.build_request( + 'POST', self.url, json=payload, **(http_kwargs or {}) + ) + return await send_http_request(self.httpx_client, request) + + async def _send_stream_request( + self, + rpc_request_payload: dict[str, Any], + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + http_kwargs = get_http_args(context) + + async for sse_data in send_http_stream_request( + self.httpx_client, + 'POST', + self.url, + None, + self._handle_sse_error, + json=rpc_request_payload, + **http_kwargs, + ): + json_rpc_response = JSONRPC20Response.from_json(sse_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + response: StreamResponse = json_format.ParseDict( + json_rpc_response.result, StreamResponse() + ) + yield response + + def _handle_sse_error(self, sse_data: str) -> NoReturn: + """Handles SSE error events by parsing JSON-RPC error payload and raising the appropriate domain error.""" + json_rpc_response = JSONRPC20Response.from_json(sse_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + raise A2AClientError(f'SSE stream error: {sse_data}') diff --git a/src/a2a/client/transports/rest.py b/src/a2a/client/transports/rest.py index 7a826cd68..3dfe95927 100644 --- a/src/a2a/client/transports/rest.py +++ b/src/a2a/client/transports/rest.py @@ -1,43 +1,87 @@ import json import logging -from collections.abc import AsyncGenerator, Callable -from typing import Any +from collections.abc import AsyncGenerator +from typing import Any, NoReturn import httpx from google.protobuf.json_format import MessageToDict, Parse, ParseDict -from httpx_sse import SSEError, aconnect_sse -from a2a.client.card_resolver import A2ACardResolver -from a2a.client.errors import ( - A2AClientHTTPError, - A2AClientJSONError, - A2AClientTimeoutError, -) -from a2a.client.middleware import ClientCallContext, ClientCallInterceptor +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError from a2a.client.transports.base import ClientTransport -from a2a.extensions.common import update_extension_header -from a2a.grpc import a2a_pb2 -from a2a.types import ( +from a2a.client.transports.http_helpers import ( + get_http_args, + send_http_request, + send_http_stream_request, +) +from a2a.types.a2a_pb2 import ( AgentCard, - GetTaskPushNotificationConfigParams, - Message, - MessageSendParams, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - TaskStatusUpdateEvent, ) -from a2a.utils import proto_utils +from a2a.utils.errors import A2A_REASON_TO_ERROR, MethodNotFoundError from a2a.utils.telemetry import SpanKind, trace_class logger = logging.getLogger(__name__) +def _parse_rest_error( + error_payload: dict[str, Any], + fallback_message: str, +) -> Exception | None: + """Parses a REST error payload and returns the appropriate A2AError. + + Args: + error_payload: The parsed JSON error payload. + fallback_message: Message to use if the payload has no ``message``. + + Returns: + The mapped A2AError if a known reason was found, otherwise ``None``. + """ + error_data = error_payload.get('error', {}) + message = error_data.get('message', fallback_message) + details = error_data.get('details', []) + if not isinstance(details, list): + return None + + # The `details` array can contain multiple different error objects. + # We extract the first `ErrorInfo` object because it contains the + # specific `reason` code needed to map this back to a Python A2AError. + for d in details: + if ( + isinstance(d, dict) + and d.get('@type') == 'type.googleapis.com/google.rpc.ErrorInfo' + ): + reason = d.get('reason') + metadata = d.get('metadata') or {} + if isinstance(reason, str): + exception_cls = A2A_REASON_TO_ERROR.get(reason) + if exception_cls: + exc = exception_cls(message) + if metadata: + exc.data = metadata + return exc + break + + return None + + @trace_class(kind=SpanKind.CLIENT) class RestTransport(ClientTransport): """A REST transport for the A2A client.""" @@ -45,383 +89,321 @@ class RestTransport(ClientTransport): def __init__( self, httpx_client: httpx.AsyncClient, - agent_card: AgentCard | None = None, - url: str | None = None, - interceptors: list[ClientCallInterceptor] | None = None, - extensions: list[str] | None = None, + agent_card: AgentCard, + url: str, ): """Initializes the RestTransport.""" - if url: - self.url = url - elif agent_card: - self.url = agent_card.url - else: - raise ValueError('Must provide either agent_card or url') - if self.url.endswith('/'): - self.url = self.url[:-1] + self.url = url.removesuffix('/') self.httpx_client = httpx_client self.agent_card = agent_card - self.interceptors = interceptors or [] - self._needs_extended_card = ( - agent_card.supports_authenticated_extended_card - if agent_card - else True - ) - self.extensions = extensions - - async def _apply_interceptors( - self, - request_payload: dict[str, Any], - http_kwargs: dict[str, Any] | None, - context: ClientCallContext | None, - ) -> tuple[dict[str, Any], dict[str, Any]]: - final_http_kwargs = http_kwargs or {} - final_request_payload = request_payload - # TODO: Implement interceptors for other transports - return final_request_payload, final_http_kwargs - - def _get_http_args( - self, context: ClientCallContext | None - ) -> dict[str, Any] | None: - return context.state.get('http_kwargs') if context else None - - async def _prepare_send_message( - self, - request: MessageSendParams, - context: ClientCallContext | None, - extensions: list[str] | None = None, - ) -> tuple[dict[str, Any], dict[str, Any]]: - pb = a2a_pb2.SendMessageRequest( - request=proto_utils.ToProto.message(request.message), - configuration=proto_utils.ToProto.message_send_configuration( - request.configuration - ), - metadata=( - proto_utils.ToProto.metadata(request.metadata) - if request.metadata - else None - ), - ) - payload = MessageToDict(pb) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) - payload, modified_kwargs = await self._apply_interceptors( - payload, - modified_kwargs, - context, - ) - return payload, modified_kwargs async def send_message( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> Task | Message: + ) -> SendMessageResponse: """Sends a non-streaming message request to the agent.""" - payload, modified_kwargs = await self._prepare_send_message( - request, context, extensions + response_data = await self._execute_request( + 'POST', + '/message:send', + request.tenant, + context=context, + json=MessageToDict(request), ) - response_data = await self._send_post_request( - '/v1/message:send', payload, modified_kwargs + response: SendMessageResponse = ParseDict( + response_data, SendMessageResponse() ) - response_pb = a2a_pb2.SendMessageResponse() - ParseDict(response_data, response_pb) - return proto_utils.FromProto.task_or_message(response_pb) + return response async def send_message_streaming( self, - request: MessageSendParams, + request: SendMessageRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent | Message - ]: + ) -> AsyncGenerator[StreamResponse]: """Sends a streaming message request to the agent and yields responses as they arrive.""" - payload, modified_kwargs = await self._prepare_send_message( - request, context, extensions - ) - - modified_kwargs.setdefault('timeout', None) + payload = MessageToDict(request) - async with aconnect_sse( - self.httpx_client, + async for event in self._send_stream_request( 'POST', - f'{self.url}/v1/message:stream', + '/message:stream', + request.tenant, + context=context, json=payload, - **modified_kwargs, - ) as event_source: - try: - event_source.response.raise_for_status() - async for sse in event_source.aiter_sse(): - if not sse.data: - continue - event = a2a_pb2.StreamResponse() - Parse(sse.data, event) - yield proto_utils.FromProto.stream_response(event) - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError(e.response.status_code, str(e)) from e - except SSEError as e: - raise A2AClientHTTPError( - 400, f'Invalid SSE response or protocol error: {e}' - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def _send_request(self, request: httpx.Request) -> dict[str, Any]: - try: - response = await self.httpx_client.send(request) - response.raise_for_status() - return response.json() - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except httpx.HTTPStatusError as e: - raise A2AClientHTTPError(e.response.status_code, str(e)) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def _send_post_request( - self, - target: str, - rpc_request_payload: dict[str, Any], - http_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - return await self._send_request( - self.httpx_client.build_request( - 'POST', - f'{self.url}{target}', - json=rpc_request_payload, - **(http_kwargs or {}), - ) - ) - - async def _send_get_request( - self, - target: str, - query_params: dict[str, str], - http_kwargs: dict[str, Any] | None = None, - ) -> dict[str, Any]: - return await self._send_request( - self.httpx_client.build_request( - 'GET', - f'{self.url}{target}', - params=query_params, - **(http_kwargs or {}), - ) - ) + ): + yield event async def get_task( self, - request: TaskQueryParams, + request: GetTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Retrieves the current state and history of a specific task.""" - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + params = MessageToDict(request) + if 'id' in params: + del params['id'] # id is part of the URL path + if 'tenant' in params: + del params['tenant'] + + response_data = await self._execute_request( + 'GET', + f'/tasks/{request.id}', + request.tenant, + context=context, + params=params, ) - _payload, modified_kwargs = await self._apply_interceptors( - request.model_dump(mode='json', exclude_none=True), - modified_kwargs, - context, + response: Task = ParseDict(response_data, Task()) + return response + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + params = MessageToDict(request) + if 'tenant' in params: + del params['tenant'] + + response_data = await self._execute_request( + 'GET', + '/tasks', + request.tenant, + context=context, + params=params, ) - response_data = await self._send_get_request( - f'/v1/tasks/{request.id}', - {'historyLength': str(request.history_length)} - if request.history_length is not None - else {}, - modified_kwargs, + response: ListTasksResponse = ParseDict( + response_data, ListTasksResponse() ) - task = a2a_pb2.Task() - ParseDict(response_data, task) - return proto_utils.FromProto.task(task) + return response async def cancel_task( self, - request: TaskIdParams, + request: CancelTaskRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> Task: """Requests the agent to cancel a specific task.""" - pb = a2a_pb2.CancelTaskRequest(name=f'tasks/{request.id}') - payload = MessageToDict(pb) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) - payload, modified_kwargs = await self._apply_interceptors( - payload, - modified_kwargs, - context, - ) - response_data = await self._send_post_request( - f'/v1/tasks/{request.id}:cancel', payload, modified_kwargs + response_data = await self._execute_request( + 'POST', + f'/tasks/{request.id}:cancel', + request.tenant, + context=context, + json=MessageToDict(request), ) - task = a2a_pb2.Task() - ParseDict(response_data, task) - return proto_utils.FromProto.task(task) + response: Task = ParseDict(response_data, Task()) + return response - async def set_task_callback( + async def create_task_push_notification_config( self, request: TaskPushNotificationConfig, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Sets or updates the push notification configuration for a specific task.""" - pb = a2a_pb2.CreateTaskPushNotificationConfigRequest( - parent=f'tasks/{request.task_id}', - config_id=request.push_notification_config.id, - config=proto_utils.ToProto.task_push_notification_config(request), - ) - payload = MessageToDict(pb) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) - payload, modified_kwargs = await self._apply_interceptors( - payload, modified_kwargs, context + response_data = await self._execute_request( + 'POST', + f'/tasks/{request.task_id}/pushNotificationConfigs', + request.tenant, + context=context, + json=MessageToDict(request), ) - response_data = await self._send_post_request( - f'/v1/tasks/{request.task_id}/pushNotificationConfigs', - payload, - modified_kwargs, + response: TaskPushNotificationConfig = ParseDict( + response_data, TaskPushNotificationConfig() ) - config = a2a_pb2.TaskPushNotificationConfig() - ParseDict(response_data, config) - return proto_utils.FromProto.task_push_notification_config(config) + return response - async def get_task_callback( + async def get_task_push_notification_config( self, - request: GetTaskPushNotificationConfigParams, + request: GetTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, ) -> TaskPushNotificationConfig: """Retrieves the push notification configuration for a specific task.""" - pb = a2a_pb2.GetTaskPushNotificationConfigRequest( - name=f'tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', + params = MessageToDict(request) + if 'id' in params: + del params['id'] + if 'taskId' in params: + del params['taskId'] + if 'tenant' in params: + del params['tenant'] + + response_data = await self._execute_request( + 'GET', + f'/tasks/{request.task_id}/pushNotificationConfigs/{request.id}', + request.tenant, + context=context, + params=params, ) - payload = MessageToDict(pb) - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + response: TaskPushNotificationConfig = ParseDict( + response_data, TaskPushNotificationConfig() ) - payload, modified_kwargs = await self._apply_interceptors( - payload, - modified_kwargs, - context, + return response + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + params = MessageToDict(request) + if 'taskId' in params: + del params['taskId'] + if 'tenant' in params: + del params['tenant'] + + response_data = await self._execute_request( + 'GET', + f'/tasks/{request.task_id}/pushNotificationConfigs', + request.tenant, + context=context, + params=params, ) - response_data = await self._send_get_request( - f'/v1/tasks/{request.id}/pushNotificationConfigs/{request.push_notification_config_id}', - {}, - modified_kwargs, + response: ListTaskPushNotificationConfigsResponse = ParseDict( + response_data, ListTaskPushNotificationConfigsResponse() ) - config = a2a_pb2.TaskPushNotificationConfig() - ParseDict(response_data, config) - return proto_utils.FromProto.task_push_notification_config(config) + return response - async def resubscribe( + async def delete_task_push_notification_config( self, - request: TaskIdParams, + request: DeleteTaskPushNotificationConfigRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - ) -> AsyncGenerator[ - Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent | Message - ]: - """Reconnects to get task updates.""" - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + params = MessageToDict(request) + if 'id' in params: + del params['id'] + if 'taskId' in params: + del params['taskId'] + if 'tenant' in params: + del params['tenant'] + + await self._execute_request( + 'DELETE', + f'/tasks/{request.task_id}/pushNotificationConfigs/{request.id}', + request.tenant, + context=context, + params=params, ) - modified_kwargs.setdefault('timeout', None) - async with aconnect_sse( - self.httpx_client, - 'GET', - f'{self.url}/v1/tasks/{request.id}:subscribe', - **modified_kwargs, - ) as event_source: - try: - async for sse in event_source.aiter_sse(): - event = a2a_pb2.StreamResponse() - Parse(sse.data, event) - yield proto_utils.FromProto.stream_response(event) - except httpx.TimeoutException as e: - raise A2AClientTimeoutError('Client Request timed out') from e - except SSEError as e: - raise A2AClientHTTPError( - 400, f'Invalid SSE response or protocol error: {e}' - ) from e - except json.JSONDecodeError as e: - raise A2AClientJSONError(str(e)) from e - except httpx.RequestError as e: - raise A2AClientHTTPError( - 503, f'Network communication error: {e}' - ) from e - - async def get_card( + async def subscribe( self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Reconnects to get task updates.""" + async for event in self._send_stream_request( + 'POST', + f'/tasks/{request.id}:subscribe', + request.tenant, + context=context, + ): + yield event + + async def get_extended_agent_card( + self, + request: GetExtendedAgentCardRequest, *, context: ClientCallContext | None = None, - extensions: list[str] | None = None, - signature_verifier: Callable[[AgentCard], None] | None = None, ) -> AgentCard: - """Retrieves the agent's card.""" - modified_kwargs = update_extension_header( - self._get_http_args(context), - extensions if extensions is not None else self.extensions, - ) + """Retrieves the Extended AgentCard.""" card = self.agent_card - - if not card: - resolver = A2ACardResolver(self.httpx_client, self.url) - card = await resolver.get_agent_card( - http_kwargs=modified_kwargs, - signature_verifier=signature_verifier, - ) - self._needs_extended_card = ( - card.supports_authenticated_extended_card - ) - self.agent_card = card - - if not self._needs_extended_card: + if not card.capabilities.extended_agent_card: return card - _, modified_kwargs = await self._apply_interceptors( - {}, - modified_kwargs, - context, - ) - response_data = await self._send_get_request( - '/v1/card', {}, modified_kwargs + response_data = await self._execute_request( + 'GET', '/extendedAgentCard', request.tenant, context=context ) - card = AgentCard.model_validate(response_data) - if signature_verifier: - signature_verifier(card) - self.agent_card = card - self._needs_extended_card = False - return card + return ParseDict(response_data, AgentCard()) async def close(self) -> None: """Closes the httpx client.""" await self.httpx_client.aclose() + + def _get_path(self, base_path: str, tenant: str) -> str: + """Returns the full path, prepending the tenant if provided.""" + return f'/{tenant}{base_path}' if tenant else base_path + + def _handle_http_error(self, e: httpx.HTTPStatusError) -> NoReturn: + """Handles HTTP status errors and raises the appropriate A2AError.""" + try: + error_payload = e.response.json() + mapped = _parse_rest_error(error_payload, str(e)) + if mapped: + raise mapped from e + except (json.JSONDecodeError, ValueError): + pass + + status_code = e.response.status_code + if status_code == httpx.codes.NOT_FOUND: + raise MethodNotFoundError( + f'Resource not found: {e.request.url}' + ) from e + + raise A2AClientError(f'HTTP Error {status_code}: {e}') from e + + def _handle_sse_error(self, sse_data: str) -> NoReturn: + """Handles SSE error events by parsing the REST error payload and raising the appropriate A2AError.""" + error_payload = json.loads(sse_data) + mapped = _parse_rest_error(error_payload, sse_data) + if mapped: + raise mapped + raise A2AClientError(sse_data) + + async def _send_stream_request( + self, + method: str, + target: str, + tenant: str, + context: ClientCallContext | None = None, + *, + json: dict[str, Any] | None = None, + ) -> AsyncGenerator[StreamResponse]: + path = self._get_path(target, tenant) + http_kwargs = get_http_args(context) + + async for sse_data in send_http_stream_request( + self.httpx_client, + method, + f'{self.url}{path}', + self._handle_http_error, + self._handle_sse_error, + json=json, + **http_kwargs, + ): + event: StreamResponse = Parse(sse_data, StreamResponse()) + yield event + + async def _send_request(self, request: httpx.Request) -> dict[str, Any]: + return await send_http_request( + self.httpx_client, request, self._handle_http_error + ) + + async def _execute_request( # noqa: PLR0913 + self, + method: str, + target: str, + tenant: str, + context: ClientCallContext | None = None, + *, + json: dict[str, Any] | None = None, + params: dict[str, Any] | None = None, + ) -> dict[str, Any]: + path = self._get_path(target, tenant) + http_kwargs = get_http_args(context) + + request = self.httpx_client.build_request( + method, + f'{self.url}{path}', + json=json, + params=params, + **http_kwargs, + ) + return await self._send_request(request) diff --git a/src/a2a/client/transports/tenant_decorator.py b/src/a2a/client/transports/tenant_decorator.py new file mode 100644 index 000000000..d1059d757 --- /dev/null +++ b/src/a2a/client/transports/tenant_decorator.py @@ -0,0 +1,167 @@ +from collections.abc import AsyncGenerator + +from a2a.client.client import ClientCallContext +from a2a.client.transports.base import ClientTransport +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, +) + + +class TenantTransportDecorator(ClientTransport): + """A transport decorator that attaches a tenant to all requests.""" + + def __init__(self, base: ClientTransport, tenant: str): + self._base = base + self._tenant = tenant + + def _resolve_tenant(self, tenant: str) -> str: + """If tenant is not provided, use the default tenant. + + Returns: + The tenant used for the request. + """ + return tenant or self._tenant + + async def send_message( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> SendMessageResponse: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.send_message(request, context=context) + + async def send_message_streaming( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Sends a streaming message request to the agent and yields responses.""" + request.tenant = self._resolve_tenant(request.tenant) + async for event in self._base.send_message_streaming( + request, context=context + ): + yield event + + async def get_task( + self, + request: GetTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.get_task(request, context=context) + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.list_tasks(request, context=context) + + async def cancel_task( + self, + request: CancelTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.cancel_task(request, context=context) + + async def create_task_push_notification_config( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.create_task_push_notification_config( + request, context=context + ) + + async def get_task_push_notification_config( + self, + request: GetTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.get_task_push_notification_config( + request, context=context + ) + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.list_task_push_notification_configs( + request, context=context + ) + + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + request.tenant = self._resolve_tenant(request.tenant) + await self._base.delete_task_push_notification_config( + request, context=context + ) + + async def subscribe( + self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Reconnects to get task updates.""" + request.tenant = self._resolve_tenant(request.tenant) + async for event in self._base.subscribe(request, context=context): + yield event + + async def get_extended_agent_card( + self, + request: GetExtendedAgentCardRequest, + *, + context: ClientCallContext | None = None, + ) -> AgentCard: + """Retrieves the Extended AgentCard.""" + request.tenant = self._resolve_tenant(request.tenant) + return await self._base.get_extended_agent_card( + request, + context=context, + ) + + async def close(self) -> None: + """Closes the transport.""" + await self._base.close() diff --git a/src/a2a/compat/__init__.py b/src/a2a/compat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/a2a/compat/v0_3/README.md b/src/a2a/compat/v0_3/README.md new file mode 100644 index 000000000..4c705535a --- /dev/null +++ b/src/a2a/compat/v0_3/README.md @@ -0,0 +1,54 @@ +# A2A Protocol Backward Compatibility (v0.3) + +This directory (`src/a2a/compat/v0_3/`) provides the foundational types and translation layers necessary for modern `v1.0` clients and servers to interoperate with legacy `v0.3` A2A systems. + +## Data Representations + +To support cross-version compatibility across JSON, REST, and gRPC, this directory manages three distinct data representations: + +### 1. Legacy v0.3 Pydantic Models (`types.py`) +This file contains Python [Pydantic](https://docs.pydantic.dev/) models generated from the legacy v0.3 JSON schema. +* **Purpose**: This is the "pivot" format. Legacy JSON-RPC and REST implementations natively serialize to/from these models. It acts as the intermediary between old wire formats and the modern SDK. + +### 2. Legacy v0.3 Protobuf Bindings (`a2a_v0_3_pb2.py`) +This module contains the native Protobuf bindings for the legacy v0.3 gRPC protocol. +* **Purpose**: To decode incoming bytes from legacy gRPC clients or encode outbound bytes to legacy gRPC servers. +* **Note**: It is generated into the `a2a.v1` package namespace. + +### 3. Current v1.0 Protobuf Bindings (`a2a.types.a2a_pb2`) +This is the central source of truth for the modern SDK (`v1.0`). All legacy payloads must ultimately be translated into these `v1.0` core objects to be processed by the modern `AgentExecutor`. +* **Note**: It is generated into the `lf.a2a.v1` package namespace. +--- + +## Transformation Utilities + +Payloads arriving from legacy clients undergo a phased transformation to bridge the gap between versions. + +### Legacy gRPC ↔ Legacy Pydantic: `proto_utils.py` +This module handles the mapping between legacy `v0.3` gRPC Protobuf objects and legacy `v0.3` Pydantic models. +This is a copy of the `a2a.types.proto_utils` module from 0.3 release. + +```python +from a2a.compat.v0_3 import a2a_v0_3_pb2 +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3 import proto_utils + +# 1. Receive legacy bytes over the wire +legacy_pb_msg = a2a_v0_3_pb2.Message() +legacy_pb_msg.ParseFromString(wire_bytes) + +# 2. Convert to intermediate Pydantic representation +pydantic_msg: types_v03.Message = proto_utils.FromProto.message(legacy_pb_msg) +``` + +### Legacy Pydantic ↔ Modern v1.0 Protobuf: `conversions.py` +This module structurally translates between legacy `v0.3` Pydantic objects and modern `v1.0` Core Protobufs. + +```python +from a2a.types import a2a_pb2 as pb2_v10 +from a2a.compat.v0_3 import conversions + +# 3. Convert the legacy Pydantic object into a modern v1.0 Protobuf +core_pb_msg: pb2_v10.Message = conversions.to_core_message(pydantic_msg) + +``` diff --git a/src/a2a/compat/v0_3/__init__.py b/src/a2a/compat/v0_3/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/src/a2a/compat/v0_3/a2a_v0_3.proto b/src/a2a/compat/v0_3/a2a_v0_3.proto new file mode 100644 index 000000000..41eaa0341 --- /dev/null +++ b/src/a2a/compat/v0_3/a2a_v0_3.proto @@ -0,0 +1,735 @@ +// Older protoc compilers don't understand edition yet. +syntax = "proto3"; +package a2a.v1; + +import "google/api/annotations.proto"; +import "google/api/client.proto"; +import "google/api/field_behavior.proto"; +import "google/protobuf/empty.proto"; +import "google/protobuf/struct.proto"; +import "google/protobuf/timestamp.proto"; + +option csharp_namespace = "A2a.V1"; +option go_package = "google.golang.org/a2a/v1"; +option java_multiple_files = true; +option java_outer_classname = "A2A"; +option java_package = "com.google.a2a.v1"; + +// A2AService defines the gRPC version of the A2A protocol. This has a slightly +// different shape than the JSONRPC version to better conform to AIP-127, +// where appropriate. The nouns are AgentCard, Message, Task and +// TaskPushNotificationConfig. +// - Messages are not a standard resource so there is no get/delete/update/list +// interface, only a send and stream custom methods. +// - Tasks have a get interface and custom cancel and subscribe methods. +// - TaskPushNotificationConfig are a resource whose parent is a task. +// They have get, list and create methods. +// - AgentCard is a static resource with only a get method. +service A2AService { + // Send a message to the agent. This is a blocking call that will return the + // task once it is completed, or a LRO if requested. + rpc SendMessage(SendMessageRequest) returns (SendMessageResponse) { + option (google.api.http) = { + post: "/v1/message:send" + body: "*" + }; + } + // SendStreamingMessage is a streaming call that will return a stream of + // task update events until the Task is in an interrupted or terminal state. + rpc SendStreamingMessage(SendMessageRequest) returns (stream StreamResponse) { + option (google.api.http) = { + post: "/v1/message:stream" + body: "*" + }; + } + + // Get the current state of a task from the agent. + rpc GetTask(GetTaskRequest) returns (Task) { + option (google.api.http) = { + get: "/v1/{name=tasks/*}" + }; + option (google.api.method_signature) = "name"; + } + // Cancel a task from the agent. If supported one should expect no + // more task updates for the task. + rpc CancelTask(CancelTaskRequest) returns (Task) { + option (google.api.http) = { + post: "/v1/{name=tasks/*}:cancel" + body: "*" + }; + } + // TaskSubscription is a streaming call that will return a stream of task + // update events. This attaches the stream to an existing in process task. + // If the task is complete the stream will return the completed task (like + // GetTask) and close the stream. + rpc TaskSubscription(TaskSubscriptionRequest) + returns (stream StreamResponse) { + option (google.api.http) = { + get: "/v1/{name=tasks/*}:subscribe" + }; + } + + // Set a push notification config for a task. + rpc CreateTaskPushNotificationConfig(CreateTaskPushNotificationConfigRequest) + returns (TaskPushNotificationConfig) { + option (google.api.http) = { + post: "/v1/{parent=tasks/*/pushNotificationConfigs}" + body: "config" + }; + option (google.api.method_signature) = "parent,config"; + } + // Get a push notification config for a task. + rpc GetTaskPushNotificationConfig(GetTaskPushNotificationConfigRequest) + returns (TaskPushNotificationConfig) { + option (google.api.http) = { + get: "/v1/{name=tasks/*/pushNotificationConfigs/*}" + }; + option (google.api.method_signature) = "name"; + } + // Get a list of push notifications configured for a task. + rpc ListTaskPushNotificationConfig(ListTaskPushNotificationConfigRequest) + returns (ListTaskPushNotificationConfigResponse) { + option (google.api.http) = { + get: "/v1/{parent=tasks/*}/pushNotificationConfigs" + }; + option (google.api.method_signature) = "parent"; + } + // GetAgentCard returns the agent card for the agent. + rpc GetAgentCard(GetAgentCardRequest) returns (AgentCard) { + option (google.api.http) = { + get: "/v1/card" + }; + } + // Delete a push notification config for a task. + rpc DeleteTaskPushNotificationConfig(DeleteTaskPushNotificationConfigRequest) + returns (google.protobuf.Empty) { + option (google.api.http) = { + delete: "/v1/{name=tasks/*/pushNotificationConfigs/*}" + }; + option (google.api.method_signature) = "name"; + } +} + +///////// Data Model //////////// + +// Configuration of a send message request. +message SendMessageConfiguration { + // The output modes that the agent is expected to respond with. + repeated string accepted_output_modes = 1; + // A configuration of a webhook that can be used to receive updates + PushNotificationConfig push_notification = 2; + // The maximum number of messages to include in the history. if 0, the + // history will be unlimited. + int32 history_length = 3; + // If true, the message will be blocking until the task is completed. If + // false, the message will be non-blocking and the task will be returned + // immediately. It is the caller's responsibility to check for any task + // updates. + bool blocking = 4; +} + +// Task is the core unit of action for A2A. It has a current status +// and when results are created for the task they are stored in the +// artifact. If there are multiple turns for a task, these are stored in +// history. +message Task { + // Unique identifier (e.g. UUID) for the task, generated by the server for a + // new task. + string id = 1; + // Unique identifier (e.g. UUID) for the contextual collection of interactions + // (tasks and messages). Created by the A2A server. + string context_id = 2; + // The current status of a Task, including state and a message. + TaskStatus status = 3; + // A set of output artifacts for a Task. + repeated Artifact artifacts = 4; + // protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + // The history of interactions from a task. + repeated Message history = 5; + // protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + // A key/value object to store custom metadata about a task. + google.protobuf.Struct metadata = 6; +} + +// The set of states a Task can be in. +enum TaskState { + TASK_STATE_UNSPECIFIED = 0; + // Represents the status that acknowledges a task is created + TASK_STATE_SUBMITTED = 1; + // Represents the status that a task is actively being processed + TASK_STATE_WORKING = 2; + // Represents the status a task is finished. This is a terminal state + TASK_STATE_COMPLETED = 3; + // Represents the status a task is done but failed. This is a terminal state + TASK_STATE_FAILED = 4; + // Represents the status a task was cancelled before it finished. + // This is a terminal state. + TASK_STATE_CANCELLED = 5; + // Represents the status that the task requires information to complete. + // This is an interrupted state. + TASK_STATE_INPUT_REQUIRED = 6; + // Represents the status that the agent has decided to not perform the task. + // This may be done during initial task creation or later once an agent + // has determined it can't or won't proceed. This is a terminal state. + TASK_STATE_REJECTED = 7; + // Represents the state that some authentication is needed from the upstream + // client. Authentication is expected to come out-of-band thus this is not + // an interrupted or terminal state. + TASK_STATE_AUTH_REQUIRED = 8; +} + +// A container for the status of a task +message TaskStatus { + // The current state of this task + TaskState state = 1; + // A message associated with the status. + Message update = 2 [json_name = "message"]; + // Timestamp when the status was recorded. + // Example: "2023-10-27T10:00:00Z" + google.protobuf.Timestamp timestamp = 3; +} + +// Part represents a container for a section of communication content. +// Parts can be purely textual, some sort of file (image, video, etc) or +// a structured data blob (i.e. JSON). +message Part { + oneof part { + string text = 1; + FilePart file = 2; + DataPart data = 3; + } + // Optional metadata associated with this part. + google.protobuf.Struct metadata = 4; +} + +// FilePart represents the different ways files can be provided. If files are +// small, directly feeding the bytes is supported via file_with_bytes. If the +// file is large, the agent should read the content as appropriate directly +// from the file_with_uri source. +message FilePart { + oneof file { + string file_with_uri = 1; + bytes file_with_bytes = 2; + } + string mime_type = 3; + string name = 4; +} + +// DataPart represents a structured blob. This is most commonly a JSON payload. +message DataPart { + google.protobuf.Struct data = 1; +} + +enum Role { + ROLE_UNSPECIFIED = 0; + // USER role refers to communication from the client to the server. + ROLE_USER = 1; + // AGENT role refers to communication from the server to the client. + ROLE_AGENT = 2; +} + +// Message is one unit of communication between client and server. It is +// associated with a context and optionally a task. Since the server is +// responsible for the context definition, it must always provide a context_id +// in its messages. The client can optionally provide the context_id if it +// knows the context to associate the message to. Similarly for task_id, +// except the server decides if a task is created and whether to include the +// task_id. +message Message { + // The unique identifier (e.g. UUID)of the message. This is required and + // created by the message creator. + string message_id = 1; + // The context id of the message. This is optional and if set, the message + // will be associated with the given context. + string context_id = 2; + // The task id of the message. This is optional and if set, the message + // will be associated with the given task. + string task_id = 3; + // A role for the message. + Role role = 4; + // protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + // Content is the container of the message content. + repeated Part content = 5; + // protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + // Any optional metadata to provide along with the message. + google.protobuf.Struct metadata = 6; + // The URIs of extensions that are present or contributed to this Message. + repeated string extensions = 7; +} + +// Artifacts are the container for task completed results. These are similar +// to Messages but are intended to be the product of a task, as opposed to +// point-to-point communication. +message Artifact { + // Unique identifier (e.g. UUID) for the artifact. It must be at least unique + // within a task. + string artifact_id = 1; + // A human readable name for the artifact. + string name = 3; + // A human readable description of the artifact, optional. + string description = 4; + // The content of the artifact. + repeated Part parts = 5; + // Optional metadata included with the artifact. + google.protobuf.Struct metadata = 6; + // The URIs of extensions that are present or contributed to this Artifact. + repeated string extensions = 7; +} + +// TaskStatusUpdateEvent is a delta even on a task indicating that a task +// has changed. +message TaskStatusUpdateEvent { + // The id of the task that is changed + string task_id = 1; + // The id of the context that the task belongs to + string context_id = 2; + // The new status of the task. + TaskStatus status = 3; + // Whether this is the last status update expected for this task. + bool final = 4; + // Optional metadata to associate with the task update. + google.protobuf.Struct metadata = 5; +} + +// TaskArtifactUpdateEvent represents a task delta where an artifact has +// been generated. +message TaskArtifactUpdateEvent { + // The id of the task for this artifact + string task_id = 1; + // The id of the context that this task belongs too + string context_id = 2; + // The artifact itself + Artifact artifact = 3; + // Whether this should be appended to a prior one produced + bool append = 4; + // Whether this represents the last part of an artifact + bool last_chunk = 5; + // Optional metadata associated with the artifact update. + google.protobuf.Struct metadata = 6; +} + +// Configuration for setting up push notifications for task updates. +message PushNotificationConfig { + // A unique identifier (e.g. UUID) for this push notification. + string id = 1; + // Url to send the notification too + string url = 2; + // Token unique for this task/session + string token = 3; + // Information about the authentication to sent with the notification + AuthenticationInfo authentication = 4; +} + +// Defines authentication details, used for push notifications. +message AuthenticationInfo { + // Supported authentication schemes - e.g. Basic, Bearer, etc + repeated string schemes = 1; + // Optional credentials + string credentials = 2; +} + +// Defines additional transport information for the agent. +message AgentInterface { + // The url this interface is found at. + string url = 1; + // The transport supported this url. This is an open form string, to be + // easily extended for many transport protocols. The core ones officially + // supported are JSONRPC, GRPC and HTTP+JSON. + string transport = 2; +} + +// AgentCard conveys key information: +// - Overall details (version, name, description, uses) +// - Skills; a set of actions/solutions the agent can perform +// - Default modalities/content types supported by the agent. +// - Authentication requirements +// Next ID: 19 +message AgentCard { + // The version of the A2A protocol this agent supports. + string protocol_version = 16; + // A human readable name for the agent. + // Example: "Recipe Agent" + string name = 1; + // A description of the agent's domain of action/solution space. + // Example: "Agent that helps users with recipes and cooking." + string description = 2; + // A URL to the address the agent is hosted at. This represents the + // preferred endpoint as declared by the agent. + string url = 3; + // The transport of the preferred endpoint. If empty, defaults to JSONRPC. + string preferred_transport = 14; + // Announcement of additional supported transports. Client can use any of + // the supported transports. + repeated AgentInterface additional_interfaces = 15; + // The service provider of the agent. + AgentProvider provider = 4; + // The version of the agent. + // Example: "1.0.0" + string version = 5; + // A url to provide additional documentation about the agent. + string documentation_url = 6; + // A2A Capability set supported by the agent. + AgentCapabilities capabilities = 7; + // The security scheme details used for authenticating with this agent. + map security_schemes = 8; + // protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + // Security requirements for contacting the agent. + // This list can be seen as an OR of ANDs. Each object in the list describes + // one possible set of security requirements that must be present on a + // request. This allows specifying, for example, "callers must either use + // OAuth OR an API Key AND mTLS." + // Example: + // security { + // schemes { key: "oauth" value { list: ["read"] } } + // } + // security { + // schemes { key: "api-key" } + // schemes { key: "mtls" } + // } + repeated Security security = 9; + // protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + // The set of interaction modes that the agent supports across all skills. + // This can be overridden per skill. Defined as mime types. + repeated string default_input_modes = 10; + // The mime types supported as outputs from this agent. + repeated string default_output_modes = 11; + // Skills represent a unit of ability an agent can perform. This may + // somewhat abstract but represents a more focused set of actions that the + // agent is highly likely to succeed at. + repeated AgentSkill skills = 12; + // Whether the agent supports providing an extended agent card when + // the user is authenticated, i.e. is the card from .well-known + // different than the card from GetAgentCard. + bool supports_authenticated_extended_card = 13; + // JSON Web Signatures computed for this AgentCard. + repeated AgentCardSignature signatures = 17; + // An optional URL to an icon for the agent. + string icon_url = 18; +} + +// Represents information about the service provider of an agent. +message AgentProvider { + // The providers reference url + // Example: "https://ai.google.dev" + string url = 1; + // The providers organization name + // Example: "Google" + string organization = 2; +} + +// Defines the A2A feature set supported by the agent +message AgentCapabilities { + // If the agent will support streaming responses + bool streaming = 1; + // If the agent can send push notifications to the clients webhook + bool push_notifications = 2; + // Extensions supported by this agent. + repeated AgentExtension extensions = 3; +} + +// A declaration of an extension supported by an Agent. +message AgentExtension { + // The URI of the extension. + // Example: "https://developers.google.com/identity/protocols/oauth2" + string uri = 1; + // A description of how this agent uses this extension. + // Example: "Google OAuth 2.0 authentication" + string description = 2; + // Whether the client must follow specific requirements of the extension. + // Example: false + bool required = 3; + // Optional configuration for the extension. + google.protobuf.Struct params = 4; +} + +// AgentSkill represents a unit of action/solution that the agent can perform. +// One can think of this as a type of highly reliable solution that an agent +// can be tasked to provide. Agents have the autonomy to choose how and when +// to use specific skills, but clients should have confidence that if the +// skill is defined that unit of action can be reliably performed. +message AgentSkill { + // Unique identifier of the skill within this agent. + string id = 1; + // A human readable name for the skill. + string name = 2; + // A human (or llm) readable description of the skill + // details and behaviors. + string description = 3; + // A set of tags for the skill to enhance categorization/utilization. + // Example: ["cooking", "customer support", "billing"] + repeated string tags = 4; + // A set of example queries that this skill is designed to address. + // These examples should help the caller to understand how to craft requests + // to the agent to achieve specific goals. + // Example: ["I need a recipe for bread"] + repeated string examples = 5; + // Possible input modalities supported. + repeated string input_modes = 6; + // Possible output modalities produced + repeated string output_modes = 7; + // protolint:disable REPEATED_FIELD_NAMES_PLURALIZED + // Security schemes necessary for the agent to leverage this skill. + // As in the overall AgentCard.security, this list represents a logical OR of + // security requirement objects. Each object is a set of security schemes + // that must be used together (a logical AND). + repeated Security security = 8; + // protolint:enable REPEATED_FIELD_NAMES_PLURALIZED +} + +// AgentCardSignature represents a JWS signature of an AgentCard. +// This follows the JSON format of an RFC 7515 JSON Web Signature (JWS). +message AgentCardSignature { + // The protected JWS header for the signature. This is always a + // base64url-encoded JSON object. Required. + string protected = 1 [(google.api.field_behavior) = REQUIRED]; + // The computed signature, base64url-encoded. Required. + string signature = 2 [(google.api.field_behavior) = REQUIRED]; + // The unprotected JWS header values. + google.protobuf.Struct header = 3; +} + +message TaskPushNotificationConfig { + // The resource name of the config. + // Format: tasks/{task_id}/pushNotificationConfigs/{config_id} + string name = 1; + // The push notification configuration details. + PushNotificationConfig push_notification_config = 2; +} + +// protolint:disable REPEATED_FIELD_NAMES_PLURALIZED +message StringList { + repeated string list = 1; +} +// protolint:enable REPEATED_FIELD_NAMES_PLURALIZED + +message Security { + map schemes = 1; +} + +message SecurityScheme { + oneof scheme { + APIKeySecurityScheme api_key_security_scheme = 1; + HTTPAuthSecurityScheme http_auth_security_scheme = 2; + OAuth2SecurityScheme oauth2_security_scheme = 3; + OpenIdConnectSecurityScheme open_id_connect_security_scheme = 4; + MutualTlsSecurityScheme mtls_security_scheme = 5; + } +} + +message APIKeySecurityScheme { + // Description of this security scheme. + string description = 1; + // Location of the API key, valid values are "query", "header", or "cookie" + string location = 2; + // Name of the header, query or cookie parameter to be used. + string name = 3; +} + +message HTTPAuthSecurityScheme { + // Description of this security scheme. + string description = 1; + // The name of the HTTP Authentication scheme to be used in the + // Authorization header as defined in RFC7235. The values used SHOULD be + // registered in the IANA Authentication Scheme registry. + // The value is case-insensitive, as defined in RFC7235. + string scheme = 2; + // A hint to the client to identify how the bearer token is formatted. + // Bearer tokens are usually generated by an authorization server, so + // this information is primarily for documentation purposes. + string bearer_format = 3; +} + +message OAuth2SecurityScheme { + // Description of this security scheme. + string description = 1; + // An object containing configuration information for the flow types supported + OAuthFlows flows = 2; + // URL to the oauth2 authorization server metadata + // [RFC8414](https://datatracker.ietf.org/doc/html/rfc8414). TLS is required. + string oauth2_metadata_url = 3; +} + +message OpenIdConnectSecurityScheme { + // Description of this security scheme. + string description = 1; + // Well-known URL to discover the [[OpenID-Connect-Discovery]] provider + // metadata. + string open_id_connect_url = 2; +} + +message MutualTlsSecurityScheme { + // Description of this security scheme. + string description = 1; +} + +message OAuthFlows { + oneof flow { + AuthorizationCodeOAuthFlow authorization_code = 1; + ClientCredentialsOAuthFlow client_credentials = 2; + ImplicitOAuthFlow implicit = 3; + PasswordOAuthFlow password = 4; + } +} + +message AuthorizationCodeOAuthFlow { + // The authorization URL to be used for this flow. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS + string authorization_url = 1; + // The token URL to be used for this flow. This MUST be in the form of a URL. + // The OAuth2 standard requires the use of TLS. + string token_url = 2; + // The URL to be used for obtaining refresh tokens. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS. + string refresh_url = 3; + // The available scopes for the OAuth2 security scheme. A map between the + // scope name and a short description for it. The map MAY be empty. + map scopes = 4; +} + +message ClientCredentialsOAuthFlow { + // The token URL to be used for this flow. This MUST be in the form of a URL. + // The OAuth2 standard requires the use of TLS. + string token_url = 1; + // The URL to be used for obtaining refresh tokens. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS. + string refresh_url = 2; + // The available scopes for the OAuth2 security scheme. A map between the + // scope name and a short description for it. The map MAY be empty. + map scopes = 3; +} + +message ImplicitOAuthFlow { + // The authorization URL to be used for this flow. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS + string authorization_url = 1; + // The URL to be used for obtaining refresh tokens. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS. + string refresh_url = 2; + // The available scopes for the OAuth2 security scheme. A map between the + // scope name and a short description for it. The map MAY be empty. + map scopes = 3; +} + +message PasswordOAuthFlow { + // The token URL to be used for this flow. This MUST be in the form of a URL. + // The OAuth2 standard requires the use of TLS. + string token_url = 1; + // The URL to be used for obtaining refresh tokens. This MUST be in the + // form of a URL. The OAuth2 standard requires the use of TLS. + string refresh_url = 2; + // The available scopes for the OAuth2 security scheme. A map between the + // scope name and a short description for it. The map MAY be empty. + map scopes = 3; +} + +///////////// Request Messages /////////// +message SendMessageRequest { + // The message to send to the agent. + Message request = 1 + [(google.api.field_behavior) = REQUIRED, json_name = "message"]; + // Configuration for the send request. + SendMessageConfiguration configuration = 2; + // Optional metadata for the request. + google.protobuf.Struct metadata = 3; +} + +message GetTaskRequest { + // The resource name of the task. + // Format: tasks/{task_id} + string name = 1 [(google.api.field_behavior) = REQUIRED]; + // The number of most recent messages from the task's history to retrieve. + int32 history_length = 2; +} + +message CancelTaskRequest { + // The resource name of the task to cancel. + // Format: tasks/{task_id} + string name = 1; +} + +message GetTaskPushNotificationConfigRequest { + // The resource name of the config to retrieve. + // Format: tasks/{task_id}/pushNotificationConfigs/{config_id} + string name = 1; +} + +message DeleteTaskPushNotificationConfigRequest { + // The resource name of the config to delete. + // Format: tasks/{task_id}/pushNotificationConfigs/{config_id} + string name = 1; +} + +message CreateTaskPushNotificationConfigRequest { + // The parent task resource for this config. + // Format: tasks/{task_id} + string parent = 1 [ + (google.api.field_behavior) = REQUIRED + ]; + // The ID for the new config. + string config_id = 2 [(google.api.field_behavior) = REQUIRED]; + // The configuration to create. + TaskPushNotificationConfig config = 3 + [(google.api.field_behavior) = REQUIRED]; +} + +message TaskSubscriptionRequest { + // The resource name of the task to subscribe to. + // Format: tasks/{task_id} + string name = 1; +} + +message ListTaskPushNotificationConfigRequest { + // The parent task resource. + // Format: tasks/{task_id} + string parent = 1; + // For AIP-158 these fields are present. Usually not used/needed. + // The maximum number of configurations to return. + // If unspecified, all configs will be returned. + int32 page_size = 2; + + // A page token received from a previous + // ListTaskPushNotificationConfigRequest call. + // Provide this to retrieve the subsequent page. + // When paginating, all other parameters provided to + // `ListTaskPushNotificationConfigRequest` must match the call that provided + // the page token. + string page_token = 3; +} + +message GetAgentCardRequest { + // Empty. Added to fix linter violation. +} + +//////// Response Messages /////////// +message SendMessageResponse { + oneof payload { + Task task = 1; + Message msg = 2 [json_name = "message"]; + } +} + +// The stream response for a message. The stream should be one of the following +// sequences: +// If the response is a message, the stream should contain one, and only one, +// message and then close +// If the response is a task lifecycle, the first response should be a Task +// object followed by zero or more TaskStatusUpdateEvents and +// TaskArtifactUpdateEvents. The stream should complete when the Task +// if in an interrupted or terminal state. A stream that ends before these +// conditions are met are +message StreamResponse { + oneof payload { + Task task = 1; + Message msg = 2 [json_name = "message"]; + TaskStatusUpdateEvent status_update = 3; + TaskArtifactUpdateEvent artifact_update = 4; + } +} + +message ListTaskPushNotificationConfigResponse { + // The list of push notification configurations. + repeated TaskPushNotificationConfig configs = 1; + // A token, which can be sent as `page_token` to retrieve the next page. + // If this field is omitted, there are no subsequent pages. + string next_page_token = 2; +} diff --git a/src/a2a/compat/v0_3/a2a_v0_3_pb2.py b/src/a2a/compat/v0_3/a2a_v0_3_pb2.py new file mode 100644 index 000000000..e310e530b --- /dev/null +++ b/src/a2a/compat/v0_3/a2a_v0_3_pb2.py @@ -0,0 +1,195 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: a2a_v0_3.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'a2a_v0_3.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\x0e\x61\x32\x61_v0_3.proto\x12\x06\x61\x32\x61.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xde\x01\n\x18SendMessageConfiguration\x12\x32\n\x15\x61\x63\x63\x65pted_output_modes\x18\x01 \x03(\tR\x13\x61\x63\x63\x65ptedOutputModes\x12K\n\x11push_notification\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x10pushNotification\x12%\n\x0ehistory_length\x18\x03 \x01(\x05R\rhistoryLength\x12\x1a\n\x08\x62locking\x18\x04 \x01(\x08R\x08\x62locking\"\xf1\x01\n\x04Task\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 \x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12.\n\tartifacts\x18\x04 \x03(\x0b\x32\x10.a2a.v1.ArtifactR\tartifacts\x12)\n\x07history\x18\x05 \x03(\x0b\x32\x0f.a2a.v1.MessageR\x07history\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x99\x01\n\nTaskStatus\x12\'\n\x05state\x18\x01 \x01(\x0e\x32\x11.a2a.v1.TaskStateR\x05state\x12(\n\x06update\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageR\x07message\x12\x38\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ttimestamp\"\xa9\x01\n\x04Part\x12\x14\n\x04text\x18\x01 \x01(\tH\x00R\x04text\x12&\n\x04\x66ile\x18\x02 \x01(\x0b\x32\x10.a2a.v1.FilePartH\x00R\x04\x66ile\x12&\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x10.a2a.v1.DataPartH\x00R\x04\x64\x61ta\x12\x33\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadataB\x06\n\x04part\"\x93\x01\n\x08\x46ilePart\x12$\n\rfile_with_uri\x18\x01 \x01(\tH\x00R\x0b\x66ileWithUri\x12(\n\x0f\x66ile_with_bytes\x18\x02 \x01(\x0cH\x00R\rfileWithBytes\x12\x1b\n\tmime_type\x18\x03 \x01(\tR\x08mimeType\x12\x12\n\x04name\x18\x04 \x01(\tR\x04nameB\x06\n\x04\x66ile\"7\n\x08\x44\x61taPart\x12+\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61ta\"\xff\x01\n\x07Message\x12\x1d\n\nmessage_id\x18\x01 \x01(\tR\tmessageId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12\x17\n\x07task_id\x18\x03 \x01(\tR\x06taskId\x12 \n\x04role\x18\x04 \x01(\x0e\x32\x0c.a2a.v1.RoleR\x04role\x12&\n\x07\x63ontent\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x07\x63ontent\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xda\x01\n\x08\x41rtifact\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x04 \x01(\tR\x0b\x64\x65scription\x12\"\n\x05parts\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x05parts\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xc6\x01\n\x15TaskStatusUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 \x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12\x14\n\x05\x66inal\x18\x04 \x01(\x08R\x05\x66inal\x12\x33\n\x08metadata\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\xeb\x01\n\x17TaskArtifactUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12,\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x10.a2a.v1.ArtifactR\x08\x61rtifact\x12\x16\n\x06\x61ppend\x18\x04 \x01(\x08R\x06\x61ppend\x12\x1d\n\nlast_chunk\x18\x05 \x01(\x08R\tlastChunk\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x94\x01\n\x16PushNotificationConfig\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03url\x18\x02 \x01(\tR\x03url\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x42\n\x0e\x61uthentication\x18\x04 \x01(\x0b\x32\x1a.a2a.v1.AuthenticationInfoR\x0e\x61uthentication\"P\n\x12\x41uthenticationInfo\x12\x18\n\x07schemes\x18\x01 \x03(\tR\x07schemes\x12 \n\x0b\x63redentials\x18\x02 \x01(\tR\x0b\x63redentials\"@\n\x0e\x41gentInterface\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\x1c\n\ttransport\x18\x02 \x01(\tR\ttransport\"\xc8\x07\n\tAgentCard\x12)\n\x10protocol_version\x18\x10 \x01(\tR\x0fprotocolVersion\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x10\n\x03url\x18\x03 \x01(\tR\x03url\x12/\n\x13preferred_transport\x18\x0e \x01(\tR\x12preferredTransport\x12K\n\x15\x61\x64\x64itional_interfaces\x18\x0f \x03(\x0b\x32\x16.a2a.v1.AgentInterfaceR\x14\x61\x64\x64itionalInterfaces\x12\x31\n\x08provider\x18\x04 \x01(\x0b\x32\x15.a2a.v1.AgentProviderR\x08provider\x12\x18\n\x07version\x18\x05 \x01(\tR\x07version\x12+\n\x11\x64ocumentation_url\x18\x06 \x01(\tR\x10\x64ocumentationUrl\x12=\n\x0c\x63\x61pabilities\x18\x07 \x01(\x0b\x32\x19.a2a.v1.AgentCapabilitiesR\x0c\x63\x61pabilities\x12Q\n\x10security_schemes\x18\x08 \x03(\x0b\x32&.a2a.v1.AgentCard.SecuritySchemesEntryR\x0fsecuritySchemes\x12,\n\x08security\x18\t \x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\x12.\n\x13\x64\x65\x66\x61ult_input_modes\x18\n \x03(\tR\x11\x64\x65\x66\x61ultInputModes\x12\x30\n\x14\x64\x65\x66\x61ult_output_modes\x18\x0b \x03(\tR\x12\x64\x65\x66\x61ultOutputModes\x12*\n\x06skills\x18\x0c \x03(\x0b\x32\x12.a2a.v1.AgentSkillR\x06skills\x12O\n$supports_authenticated_extended_card\x18\r \x01(\x08R!supportsAuthenticatedExtendedCard\x12:\n\nsignatures\x18\x11 \x03(\x0b\x32\x1a.a2a.v1.AgentCardSignatureR\nsignatures\x12\x19\n\x08icon_url\x18\x12 \x01(\tR\x07iconUrl\x1aZ\n\x14SecuritySchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.a2a.v1.SecuritySchemeR\x05value:\x02\x38\x01\"E\n\rAgentProvider\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\"\n\x0corganization\x18\x02 \x01(\tR\x0corganization\"\x98\x01\n\x11\x41gentCapabilities\x12\x1c\n\tstreaming\x18\x01 \x01(\x08R\tstreaming\x12-\n\x12push_notifications\x18\x02 \x01(\x08R\x11pushNotifications\x12\x36\n\nextensions\x18\x03 \x03(\x0b\x32\x16.a2a.v1.AgentExtensionR\nextensions\"\x91\x01\n\x0e\x41gentExtension\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08required\x18\x03 \x01(\x08R\x08required\x12/\n\x06params\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x06params\"\xf4\x01\n\nAgentSkill\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x03 \x01(\tR\x0b\x64\x65scription\x12\x12\n\x04tags\x18\x04 \x03(\tR\x04tags\x12\x1a\n\x08\x65xamples\x18\x05 \x03(\tR\x08\x65xamples\x12\x1f\n\x0binput_modes\x18\x06 \x03(\tR\ninputModes\x12!\n\x0coutput_modes\x18\x07 \x03(\tR\x0boutputModes\x12,\n\x08security\x18\x08 \x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\"\x8b\x01\n\x12\x41gentCardSignature\x12!\n\tprotected\x18\x01 \x01(\tB\x03\xe0\x41\x02R\tprotected\x12!\n\tsignature\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tsignature\x12/\n\x06header\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x06header\"\x8a\x01\n\x1aTaskPushNotificationConfig\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12X\n\x18push_notification_config\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x16pushNotificationConfig\" \n\nStringList\x12\x12\n\x04list\x18\x01 \x03(\tR\x04list\"\x93\x01\n\x08Security\x12\x37\n\x07schemes\x18\x01 \x03(\x0b\x32\x1d.a2a.v1.Security.SchemesEntryR\x07schemes\x1aN\n\x0cSchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x12.a2a.v1.StringListR\x05value:\x02\x38\x01\"\xe6\x03\n\x0eSecurityScheme\x12U\n\x17\x61pi_key_security_scheme\x18\x01 \x01(\x0b\x32\x1c.a2a.v1.APIKeySecuritySchemeH\x00R\x14\x61piKeySecurityScheme\x12[\n\x19http_auth_security_scheme\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.HTTPAuthSecuritySchemeH\x00R\x16httpAuthSecurityScheme\x12T\n\x16oauth2_security_scheme\x18\x03 \x01(\x0b\x32\x1c.a2a.v1.OAuth2SecuritySchemeH\x00R\x14oauth2SecurityScheme\x12k\n\x1fopen_id_connect_security_scheme\x18\x04 \x01(\x0b\x32#.a2a.v1.OpenIdConnectSecuritySchemeH\x00R\x1bopenIdConnectSecurityScheme\x12S\n\x14mtls_security_scheme\x18\x05 \x01(\x0b\x32\x1f.a2a.v1.MutualTlsSecuritySchemeH\x00R\x12mtlsSecuritySchemeB\x08\n\x06scheme\"h\n\x14\x41PIKeySecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08location\x18\x02 \x01(\tR\x08location\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"w\n\x16HTTPAuthSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x16\n\x06scheme\x18\x02 \x01(\tR\x06scheme\x12#\n\rbearer_format\x18\x03 \x01(\tR\x0c\x62\x65\x61rerFormat\"\x92\x01\n\x14OAuth2SecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12(\n\x05\x66lows\x18\x02 \x01(\x0b\x32\x12.a2a.v1.OAuthFlowsR\x05\x66lows\x12.\n\x13oauth2_metadata_url\x18\x03 \x01(\tR\x11oauth2MetadataUrl\"n\n\x1bOpenIdConnectSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12-\n\x13open_id_connect_url\x18\x02 \x01(\tR\x10openIdConnectUrl\";\n\x17MutualTlsSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\"\xb0\x02\n\nOAuthFlows\x12S\n\x12\x61uthorization_code\x18\x01 \x01(\x0b\x32\".a2a.v1.AuthorizationCodeOAuthFlowH\x00R\x11\x61uthorizationCode\x12S\n\x12\x63lient_credentials\x18\x02 \x01(\x0b\x32\".a2a.v1.ClientCredentialsOAuthFlowH\x00R\x11\x63lientCredentials\x12\x37\n\x08implicit\x18\x03 \x01(\x0b\x32\x19.a2a.v1.ImplicitOAuthFlowH\x00R\x08implicit\x12\x37\n\x08password\x18\x04 \x01(\x0b\x32\x19.a2a.v1.PasswordOAuthFlowH\x00R\x08passwordB\x06\n\x04\x66low\"\x8a\x02\n\x1a\x41uthorizationCodeOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1b\n\ttoken_url\x18\x02 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x03 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x04 \x03(\x0b\x32..a2a.v1.AuthorizationCodeOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdd\x01\n\x1a\x43lientCredentialsOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x03 \x03(\x0b\x32..a2a.v1.ClientCredentialsOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdb\x01\n\x11ImplicitOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 \x03(\x0b\x32%.a2a.v1.ImplicitOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xcb\x01\n\x11PasswordOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 \x03(\x0b\x32%.a2a.v1.PasswordOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xc1\x01\n\x12SendMessageRequest\x12.\n\x07request\x18\x01 \x01(\x0b\x32\x0f.a2a.v1.MessageB\x03\xe0\x41\x02R\x07message\x12\x46\n\rconfiguration\x18\x02 \x01(\x0b\x32 .a2a.v1.SendMessageConfigurationR\rconfiguration\x12\x33\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"P\n\x0eGetTaskRequest\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0ehistory_length\x18\x02 \x01(\x05R\rhistoryLength\"\'\n\x11\x43\x61ncelTaskRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\":\n$GetTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"=\n\'DeleteTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xa9\x01\n\'CreateTaskPushNotificationConfigRequest\x12\x1b\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06parent\x12 \n\tconfig_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08\x63onfigId\x12?\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\".a2a.v1.TaskPushNotificationConfigB\x03\xe0\x41\x02R\x06\x63onfig\"-\n\x17TaskSubscriptionRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"{\n%ListTaskPushNotificationConfigRequest\x12\x16\n\x06parent\x18\x01 \x01(\tR\x06parent\x12\x1b\n\tpage_size\x18\x02 \x01(\x05R\x08pageSize\x12\x1d\n\npage_token\x18\x03 \x01(\tR\tpageToken\"\x15\n\x13GetAgentCardRequest\"m\n\x13SendMessageResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07messageB\t\n\x07payload\"\xfa\x01\n\x0eStreamResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07message\x12\x44\n\rstatus_update\x18\x03 \x01(\x0b\x32\x1d.a2a.v1.TaskStatusUpdateEventH\x00R\x0cstatusUpdate\x12J\n\x0f\x61rtifact_update\x18\x04 \x01(\x0b\x32\x1f.a2a.v1.TaskArtifactUpdateEventH\x00R\x0e\x61rtifactUpdateB\t\n\x07payload\"\x8e\x01\n&ListTaskPushNotificationConfigResponse\x12<\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32\".a2a.v1.TaskPushNotificationConfigR\x07\x63onfigs\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken*\xfa\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x18\n\x14TASK_STATE_SUBMITTED\x10\x01\x12\x16\n\x12TASK_STATE_WORKING\x10\x02\x12\x18\n\x14TASK_STATE_COMPLETED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x18\n\x14TASK_STATE_CANCELLED\x10\x05\x12\x1d\n\x19TASK_STATE_INPUT_REQUIRED\x10\x06\x12\x17\n\x13TASK_STATE_REJECTED\x10\x07\x12\x1c\n\x18TASK_STATE_AUTH_REQUIRED\x10\x08*;\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\r\n\tROLE_USER\x10\x01\x12\x0e\n\nROLE_AGENT\x10\x02\x32\xbb\n\n\nA2AService\x12\x63\n\x0bSendMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x1b.a2a.v1.SendMessageResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/message:send:\x01*\x12k\n\x14SendStreamingMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x16.a2a.v1.StreamResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/message:stream:\x01*0\x01\x12R\n\x07GetTask\x12\x16.a2a.v1.GetTaskRequest\x1a\x0c.a2a.v1.Task\"!\xda\x41\x04name\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/{name=tasks/*}\x12[\n\nCancelTask\x12\x19.a2a.v1.CancelTaskRequest\x1a\x0c.a2a.v1.Task\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/v1/{name=tasks/*}:cancel:\x01*\x12s\n\x10TaskSubscription\x12\x1f.a2a.v1.TaskSubscriptionRequest\x1a\x16.a2a.v1.StreamResponse\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v1/{name=tasks/*}:subscribe0\x01\x12\xc5\x01\n CreateTaskPushNotificationConfig\x12/.a2a.v1.CreateTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\"L\xda\x41\rparent,config\x82\xd3\xe4\x93\x02\x36\",/v1/{parent=tasks/*/pushNotificationConfigs}:\x06\x63onfig\x12\xae\x01\n\x1dGetTaskPushNotificationConfig\x12,.a2a.v1.GetTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.\x12,/v1/{name=tasks/*/pushNotificationConfigs/*}\x12\xbe\x01\n\x1eListTaskPushNotificationConfig\x12-.a2a.v1.ListTaskPushNotificationConfigRequest\x1a..a2a.v1.ListTaskPushNotificationConfigResponse\"=\xda\x41\x06parent\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=tasks/*}/pushNotificationConfigs\x12P\n\x0cGetAgentCard\x12\x1b.a2a.v1.GetAgentCardRequest\x1a\x11.a2a.v1.AgentCard\"\x10\x82\xd3\xe4\x93\x02\n\x12\x08/v1/card\x12\xa8\x01\n DeleteTaskPushNotificationConfig\x12/.a2a.v1.DeleteTaskPushNotificationConfigRequest\x1a\x16.google.protobuf.Empty\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.*,/v1/{name=tasks/*/pushNotificationConfigs/*}Bl\n\ncom.a2a.v1B\x0b\x41\x32\x61V03ProtoP\x01Z\x18google.golang.org/a2a/v1\xa2\x02\x03\x41XX\xaa\x02\x06\x41\x32\x61.V1\xca\x02\x06\x41\x32\x61\\V1\xe2\x02\x12\x41\x32\x61\\V1\\GPBMetadata\xea\x02\x07\x41\x32\x61::V1b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'a2a_v0_3_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\ncom.a2a.v1B\013A2aV03ProtoP\001Z\030google.golang.org/a2a/v1\242\002\003AXX\252\002\006A2a.V1\312\002\006A2a\\V1\342\002\022A2a\\V1\\GPBMetadata\352\002\007A2a::V1' + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._loaded_options = None + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_options = b'8\001' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._loaded_options = None + _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._serialized_options = b'\340A\002' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._loaded_options = None + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._serialized_options = b'\340A\002' + _globals['_SECURITY_SCHEMESENTRY']._loaded_options = None + _globals['_SECURITY_SCHEMESENTRY']._serialized_options = b'8\001' + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._loaded_options = None + _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._serialized_options = b'\340A\002' + _globals['_GETTASKREQUEST'].fields_by_name['name']._loaded_options = None + _globals['_GETTASKREQUEST'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._loaded_options = None + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._loaded_options = None + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._serialized_options = b'\340A\002' + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._loaded_options = None + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._serialized_options = b'\340A\002' + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._serialized_options = b'\202\323\344\223\002\025\"\020/v1/message:send:\001*' + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._serialized_options = b'\202\323\344\223\002\027\"\022/v1/message:stream:\001*' + _globals['_A2ASERVICE'].methods_by_name['GetTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTask']._serialized_options = b'\332A\004name\202\323\344\223\002\024\022\022/v1/{name=tasks/*}' + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._serialized_options = b'\202\323\344\223\002\036\"\031/v1/{name=tasks/*}:cancel:\001*' + _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._serialized_options = b'\202\323\344\223\002\036\022\034/v1/{name=tasks/*}:subscribe' + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._serialized_options = b'\332A\rparent,config\202\323\344\223\0026\",/v1/{parent=tasks/*/pushNotificationConfigs}:\006config' + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.\022,/v1/{name=tasks/*/pushNotificationConfigs/*}' + _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._serialized_options = b'\332A\006parent\202\323\344\223\002.\022,/v1/{parent=tasks/*}/pushNotificationConfigs' + _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._serialized_options = b'\202\323\344\223\002\n\022\010/v1/card' + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.*,/v1/{name=tasks/*/pushNotificationConfigs/*}' + _globals['_TASKSTATE']._serialized_start=8071 + _globals['_TASKSTATE']._serialized_end=8321 + _globals['_ROLE']._serialized_start=8323 + _globals['_ROLE']._serialized_end=8382 + _globals['_SENDMESSAGECONFIGURATION']._serialized_start=207 + _globals['_SENDMESSAGECONFIGURATION']._serialized_end=429 + _globals['_TASK']._serialized_start=432 + _globals['_TASK']._serialized_end=673 + _globals['_TASKSTATUS']._serialized_start=676 + _globals['_TASKSTATUS']._serialized_end=829 + _globals['_PART']._serialized_start=832 + _globals['_PART']._serialized_end=1001 + _globals['_FILEPART']._serialized_start=1004 + _globals['_FILEPART']._serialized_end=1151 + _globals['_DATAPART']._serialized_start=1153 + _globals['_DATAPART']._serialized_end=1208 + _globals['_MESSAGE']._serialized_start=1211 + _globals['_MESSAGE']._serialized_end=1466 + _globals['_ARTIFACT']._serialized_start=1469 + _globals['_ARTIFACT']._serialized_end=1687 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_start=1690 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_end=1888 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_start=1891 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_end=2126 + _globals['_PUSHNOTIFICATIONCONFIG']._serialized_start=2129 + _globals['_PUSHNOTIFICATIONCONFIG']._serialized_end=2277 + _globals['_AUTHENTICATIONINFO']._serialized_start=2279 + _globals['_AUTHENTICATIONINFO']._serialized_end=2359 + _globals['_AGENTINTERFACE']._serialized_start=2361 + _globals['_AGENTINTERFACE']._serialized_end=2425 + _globals['_AGENTCARD']._serialized_start=2428 + _globals['_AGENTCARD']._serialized_end=3396 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_start=3306 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_end=3396 + _globals['_AGENTPROVIDER']._serialized_start=3398 + _globals['_AGENTPROVIDER']._serialized_end=3467 + _globals['_AGENTCAPABILITIES']._serialized_start=3470 + _globals['_AGENTCAPABILITIES']._serialized_end=3622 + _globals['_AGENTEXTENSION']._serialized_start=3625 + _globals['_AGENTEXTENSION']._serialized_end=3770 + _globals['_AGENTSKILL']._serialized_start=3773 + _globals['_AGENTSKILL']._serialized_end=4017 + _globals['_AGENTCARDSIGNATURE']._serialized_start=4020 + _globals['_AGENTCARDSIGNATURE']._serialized_end=4159 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_start=4162 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_end=4300 + _globals['_STRINGLIST']._serialized_start=4302 + _globals['_STRINGLIST']._serialized_end=4334 + _globals['_SECURITY']._serialized_start=4337 + _globals['_SECURITY']._serialized_end=4484 + _globals['_SECURITY_SCHEMESENTRY']._serialized_start=4406 + _globals['_SECURITY_SCHEMESENTRY']._serialized_end=4484 + _globals['_SECURITYSCHEME']._serialized_start=4487 + _globals['_SECURITYSCHEME']._serialized_end=4973 + _globals['_APIKEYSECURITYSCHEME']._serialized_start=4975 + _globals['_APIKEYSECURITYSCHEME']._serialized_end=5079 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_start=5081 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_end=5200 + _globals['_OAUTH2SECURITYSCHEME']._serialized_start=5203 + _globals['_OAUTH2SECURITYSCHEME']._serialized_end=5349 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_start=5351 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_end=5461 + _globals['_MUTUALTLSSECURITYSCHEME']._serialized_start=5463 + _globals['_MUTUALTLSSECURITYSCHEME']._serialized_end=5522 + _globals['_OAUTHFLOWS']._serialized_start=5525 + _globals['_OAUTHFLOWS']._serialized_end=5829 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_start=5832 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_end=6098 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_start=6041 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_end=6098 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_start=6101 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_end=6322 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_start=6041 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_end=6098 + _globals['_IMPLICITOAUTHFLOW']._serialized_start=6325 + _globals['_IMPLICITOAUTHFLOW']._serialized_end=6544 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_start=6041 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_end=6098 + _globals['_PASSWORDOAUTHFLOW']._serialized_start=6547 + _globals['_PASSWORDOAUTHFLOW']._serialized_end=6750 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_start=6041 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_end=6098 + _globals['_SENDMESSAGEREQUEST']._serialized_start=6753 + _globals['_SENDMESSAGEREQUEST']._serialized_end=6946 + _globals['_GETTASKREQUEST']._serialized_start=6948 + _globals['_GETTASKREQUEST']._serialized_end=7028 + _globals['_CANCELTASKREQUEST']._serialized_start=7030 + _globals['_CANCELTASKREQUEST']._serialized_end=7069 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7071 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7129 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7131 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7192 + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7195 + _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7364 + _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_start=7366 + _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_end=7411 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7413 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7536 + _globals['_GETAGENTCARDREQUEST']._serialized_start=7538 + _globals['_GETAGENTCARDREQUEST']._serialized_end=7559 + _globals['_SENDMESSAGERESPONSE']._serialized_start=7561 + _globals['_SENDMESSAGERESPONSE']._serialized_end=7670 + _globals['_STREAMRESPONSE']._serialized_start=7673 + _globals['_STREAMRESPONSE']._serialized_end=7923 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_start=7926 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_end=8068 + _globals['_A2ASERVICE']._serialized_start=8385 + _globals['_A2ASERVICE']._serialized_end=9724 +# @@protoc_insertion_point(module_scope) diff --git a/src/a2a/grpc/a2a_pb2.pyi b/src/a2a/compat/v0_3/a2a_v0_3_pb2.pyi similarity index 100% rename from src/a2a/grpc/a2a_pb2.pyi rename to src/a2a/compat/v0_3/a2a_v0_3_pb2.pyi diff --git a/src/a2a/grpc/a2a_pb2_grpc.py b/src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py similarity index 77% rename from src/a2a/grpc/a2a_pb2_grpc.py rename to src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py index 9b0ad41bc..3bbd4dec7 100644 --- a/src/a2a/grpc/a2a_pb2_grpc.py +++ b/src/a2a/compat/v0_3/a2a_v0_3_pb2_grpc.py @@ -2,7 +2,7 @@ """Client and server classes corresponding to protobuf-defined services.""" import grpc -from . import a2a_pb2 as a2a__pb2 +from . import a2a_v0_3_pb2 as a2a__v0__3__pb2 from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 @@ -27,52 +27,52 @@ def __init__(self, channel): """ self.SendMessage = channel.unary_unary( '/a2a.v1.A2AService/SendMessage', - request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, - response_deserializer=a2a__pb2.SendMessageResponse.FromString, + request_serializer=a2a__v0__3__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.SendMessageResponse.FromString, _registered_method=True) self.SendStreamingMessage = channel.unary_stream( '/a2a.v1.A2AService/SendStreamingMessage', - request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, - response_deserializer=a2a__pb2.StreamResponse.FromString, + request_serializer=a2a__v0__3__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.StreamResponse.FromString, _registered_method=True) self.GetTask = channel.unary_unary( '/a2a.v1.A2AService/GetTask', - request_serializer=a2a__pb2.GetTaskRequest.SerializeToString, - response_deserializer=a2a__pb2.Task.FromString, + request_serializer=a2a__v0__3__pb2.GetTaskRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.Task.FromString, _registered_method=True) self.CancelTask = channel.unary_unary( '/a2a.v1.A2AService/CancelTask', - request_serializer=a2a__pb2.CancelTaskRequest.SerializeToString, - response_deserializer=a2a__pb2.Task.FromString, + request_serializer=a2a__v0__3__pb2.CancelTaskRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.Task.FromString, _registered_method=True) self.TaskSubscription = channel.unary_stream( '/a2a.v1.A2AService/TaskSubscription', - request_serializer=a2a__pb2.TaskSubscriptionRequest.SerializeToString, - response_deserializer=a2a__pb2.StreamResponse.FromString, + request_serializer=a2a__v0__3__pb2.TaskSubscriptionRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.StreamResponse.FromString, _registered_method=True) self.CreateTaskPushNotificationConfig = channel.unary_unary( '/a2a.v1.A2AService/CreateTaskPushNotificationConfig', - request_serializer=a2a__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, - response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + request_serializer=a2a__v0__3__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.TaskPushNotificationConfig.FromString, _registered_method=True) self.GetTaskPushNotificationConfig = channel.unary_unary( '/a2a.v1.A2AService/GetTaskPushNotificationConfig', - request_serializer=a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, - response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + request_serializer=a2a__v0__3__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.TaskPushNotificationConfig.FromString, _registered_method=True) self.ListTaskPushNotificationConfig = channel.unary_unary( '/a2a.v1.A2AService/ListTaskPushNotificationConfig', - request_serializer=a2a__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, - response_deserializer=a2a__pb2.ListTaskPushNotificationConfigResponse.FromString, + request_serializer=a2a__v0__3__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.ListTaskPushNotificationConfigResponse.FromString, _registered_method=True) self.GetAgentCard = channel.unary_unary( '/a2a.v1.A2AService/GetAgentCard', - request_serializer=a2a__pb2.GetAgentCardRequest.SerializeToString, - response_deserializer=a2a__pb2.AgentCard.FromString, + request_serializer=a2a__v0__3__pb2.GetAgentCardRequest.SerializeToString, + response_deserializer=a2a__v0__3__pb2.AgentCard.FromString, _registered_method=True) self.DeleteTaskPushNotificationConfig = channel.unary_unary( '/a2a.v1.A2AService/DeleteTaskPushNotificationConfig', - request_serializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + request_serializer=a2a__v0__3__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, _registered_method=True) @@ -171,52 +171,52 @@ def add_A2AServiceServicer_to_server(servicer, server): rpc_method_handlers = { 'SendMessage': grpc.unary_unary_rpc_method_handler( servicer.SendMessage, - request_deserializer=a2a__pb2.SendMessageRequest.FromString, - response_serializer=a2a__pb2.SendMessageResponse.SerializeToString, + request_deserializer=a2a__v0__3__pb2.SendMessageRequest.FromString, + response_serializer=a2a__v0__3__pb2.SendMessageResponse.SerializeToString, ), 'SendStreamingMessage': grpc.unary_stream_rpc_method_handler( servicer.SendStreamingMessage, - request_deserializer=a2a__pb2.SendMessageRequest.FromString, - response_serializer=a2a__pb2.StreamResponse.SerializeToString, + request_deserializer=a2a__v0__3__pb2.SendMessageRequest.FromString, + response_serializer=a2a__v0__3__pb2.StreamResponse.SerializeToString, ), 'GetTask': grpc.unary_unary_rpc_method_handler( servicer.GetTask, - request_deserializer=a2a__pb2.GetTaskRequest.FromString, - response_serializer=a2a__pb2.Task.SerializeToString, + request_deserializer=a2a__v0__3__pb2.GetTaskRequest.FromString, + response_serializer=a2a__v0__3__pb2.Task.SerializeToString, ), 'CancelTask': grpc.unary_unary_rpc_method_handler( servicer.CancelTask, - request_deserializer=a2a__pb2.CancelTaskRequest.FromString, - response_serializer=a2a__pb2.Task.SerializeToString, + request_deserializer=a2a__v0__3__pb2.CancelTaskRequest.FromString, + response_serializer=a2a__v0__3__pb2.Task.SerializeToString, ), 'TaskSubscription': grpc.unary_stream_rpc_method_handler( servicer.TaskSubscription, - request_deserializer=a2a__pb2.TaskSubscriptionRequest.FromString, - response_serializer=a2a__pb2.StreamResponse.SerializeToString, + request_deserializer=a2a__v0__3__pb2.TaskSubscriptionRequest.FromString, + response_serializer=a2a__v0__3__pb2.StreamResponse.SerializeToString, ), 'CreateTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( servicer.CreateTaskPushNotificationConfig, - request_deserializer=a2a__pb2.CreateTaskPushNotificationConfigRequest.FromString, - response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + request_deserializer=a2a__v0__3__pb2.CreateTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__v0__3__pb2.TaskPushNotificationConfig.SerializeToString, ), 'GetTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( servicer.GetTaskPushNotificationConfig, - request_deserializer=a2a__pb2.GetTaskPushNotificationConfigRequest.FromString, - response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + request_deserializer=a2a__v0__3__pb2.GetTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__v0__3__pb2.TaskPushNotificationConfig.SerializeToString, ), 'ListTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( servicer.ListTaskPushNotificationConfig, - request_deserializer=a2a__pb2.ListTaskPushNotificationConfigRequest.FromString, - response_serializer=a2a__pb2.ListTaskPushNotificationConfigResponse.SerializeToString, + request_deserializer=a2a__v0__3__pb2.ListTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__v0__3__pb2.ListTaskPushNotificationConfigResponse.SerializeToString, ), 'GetAgentCard': grpc.unary_unary_rpc_method_handler( servicer.GetAgentCard, - request_deserializer=a2a__pb2.GetAgentCardRequest.FromString, - response_serializer=a2a__pb2.AgentCard.SerializeToString, + request_deserializer=a2a__v0__3__pb2.GetAgentCardRequest.FromString, + response_serializer=a2a__v0__3__pb2.AgentCard.SerializeToString, ), 'DeleteTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( servicer.DeleteTaskPushNotificationConfig, - request_deserializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.FromString, + request_deserializer=a2a__v0__3__pb2.DeleteTaskPushNotificationConfigRequest.FromString, response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, ), } @@ -255,8 +255,8 @@ def SendMessage(request, request, target, '/a2a.v1.A2AService/SendMessage', - a2a__pb2.SendMessageRequest.SerializeToString, - a2a__pb2.SendMessageResponse.FromString, + a2a__v0__3__pb2.SendMessageRequest.SerializeToString, + a2a__v0__3__pb2.SendMessageResponse.FromString, options, channel_credentials, insecure, @@ -282,8 +282,8 @@ def SendStreamingMessage(request, request, target, '/a2a.v1.A2AService/SendStreamingMessage', - a2a__pb2.SendMessageRequest.SerializeToString, - a2a__pb2.StreamResponse.FromString, + a2a__v0__3__pb2.SendMessageRequest.SerializeToString, + a2a__v0__3__pb2.StreamResponse.FromString, options, channel_credentials, insecure, @@ -309,8 +309,8 @@ def GetTask(request, request, target, '/a2a.v1.A2AService/GetTask', - a2a__pb2.GetTaskRequest.SerializeToString, - a2a__pb2.Task.FromString, + a2a__v0__3__pb2.GetTaskRequest.SerializeToString, + a2a__v0__3__pb2.Task.FromString, options, channel_credentials, insecure, @@ -336,8 +336,8 @@ def CancelTask(request, request, target, '/a2a.v1.A2AService/CancelTask', - a2a__pb2.CancelTaskRequest.SerializeToString, - a2a__pb2.Task.FromString, + a2a__v0__3__pb2.CancelTaskRequest.SerializeToString, + a2a__v0__3__pb2.Task.FromString, options, channel_credentials, insecure, @@ -363,8 +363,8 @@ def TaskSubscription(request, request, target, '/a2a.v1.A2AService/TaskSubscription', - a2a__pb2.TaskSubscriptionRequest.SerializeToString, - a2a__pb2.StreamResponse.FromString, + a2a__v0__3__pb2.TaskSubscriptionRequest.SerializeToString, + a2a__v0__3__pb2.StreamResponse.FromString, options, channel_credentials, insecure, @@ -390,8 +390,8 @@ def CreateTaskPushNotificationConfig(request, request, target, '/a2a.v1.A2AService/CreateTaskPushNotificationConfig', - a2a__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, - a2a__pb2.TaskPushNotificationConfig.FromString, + a2a__v0__3__pb2.CreateTaskPushNotificationConfigRequest.SerializeToString, + a2a__v0__3__pb2.TaskPushNotificationConfig.FromString, options, channel_credentials, insecure, @@ -417,8 +417,8 @@ def GetTaskPushNotificationConfig(request, request, target, '/a2a.v1.A2AService/GetTaskPushNotificationConfig', - a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, - a2a__pb2.TaskPushNotificationConfig.FromString, + a2a__v0__3__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + a2a__v0__3__pb2.TaskPushNotificationConfig.FromString, options, channel_credentials, insecure, @@ -444,8 +444,8 @@ def ListTaskPushNotificationConfig(request, request, target, '/a2a.v1.A2AService/ListTaskPushNotificationConfig', - a2a__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, - a2a__pb2.ListTaskPushNotificationConfigResponse.FromString, + a2a__v0__3__pb2.ListTaskPushNotificationConfigRequest.SerializeToString, + a2a__v0__3__pb2.ListTaskPushNotificationConfigResponse.FromString, options, channel_credentials, insecure, @@ -471,8 +471,8 @@ def GetAgentCard(request, request, target, '/a2a.v1.A2AService/GetAgentCard', - a2a__pb2.GetAgentCardRequest.SerializeToString, - a2a__pb2.AgentCard.FromString, + a2a__v0__3__pb2.GetAgentCardRequest.SerializeToString, + a2a__v0__3__pb2.AgentCard.FromString, options, channel_credentials, insecure, @@ -498,7 +498,7 @@ def DeleteTaskPushNotificationConfig(request, request, target, '/a2a.v1.A2AService/DeleteTaskPushNotificationConfig', - a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + a2a__v0__3__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, google_dot_protobuf_dot_empty__pb2.Empty.FromString, options, channel_credentials, diff --git a/src/a2a/compat/v0_3/buf.lock b/src/a2a/compat/v0_3/buf.lock new file mode 100644 index 000000000..5df8acde6 --- /dev/null +++ b/src/a2a/compat/v0_3/buf.lock @@ -0,0 +1,6 @@ +# Generated by buf. DO NOT EDIT. +version: v2 +deps: + - name: buf.build/googleapis/googleapis + commit: 004180b77378443887d3b55cabc00384 + digest: b5:e8f475fe3330f31f5fd86ac689093bcd274e19611a09db91f41d637cb9197881ce89882b94d13a58738e53c91c6e4bae7dc1feba85f590164c975a89e25115dc diff --git a/src/a2a/compat/v0_3/buf.yaml b/src/a2a/compat/v0_3/buf.yaml new file mode 100644 index 000000000..8d304d427 --- /dev/null +++ b/src/a2a/compat/v0_3/buf.yaml @@ -0,0 +1,3 @@ +version: v2 +deps: + - buf.build/googleapis/googleapis diff --git a/src/a2a/compat/v0_3/context_builders.py b/src/a2a/compat/v0_3/context_builders.py new file mode 100644 index 000000000..2f2eec362 --- /dev/null +++ b/src/a2a/compat/v0_3/context_builders.py @@ -0,0 +1,80 @@ +"""Context builders that add v0.3 backwards-compatibility for extensions. + +The current spec uses ``A2A-Extensions`` (RFC 6648, no ``X-`` prefix). v0.3 +clients still send the old ``X-A2A-Extensions`` name, so the v0.3 compat +adapters wrap the default builders with these classes to recognize both names. +""" + +from typing import TYPE_CHECKING, Any + +import grpc + +from a2a.compat.v0_3.extension_headers import LEGACY_HTTP_EXTENSION_HEADER +from a2a.extensions.common import get_requested_extensions +from a2a.server.context import ServerCallContext + + +if TYPE_CHECKING: + from starlette.requests import Request + + from a2a.server.request_handlers.grpc_handler import ( + GrpcServerCallContextBuilder, + ) + from a2a.server.routes.common import ServerCallContextBuilder +else: + try: + from starlette.requests import Request + except ImportError: + Request = Any + + +def _get_legacy_grpc_extensions( + context: grpc.aio.ServicerContext, +) -> list[str]: + md = context.invocation_metadata() + if md is None: + return [] + lower_key = LEGACY_HTTP_EXTENSION_HEADER.lower() + return [ + e if isinstance(e, str) else e.decode('utf-8') + for k, e in md + if k.lower() == lower_key + ] + + +class V03ServerCallContextBuilder: + """Wraps a ServerCallContextBuilder to also accept the legacy header. + + Recognizes the v0.3 ``X-A2A-Extensions`` HTTP header in addition to the + spec ``A2A-Extensions``. + """ + + def __init__(self, inner: 'ServerCallContextBuilder') -> None: + self._inner = inner + + def build(self, request: 'Request') -> ServerCallContext: + """Builds a ServerCallContext, merging legacy extension headers.""" + context = self._inner.build(request) + context.requested_extensions |= get_requested_extensions( + request.headers.getlist(LEGACY_HTTP_EXTENSION_HEADER) + ) + return context + + +class V03GrpcServerCallContextBuilder: + """Wraps a GrpcServerCallContextBuilder to also accept the legacy metadata. + + Recognizes the v0.3 ``X-A2A-Extensions`` gRPC metadata key in addition to + the spec ``A2A-Extensions``. + """ + + def __init__(self, inner: 'GrpcServerCallContextBuilder') -> None: + self._inner = inner + + def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: + """Builds a ServerCallContext, merging legacy extension metadata.""" + server_context = self._inner.build(context) + server_context.requested_extensions |= get_requested_extensions( + _get_legacy_grpc_extensions(context) + ) + return server_context diff --git a/src/a2a/compat/v0_3/conversions.py b/src/a2a/compat/v0_3/conversions.py new file mode 100644 index 000000000..5945380e9 --- /dev/null +++ b/src/a2a/compat/v0_3/conversions.py @@ -0,0 +1,1375 @@ +import base64 + +from typing import Any + +from google.protobuf.json_format import MessageToDict, ParseDict + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.versions import is_legacy_version +from a2a.types import a2a_pb2 as pb2_v10 +from a2a.utils import constants, errors + + +_COMPAT_TO_CORE_TASK_STATE: dict[types_v03.TaskState, Any] = { + types_v03.TaskState.unknown: pb2_v10.TaskState.TASK_STATE_UNSPECIFIED, + types_v03.TaskState.submitted: pb2_v10.TaskState.TASK_STATE_SUBMITTED, + types_v03.TaskState.working: pb2_v10.TaskState.TASK_STATE_WORKING, + types_v03.TaskState.completed: pb2_v10.TaskState.TASK_STATE_COMPLETED, + types_v03.TaskState.failed: pb2_v10.TaskState.TASK_STATE_FAILED, + types_v03.TaskState.canceled: pb2_v10.TaskState.TASK_STATE_CANCELED, + types_v03.TaskState.input_required: pb2_v10.TaskState.TASK_STATE_INPUT_REQUIRED, + types_v03.TaskState.rejected: pb2_v10.TaskState.TASK_STATE_REJECTED, + types_v03.TaskState.auth_required: pb2_v10.TaskState.TASK_STATE_AUTH_REQUIRED, +} + +_CORE_TO_COMPAT_TASK_STATE: dict[Any, types_v03.TaskState] = { + v: k for k, v in _COMPAT_TO_CORE_TASK_STATE.items() +} + + +def to_core_part(compat_part: types_v03.Part) -> pb2_v10.Part: # noqa: PLR0912 + """Converts a v0.3 Part (Pydantic model) to a v1.0 core Part (Protobuf object).""" + core_part = pb2_v10.Part() + root = compat_part.root + + if isinstance(root, types_v03.TextPart): + core_part.text = root.text + if root.metadata is not None: + ParseDict(root.metadata, core_part.metadata) + + elif isinstance(root, types_v03.DataPart): + if root.metadata is None: + data_part_compat = False + else: + meta = dict(root.metadata) + data_part_compat = meta.pop('data_part_compat', False) + if meta: + ParseDict(meta, core_part.metadata) + + if data_part_compat: + val = root.data['value'] + ParseDict(val, core_part.data) + else: + ParseDict(root.data, core_part.data.struct_value) + + elif isinstance(root, types_v03.FilePart): + if isinstance(root.file, types_v03.FileWithBytes): + core_part.raw = base64.b64decode(root.file.bytes) + if root.file.mime_type: + core_part.media_type = root.file.mime_type + if root.file.name: + core_part.filename = root.file.name + elif isinstance(root.file, types_v03.FileWithUri): + core_part.url = root.file.uri + if root.file.mime_type: + core_part.media_type = root.file.mime_type + if root.file.name: + core_part.filename = root.file.name + + if root.metadata is not None: + ParseDict(root.metadata, core_part.metadata) + + return core_part + + +def to_compat_part(core_part: pb2_v10.Part) -> types_v03.Part: + """Converts a v1.0 core Part (Protobuf object) to a v0.3 Part (Pydantic model).""" + which = core_part.WhichOneof('content') + metadata = ( + MessageToDict(core_part.metadata) + if core_part.HasField('metadata') + else None + ) + + if which == 'text': + return types_v03.Part( + root=types_v03.TextPart(text=core_part.text, metadata=metadata) + ) + + if which == 'data': + # core_part.data is a google.protobuf.Value. It can be converted to dict. + data_dict = MessageToDict(core_part.data) + if not isinstance(data_dict, dict): + data_dict = {'value': data_dict} + metadata = metadata or {} + metadata['data_part_compat'] = True + + return types_v03.Part( + root=types_v03.DataPart(data=data_dict, metadata=metadata) + ) + + if which in ('raw', 'url'): + media_type = core_part.media_type if core_part.media_type else None + filename = core_part.filename if core_part.filename else None + + if which == 'raw': + b64 = base64.b64encode(core_part.raw).decode('utf-8') + file_obj_bytes = types_v03.FileWithBytes( + bytes=b64, mime_type=media_type, name=filename + ) + return types_v03.Part( + root=types_v03.FilePart(file=file_obj_bytes, metadata=metadata) + ) + file_obj_uri = types_v03.FileWithUri( + uri=core_part.url, mime_type=media_type, name=filename + ) + return types_v03.Part( + root=types_v03.FilePart(file=file_obj_uri, metadata=metadata) + ) + + raise ValueError(f'Unknown part content type: {which}') + + +def to_core_message(compat_msg: types_v03.Message) -> pb2_v10.Message: + """Convert message to v1.0 core type.""" + core_msg = pb2_v10.Message( + message_id=compat_msg.message_id, + context_id=compat_msg.context_id or '', + task_id=compat_msg.task_id or '', + ) + if compat_msg.reference_task_ids: + core_msg.reference_task_ids.extend(compat_msg.reference_task_ids) + + if compat_msg.role == types_v03.Role.user: + core_msg.role = pb2_v10.Role.ROLE_USER + elif compat_msg.role == types_v03.Role.agent: + core_msg.role = pb2_v10.Role.ROLE_AGENT + + if compat_msg.metadata: + ParseDict(compat_msg.metadata, core_msg.metadata) + + if compat_msg.extensions: + core_msg.extensions.extend(compat_msg.extensions) + + for p in compat_msg.parts: + core_msg.parts.append(to_core_part(p)) + return core_msg + + +def to_compat_message(core_msg: pb2_v10.Message) -> types_v03.Message: + """Convert message to v0.3 compat type.""" + role = ( + types_v03.Role.user + if core_msg.role == pb2_v10.Role.ROLE_USER + else types_v03.Role.agent + ) + return types_v03.Message( + message_id=core_msg.message_id, + role=role, + context_id=core_msg.context_id or None, + task_id=core_msg.task_id or None, + reference_task_ids=list(core_msg.reference_task_ids) + if core_msg.reference_task_ids + else None, + metadata=MessageToDict(core_msg.metadata) + if core_msg.metadata + else None, + extensions=list(core_msg.extensions) if core_msg.extensions else None, + parts=[to_compat_part(p) for p in core_msg.parts], + ) + + +def to_core_task_status( + compat_status: types_v03.TaskStatus, +) -> pb2_v10.TaskStatus: + """Convert task status to v1.0 core type.""" + core_status = pb2_v10.TaskStatus() + if compat_status.state: + core_status.state = _COMPAT_TO_CORE_TASK_STATE.get( + compat_status.state, pb2_v10.TaskState.TASK_STATE_UNSPECIFIED + ) + + if compat_status.message: + core_status.message.CopyFrom(to_core_message(compat_status.message)) + if compat_status.timestamp: + core_status.timestamp.FromJsonString( + str(compat_status.timestamp).replace('+00:00', 'Z') + ) + return core_status + + +def to_compat_task_status( + core_status: pb2_v10.TaskStatus, +) -> types_v03.TaskStatus: + """Convert task status to v0.3 compat type.""" + state_enum = _CORE_TO_COMPAT_TASK_STATE.get( + core_status.state, types_v03.TaskState.unknown + ) + + update = ( + to_compat_message(core_status.message) + if core_status.HasField('message') + else None + ) + ts = ( + core_status.timestamp.ToJsonString() + if core_status.HasField('timestamp') + else None + ) + + return types_v03.TaskStatus(state=state_enum, message=update, timestamp=ts) + + +def to_core_task(compat_task: types_v03.Task) -> pb2_v10.Task: + """Convert task to v1.0 core type.""" + core_task = pb2_v10.Task( + id=compat_task.id, + context_id=compat_task.context_id, + ) + if compat_task.status: + core_task.status.CopyFrom(to_core_task_status(compat_task.status)) + if compat_task.history: + for m in compat_task.history: + core_task.history.append(to_core_message(m)) + if compat_task.artifacts: + for a in compat_task.artifacts: + core_task.artifacts.append(to_core_artifact(a)) + if compat_task.metadata: + ParseDict(compat_task.metadata, core_task.metadata) + return core_task + + +def to_compat_task(core_task: pb2_v10.Task) -> types_v03.Task: + """Convert task to v0.3 compat type.""" + return types_v03.Task( + id=core_task.id, + context_id=core_task.context_id, + status=to_compat_task_status(core_task.status) + if core_task.HasField('status') + else types_v03.TaskStatus(state=types_v03.TaskState.unknown), + history=[to_compat_message(m) for m in core_task.history] + if core_task.history + else None, + artifacts=[to_compat_artifact(a) for a in core_task.artifacts] + if core_task.artifacts + else None, + metadata=MessageToDict(core_task.metadata) + if core_task.HasField('metadata') + else None, + ) + + +def to_core_authentication_info( + compat_auth: types_v03.PushNotificationAuthenticationInfo, +) -> pb2_v10.AuthenticationInfo: + """Convert authentication info to v1.0 core type.""" + core_auth = pb2_v10.AuthenticationInfo() + if compat_auth.schemes: + core_auth.scheme = compat_auth.schemes[0] + if compat_auth.credentials: + core_auth.credentials = compat_auth.credentials + return core_auth + + +def to_compat_authentication_info( + core_auth: pb2_v10.AuthenticationInfo, +) -> types_v03.PushNotificationAuthenticationInfo: + """Convert authentication info to v0.3 compat type.""" + return types_v03.PushNotificationAuthenticationInfo( + schemes=[core_auth.scheme] if core_auth.scheme else [], + credentials=core_auth.credentials if core_auth.credentials else None, + ) + + +def to_core_push_notification_config( + compat_config: types_v03.PushNotificationConfig, +) -> pb2_v10.TaskPushNotificationConfig: + """Convert push notification config to v1.0 core type.""" + core_config = pb2_v10.TaskPushNotificationConfig(url=compat_config.url) + if compat_config.id: + core_config.id = compat_config.id + if compat_config.token: + core_config.token = compat_config.token + if compat_config.authentication: + core_config.authentication.CopyFrom( + to_core_authentication_info(compat_config.authentication) + ) + return core_config + + +def to_compat_push_notification_config( + core_config: pb2_v10.TaskPushNotificationConfig, +) -> types_v03.PushNotificationConfig: + """Convert push notification config to v0.3 compat type.""" + return types_v03.PushNotificationConfig( + url=core_config.url if core_config.url else '', + id=core_config.id if core_config.id else None, + token=core_config.token if core_config.token else None, + authentication=to_compat_authentication_info(core_config.authentication) + if core_config.HasField('authentication') + else None, + ) + + +def to_core_send_message_configuration( + compat_config: types_v03.MessageSendConfiguration, +) -> pb2_v10.SendMessageConfiguration: + """Convert send message configuration to v1.0 core type.""" + core_config = pb2_v10.SendMessageConfiguration() + # Result will be blocking by default (return_immediately=False) + if compat_config.accepted_output_modes: + core_config.accepted_output_modes.extend( + compat_config.accepted_output_modes + ) + if compat_config.push_notification_config: + core_config.task_push_notification_config.CopyFrom( + to_core_push_notification_config( + compat_config.push_notification_config + ) + ) + if compat_config.history_length is not None: + core_config.history_length = compat_config.history_length + if compat_config.blocking is not None: + core_config.return_immediately = not compat_config.blocking + return core_config + + +def to_compat_send_message_configuration( + core_config: pb2_v10.SendMessageConfiguration, +) -> types_v03.MessageSendConfiguration: + """Convert send message configuration to v0.3 compat type.""" + return types_v03.MessageSendConfiguration( + accepted_output_modes=list(core_config.accepted_output_modes) + if core_config.accepted_output_modes + else None, + push_notification_config=to_compat_push_notification_config( + core_config.task_push_notification_config + ) + if core_config.HasField('task_push_notification_config') + else None, + history_length=core_config.history_length + if core_config.HasField('history_length') + else None, + blocking=not core_config.return_immediately, + ) + + +def to_core_artifact(compat_artifact: types_v03.Artifact) -> pb2_v10.Artifact: + """Convert artifact to v1.0 core type.""" + core_artifact = pb2_v10.Artifact(artifact_id=compat_artifact.artifact_id) + if compat_artifact.name: + core_artifact.name = compat_artifact.name + if compat_artifact.description: + core_artifact.description = compat_artifact.description + for p in compat_artifact.parts: + core_artifact.parts.append(to_core_part(p)) + if compat_artifact.metadata: + ParseDict(compat_artifact.metadata, core_artifact.metadata) + if compat_artifact.extensions: + core_artifact.extensions.extend(compat_artifact.extensions) + return core_artifact + + +def to_compat_artifact(core_artifact: pb2_v10.Artifact) -> types_v03.Artifact: + """Convert artifact to v0.3 compat type.""" + return types_v03.Artifact( + artifact_id=core_artifact.artifact_id, + name=core_artifact.name if core_artifact.name else None, + description=core_artifact.description + if core_artifact.description + else None, + parts=[to_compat_part(p) for p in core_artifact.parts], + metadata=MessageToDict(core_artifact.metadata) + if core_artifact.HasField('metadata') + else None, + extensions=list(core_artifact.extensions) + if core_artifact.extensions + else None, + ) + + +def to_core_task_status_update_event( + compat_event: types_v03.TaskStatusUpdateEvent, +) -> pb2_v10.TaskStatusUpdateEvent: + """Convert task status update event to v1.0 core type.""" + core_event = pb2_v10.TaskStatusUpdateEvent( + task_id=compat_event.task_id, context_id=compat_event.context_id + ) + if compat_event.status: + core_event.status.CopyFrom(to_core_task_status(compat_event.status)) + if compat_event.metadata: + ParseDict(compat_event.metadata, core_event.metadata) + return core_event + + +def to_compat_task_status_update_event( + core_event: pb2_v10.TaskStatusUpdateEvent, +) -> types_v03.TaskStatusUpdateEvent: + """Convert task status update event to v0.3 compat type.""" + status = ( + to_compat_task_status(core_event.status) + if core_event.HasField('status') + else types_v03.TaskStatus(state=types_v03.TaskState.unknown) + ) + final = status.state in ( + types_v03.TaskState.completed, + types_v03.TaskState.canceled, + types_v03.TaskState.failed, + types_v03.TaskState.rejected, + ) + return types_v03.TaskStatusUpdateEvent( + task_id=core_event.task_id, + context_id=core_event.context_id, + status=status, + metadata=MessageToDict(core_event.metadata) + if core_event.HasField('metadata') + else None, + final=final, + ) + + +def to_core_task_artifact_update_event( + compat_event: types_v03.TaskArtifactUpdateEvent, +) -> pb2_v10.TaskArtifactUpdateEvent: + """Convert task artifact update event to v1.0 core type.""" + core_event = pb2_v10.TaskArtifactUpdateEvent( + task_id=compat_event.task_id, context_id=compat_event.context_id + ) + if compat_event.artifact: + core_event.artifact.CopyFrom(to_core_artifact(compat_event.artifact)) + if compat_event.append is not None: + core_event.append = compat_event.append + if compat_event.last_chunk is not None: + core_event.last_chunk = compat_event.last_chunk + if compat_event.metadata: + ParseDict(compat_event.metadata, core_event.metadata) + return core_event + + +def to_core_security_requirement( + compat_req: dict[str, list[str]], +) -> pb2_v10.SecurityRequirement: + """Convert security requirement to v1.0 core type.""" + core_req = pb2_v10.SecurityRequirement() + for scheme_name, scopes in compat_req.items(): + sl = pb2_v10.StringList() + sl.list.extend(scopes) + core_req.schemes[scheme_name].CopyFrom(sl) + return core_req + + +def to_compat_security_requirement( + core_req: pb2_v10.SecurityRequirement, +) -> dict[str, list[str]]: + """Convert security requirement to v0.3 compat type.""" + return { + scheme_name: list(string_list.list) + for scheme_name, string_list in core_req.schemes.items() + } + + +def to_core_oauth_flows( + compat_flows: types_v03.OAuthFlows, +) -> pb2_v10.OAuthFlows: + """Convert oauth flows to v1.0 core type.""" + core_flows = pb2_v10.OAuthFlows() + if compat_flows.authorization_code: + f = pb2_v10.AuthorizationCodeOAuthFlow( + authorization_url=compat_flows.authorization_code.authorization_url, + token_url=compat_flows.authorization_code.token_url, + scopes=compat_flows.authorization_code.scopes, + ) + if compat_flows.authorization_code.refresh_url: + f.refresh_url = compat_flows.authorization_code.refresh_url + core_flows.authorization_code.CopyFrom(f) + + if compat_flows.client_credentials: + f_client = pb2_v10.ClientCredentialsOAuthFlow( + token_url=compat_flows.client_credentials.token_url, + scopes=compat_flows.client_credentials.scopes, + ) + if compat_flows.client_credentials.refresh_url: + f_client.refresh_url = compat_flows.client_credentials.refresh_url + core_flows.client_credentials.CopyFrom(f_client) + + if compat_flows.implicit: + f_impl = pb2_v10.ImplicitOAuthFlow( + authorization_url=compat_flows.implicit.authorization_url, + scopes=compat_flows.implicit.scopes, + ) + if compat_flows.implicit.refresh_url: + f_impl.refresh_url = compat_flows.implicit.refresh_url + core_flows.implicit.CopyFrom(f_impl) + + if compat_flows.password: + f_pass = pb2_v10.PasswordOAuthFlow( + token_url=compat_flows.password.token_url, + scopes=compat_flows.password.scopes, + ) + if compat_flows.password.refresh_url: + f_pass.refresh_url = compat_flows.password.refresh_url + core_flows.password.CopyFrom(f_pass) + + return core_flows + + +def to_compat_oauth_flows( + core_flows: pb2_v10.OAuthFlows, +) -> types_v03.OAuthFlows: + """Convert oauth flows to v0.3 compat type.""" + which = core_flows.WhichOneof('flow') + auth_code, client_cred, implicit, password = None, None, None, None + + if which == 'authorization_code': + auth_code = types_v03.AuthorizationCodeOAuthFlow( + authorization_url=core_flows.authorization_code.authorization_url, + token_url=core_flows.authorization_code.token_url, + scopes=dict(core_flows.authorization_code.scopes), + refresh_url=core_flows.authorization_code.refresh_url + if core_flows.authorization_code.refresh_url + else None, + ) + elif which == 'client_credentials': + client_cred = types_v03.ClientCredentialsOAuthFlow( + token_url=core_flows.client_credentials.token_url, + scopes=dict(core_flows.client_credentials.scopes), + refresh_url=core_flows.client_credentials.refresh_url + if core_flows.client_credentials.refresh_url + else None, + ) + elif which == 'implicit': + implicit = types_v03.ImplicitOAuthFlow( + authorization_url=core_flows.implicit.authorization_url, + scopes=dict(core_flows.implicit.scopes), + refresh_url=core_flows.implicit.refresh_url + if core_flows.implicit.refresh_url + else None, + ) + elif which == 'password': + password = types_v03.PasswordOAuthFlow( + token_url=core_flows.password.token_url, + scopes=dict(core_flows.password.scopes), + refresh_url=core_flows.password.refresh_url + if core_flows.password.refresh_url + else None, + ) + # Note: device_code from v1.0 is dropped since v0.3 doesn't support it + + return types_v03.OAuthFlows( + authorization_code=auth_code, + client_credentials=client_cred, + implicit=implicit, + password=password, + ) + + +def to_core_security_scheme( + compat_scheme: types_v03.SecurityScheme, +) -> pb2_v10.SecurityScheme: + """Convert security scheme to v1.0 core type.""" + core_scheme = pb2_v10.SecurityScheme() + root = compat_scheme.root + + if isinstance(root, types_v03.APIKeySecurityScheme): + core_scheme.api_key_security_scheme.location = root.in_.value + core_scheme.api_key_security_scheme.name = root.name + if root.description: + core_scheme.api_key_security_scheme.description = root.description + + elif isinstance(root, types_v03.HTTPAuthSecurityScheme): + core_scheme.http_auth_security_scheme.scheme = root.scheme + if root.bearer_format: + core_scheme.http_auth_security_scheme.bearer_format = ( + root.bearer_format + ) + if root.description: + core_scheme.http_auth_security_scheme.description = root.description + + elif isinstance(root, types_v03.OAuth2SecurityScheme): + core_scheme.oauth2_security_scheme.flows.CopyFrom( + to_core_oauth_flows(root.flows) + ) + if root.oauth2_metadata_url: + core_scheme.oauth2_security_scheme.oauth2_metadata_url = ( + root.oauth2_metadata_url + ) + if root.description: + core_scheme.oauth2_security_scheme.description = root.description + + elif isinstance(root, types_v03.OpenIdConnectSecurityScheme): + core_scheme.open_id_connect_security_scheme.open_id_connect_url = ( + root.open_id_connect_url + ) + if root.description: + core_scheme.open_id_connect_security_scheme.description = ( + root.description + ) + + elif isinstance(root, types_v03.MutualTLSSecurityScheme): + # Mutual TLS has no required fields other than description which is optional + core_scheme.mtls_security_scheme.SetInParent() + if root.description: + core_scheme.mtls_security_scheme.description = root.description + + return core_scheme + + +def to_compat_security_scheme( + core_scheme: pb2_v10.SecurityScheme, +) -> types_v03.SecurityScheme: + """Convert security scheme to v0.3 compat type.""" + which = core_scheme.WhichOneof('scheme') + + if which == 'api_key_security_scheme': + s_api = core_scheme.api_key_security_scheme + return types_v03.SecurityScheme( + root=types_v03.APIKeySecurityScheme( + in_=types_v03.In(s_api.location), + name=s_api.name, + description=s_api.description if s_api.description else None, + ) + ) + + if which == 'http_auth_security_scheme': + s_http = core_scheme.http_auth_security_scheme + return types_v03.SecurityScheme( + root=types_v03.HTTPAuthSecurityScheme( + scheme=s_http.scheme, + bearer_format=s_http.bearer_format + if s_http.bearer_format + else None, + description=s_http.description if s_http.description else None, + ) + ) + + if which == 'oauth2_security_scheme': + s_oauth = core_scheme.oauth2_security_scheme + return types_v03.SecurityScheme( + root=types_v03.OAuth2SecurityScheme( + flows=to_compat_oauth_flows(s_oauth.flows), + oauth2_metadata_url=s_oauth.oauth2_metadata_url + if s_oauth.oauth2_metadata_url + else None, + description=s_oauth.description + if s_oauth.description + else None, + ) + ) + + if which == 'open_id_connect_security_scheme': + s_oidc = core_scheme.open_id_connect_security_scheme + return types_v03.SecurityScheme( + root=types_v03.OpenIdConnectSecurityScheme( + open_id_connect_url=s_oidc.open_id_connect_url, + description=s_oidc.description if s_oidc.description else None, + ) + ) + + if which == 'mtls_security_scheme': + s_mtls = core_scheme.mtls_security_scheme + return types_v03.SecurityScheme( + root=types_v03.MutualTLSSecurityScheme( + description=s_mtls.description if s_mtls.description else None + ) + ) + + raise ValueError(f'Unknown security scheme type: {which}') + + +def to_core_agent_interface( + compat_interface: types_v03.AgentInterface, +) -> pb2_v10.AgentInterface: + """Convert agent interface to v1.0 core type.""" + return pb2_v10.AgentInterface( + url=compat_interface.url, + protocol_binding=compat_interface.transport, + protocol_version=constants.PROTOCOL_VERSION_0_3, # Defaulting for legacy + ) + + +def to_compat_agent_interface( + core_interface: pb2_v10.AgentInterface, +) -> types_v03.AgentInterface: + """Convert agent interface to v0.3 compat type.""" + return types_v03.AgentInterface( + url=core_interface.url, transport=core_interface.protocol_binding + ) + + +def to_core_agent_provider( + compat_provider: types_v03.AgentProvider, +) -> pb2_v10.AgentProvider: + """Convert agent provider to v1.0 core type.""" + return pb2_v10.AgentProvider( + url=compat_provider.url, organization=compat_provider.organization + ) + + +def to_compat_agent_provider( + core_provider: pb2_v10.AgentProvider, +) -> types_v03.AgentProvider: + """Convert agent provider to v0.3 compat type.""" + return types_v03.AgentProvider( + url=core_provider.url, organization=core_provider.organization + ) + + +def to_core_agent_extension( + compat_ext: types_v03.AgentExtension, +) -> pb2_v10.AgentExtension: + """Convert agent extension to v1.0 core type.""" + core_ext = pb2_v10.AgentExtension() + if compat_ext.uri: + core_ext.uri = compat_ext.uri + if compat_ext.description: + core_ext.description = compat_ext.description + if compat_ext.required is not None: + core_ext.required = compat_ext.required + if compat_ext.params: + ParseDict(compat_ext.params, core_ext.params) + return core_ext + + +def to_compat_agent_extension( + core_ext: pb2_v10.AgentExtension, +) -> types_v03.AgentExtension: + """Convert agent extension to v0.3 compat type.""" + return types_v03.AgentExtension( + uri=core_ext.uri, + description=core_ext.description if core_ext.description else None, + required=core_ext.required, + params=MessageToDict(core_ext.params) + if core_ext.HasField('params') + else None, + ) + + +def to_core_agent_capabilities( + compat_cap: types_v03.AgentCapabilities, +) -> pb2_v10.AgentCapabilities: + """Convert agent capabilities to v1.0 core type.""" + core_cap = pb2_v10.AgentCapabilities() + if compat_cap.streaming is not None: + core_cap.streaming = compat_cap.streaming + if compat_cap.push_notifications is not None: + core_cap.push_notifications = compat_cap.push_notifications + if compat_cap.extensions: + core_cap.extensions.extend( + [to_core_agent_extension(e) for e in compat_cap.extensions] + ) + return core_cap + + +def to_compat_agent_capabilities( + core_cap: pb2_v10.AgentCapabilities, +) -> types_v03.AgentCapabilities: + """Convert agent capabilities to v0.3 compat type.""" + return types_v03.AgentCapabilities( + streaming=core_cap.streaming + if core_cap.HasField('streaming') + else None, + push_notifications=core_cap.push_notifications + if core_cap.HasField('push_notifications') + else None, + extensions=[to_compat_agent_extension(e) for e in core_cap.extensions] + if core_cap.extensions + else None, + state_transition_history=None, # No longer supported in v1.0 + ) + + +def to_core_agent_skill( + compat_skill: types_v03.AgentSkill, +) -> pb2_v10.AgentSkill: + """Convert agent skill to v1.0 core type.""" + core_skill = pb2_v10.AgentSkill( + id=compat_skill.id, + name=compat_skill.name, + description=compat_skill.description, + ) + if compat_skill.tags: + core_skill.tags.extend(compat_skill.tags) + if compat_skill.examples: + core_skill.examples.extend(compat_skill.examples) + if compat_skill.input_modes: + core_skill.input_modes.extend(compat_skill.input_modes) + if compat_skill.output_modes: + core_skill.output_modes.extend(compat_skill.output_modes) + if compat_skill.security: + core_skill.security_requirements.extend( + [to_core_security_requirement(r) for r in compat_skill.security] + ) + return core_skill + + +def to_compat_agent_skill( + core_skill: pb2_v10.AgentSkill, +) -> types_v03.AgentSkill: + """Convert agent skill to v0.3 compat type.""" + return types_v03.AgentSkill( + id=core_skill.id, + name=core_skill.name, + description=core_skill.description, + tags=list(core_skill.tags) if core_skill.tags else [], + examples=list(core_skill.examples) if core_skill.examples else None, + input_modes=list(core_skill.input_modes) + if core_skill.input_modes + else None, + output_modes=list(core_skill.output_modes) + if core_skill.output_modes + else None, + security=[ + to_compat_security_requirement(r) + for r in core_skill.security_requirements + ] + if core_skill.security_requirements + else None, + ) + + +def to_core_agent_card_signature( + compat_sig: types_v03.AgentCardSignature, +) -> pb2_v10.AgentCardSignature: + """Convert agent card signature to v1.0 core type.""" + core_sig = pb2_v10.AgentCardSignature( + protected=compat_sig.protected, signature=compat_sig.signature + ) + if compat_sig.header: + ParseDict(compat_sig.header, core_sig.header) + return core_sig + + +def to_compat_agent_card_signature( + core_sig: pb2_v10.AgentCardSignature, +) -> types_v03.AgentCardSignature: + """Convert agent card signature to v0.3 compat type.""" + return types_v03.AgentCardSignature( + protected=core_sig.protected, + signature=core_sig.signature, + header=MessageToDict(core_sig.header) + if core_sig.HasField('header') + else None, + ) + + +def to_core_agent_card(compat_card: types_v03.AgentCard) -> pb2_v10.AgentCard: + """Convert agent card to v1.0 core type.""" + core_card = pb2_v10.AgentCard( + name=compat_card.name, + description=compat_card.description, + version=compat_card.version, + ) + + # Map primary interface + primary_interface = pb2_v10.AgentInterface( + url=compat_card.url, + protocol_binding=compat_card.preferred_transport or 'JSONRPC', + protocol_version=compat_card.protocol_version + or constants.PROTOCOL_VERSION_0_3, + ) + core_card.supported_interfaces.append(primary_interface) + + if compat_card.additional_interfaces: + core_card.supported_interfaces.extend( + [ + to_core_agent_interface(i) + for i in compat_card.additional_interfaces + ] + ) + + if compat_card.provider: + core_card.provider.CopyFrom( + to_core_agent_provider(compat_card.provider) + ) + + if compat_card.documentation_url: + core_card.documentation_url = compat_card.documentation_url + + if compat_card.icon_url: + core_card.icon_url = compat_card.icon_url + + core_cap = to_core_agent_capabilities(compat_card.capabilities) + if compat_card.supports_authenticated_extended_card is not None: + core_cap.extended_agent_card = ( + compat_card.supports_authenticated_extended_card + ) + core_card.capabilities.CopyFrom(core_cap) + + if compat_card.security_schemes: + for k, v in compat_card.security_schemes.items(): + core_card.security_schemes[k].CopyFrom(to_core_security_scheme(v)) + + if compat_card.security: + core_card.security_requirements.extend( + [to_core_security_requirement(r) for r in compat_card.security] + ) + + if compat_card.default_input_modes: + core_card.default_input_modes.extend(compat_card.default_input_modes) + + if compat_card.default_output_modes: + core_card.default_output_modes.extend(compat_card.default_output_modes) + + if compat_card.skills: + core_card.skills.extend( + [to_core_agent_skill(s) for s in compat_card.skills] + ) + + if compat_card.signatures: + core_card.signatures.extend( + [to_core_agent_card_signature(s) for s in compat_card.signatures] + ) + + return core_card + + +def to_compat_agent_card(core_card: pb2_v10.AgentCard) -> types_v03.AgentCard: + # Map supported interfaces back to legacy layout + """Convert agent card to v0.3 compat type.""" + compat_interfaces = [ + interface + for interface in core_card.supported_interfaces + if ( + (not interface.protocol_version) + or is_legacy_version(interface.protocol_version) + ) + ] + if not compat_interfaces: + raise errors.VersionNotSupportedError( + 'AgentCard must have at least one interface with compatible protocol version.' + ) + + primary_interface = compat_interfaces[0] + additional_interfaces = [ + to_compat_agent_interface(i) for i in compat_interfaces[1:] + ] + + compat_cap = to_compat_agent_capabilities(core_card.capabilities) + supports_authenticated_extended_card = ( + core_card.capabilities.extended_agent_card + if core_card.capabilities.HasField('extended_agent_card') + else None + ) + + return types_v03.AgentCard( + name=core_card.name, + description=core_card.description, + version=core_card.version, + url=primary_interface.url, + preferred_transport=primary_interface.protocol_binding, + protocol_version=primary_interface.protocol_version + or constants.PROTOCOL_VERSION_0_3, + additional_interfaces=additional_interfaces or None, + provider=to_compat_agent_provider(core_card.provider) + if core_card.HasField('provider') + else None, + documentation_url=core_card.documentation_url + if core_card.HasField('documentation_url') + else None, + icon_url=core_card.icon_url if core_card.HasField('icon_url') else None, + capabilities=compat_cap, + supports_authenticated_extended_card=supports_authenticated_extended_card, + security_schemes={ + k: to_compat_security_scheme(v) + for k, v in core_card.security_schemes.items() + } + if core_card.security_schemes + else None, + security=[ + to_compat_security_requirement(r) + for r in core_card.security_requirements + ] + if core_card.security_requirements + else None, + default_input_modes=list(core_card.default_input_modes) + if core_card.default_input_modes + else [], + default_output_modes=list(core_card.default_output_modes) + if core_card.default_output_modes + else [], + skills=[to_compat_agent_skill(s) for s in core_card.skills] + if core_card.skills + else [], + signatures=[ + to_compat_agent_card_signature(s) for s in core_card.signatures + ] + if core_card.signatures + else None, + ) + + +def to_compat_task_artifact_update_event( + core_event: pb2_v10.TaskArtifactUpdateEvent, +) -> types_v03.TaskArtifactUpdateEvent: + """Convert task artifact update event to v0.3 compat type.""" + return types_v03.TaskArtifactUpdateEvent( + task_id=core_event.task_id, + context_id=core_event.context_id, + artifact=to_compat_artifact(core_event.artifact), + append=core_event.append, + last_chunk=core_event.last_chunk, + metadata=MessageToDict(core_event.metadata) + if core_event.HasField('metadata') + else None, + ) + + +def to_core_task_push_notification_config( + compat_config: types_v03.TaskPushNotificationConfig, +) -> pb2_v10.TaskPushNotificationConfig: + """Convert task push notification config to v1.0 core type.""" + core_config = pb2_v10.TaskPushNotificationConfig( + task_id=compat_config.task_id + ) + if compat_config.push_notification_config: + core_config.MergeFrom( + to_core_push_notification_config( + compat_config.push_notification_config + ) + ) + return core_config + + +def to_compat_task_push_notification_config( + core_config: pb2_v10.TaskPushNotificationConfig, +) -> types_v03.TaskPushNotificationConfig: + """Convert task push notification config to v0.3 compat type.""" + return types_v03.TaskPushNotificationConfig( + task_id=core_config.task_id, + push_notification_config=to_compat_push_notification_config( + core_config + ), + ) + + +def to_core_send_message_request( + compat_req: types_v03.SendMessageRequest, +) -> pb2_v10.SendMessageRequest: + """Convert send message request to v1.0 core type.""" + core_req = pb2_v10.SendMessageRequest() + if compat_req.params.message: + core_req.message.CopyFrom(to_core_message(compat_req.params.message)) + if compat_req.params.configuration: + core_req.configuration.CopyFrom( + to_core_send_message_configuration(compat_req.params.configuration) + ) + if compat_req.params.metadata: + ParseDict(compat_req.params.metadata, core_req.metadata) + return core_req + + +def to_compat_send_message_request( + core_req: pb2_v10.SendMessageRequest, request_id: str | int +) -> types_v03.SendMessageRequest: + """Convert send message request to v0.3 compat type.""" + return types_v03.SendMessageRequest( + id=request_id, + params=types_v03.MessageSendParams( + message=to_compat_message(core_req.message), + configuration=to_compat_send_message_configuration( + core_req.configuration + ) + if core_req.HasField('configuration') + else None, + metadata=MessageToDict(core_req.metadata) + if core_req.HasField('metadata') + else None, + ), + ) + + +def to_core_get_task_request( + compat_req: types_v03.GetTaskRequest, +) -> pb2_v10.GetTaskRequest: + """Convert get task request to v1.0 core type.""" + core_req = pb2_v10.GetTaskRequest() + core_req.id = compat_req.params.id + if compat_req.params.history_length is not None: + core_req.history_length = compat_req.params.history_length + return core_req + + +def to_compat_get_task_request( + core_req: pb2_v10.GetTaskRequest, request_id: str | int +) -> types_v03.GetTaskRequest: + """Convert get task request to v0.3 compat type.""" + return types_v03.GetTaskRequest( + id=request_id, + params=types_v03.TaskQueryParams( + id=core_req.id, + history_length=core_req.history_length + if core_req.HasField('history_length') + else None, + ), + ) + + +def to_core_cancel_task_request( + compat_req: types_v03.CancelTaskRequest, +) -> pb2_v10.CancelTaskRequest: + """Convert cancel task request to v1.0 core type.""" + core_req = pb2_v10.CancelTaskRequest(id=compat_req.params.id) + if compat_req.params.metadata: + ParseDict(compat_req.params.metadata, core_req.metadata) + return core_req + + +def to_compat_cancel_task_request( + core_req: pb2_v10.CancelTaskRequest, request_id: str | int +) -> types_v03.CancelTaskRequest: + """Convert cancel task request to v0.3 compat type.""" + return types_v03.CancelTaskRequest( + id=request_id, + params=types_v03.TaskIdParams( + id=core_req.id, + metadata=MessageToDict(core_req.metadata) + if core_req.HasField('metadata') + else None, + ), + ) + + +def to_core_get_task_push_notification_config_request( + compat_req: types_v03.GetTaskPushNotificationConfigRequest, +) -> pb2_v10.GetTaskPushNotificationConfigRequest: + """Convert get task push notification config request to v1.0 core type.""" + if isinstance( + compat_req.params, types_v03.GetTaskPushNotificationConfigParams + ): + return pb2_v10.GetTaskPushNotificationConfigRequest( + task_id=compat_req.params.id, + id=compat_req.params.push_notification_config_id, + ) + return pb2_v10.GetTaskPushNotificationConfigRequest( + task_id=compat_req.params.id + ) + + +def to_compat_get_task_push_notification_config_request( + core_req: pb2_v10.GetTaskPushNotificationConfigRequest, + request_id: str | int, +) -> types_v03.GetTaskPushNotificationConfigRequest: + """Convert get task push notification config request to v0.3 compat type.""" + params: ( + types_v03.GetTaskPushNotificationConfigParams | types_v03.TaskIdParams + ) + if core_req.id: + params = types_v03.GetTaskPushNotificationConfigParams( + id=core_req.task_id, push_notification_config_id=core_req.id + ) + else: + params = types_v03.TaskIdParams(id=core_req.task_id) + return types_v03.GetTaskPushNotificationConfigRequest( + id=request_id, params=params + ) + + +def to_core_delete_task_push_notification_config_request( + compat_req: types_v03.DeleteTaskPushNotificationConfigRequest, +) -> pb2_v10.DeleteTaskPushNotificationConfigRequest: + """Convert delete task push notification config request to v1.0 core type.""" + return pb2_v10.DeleteTaskPushNotificationConfigRequest( + task_id=compat_req.params.id, + id=compat_req.params.push_notification_config_id, + ) + + +def to_compat_delete_task_push_notification_config_request( + core_req: pb2_v10.DeleteTaskPushNotificationConfigRequest, + request_id: str | int, +) -> types_v03.DeleteTaskPushNotificationConfigRequest: + """Convert delete task push notification config request to v0.3 compat type.""" + return types_v03.DeleteTaskPushNotificationConfigRequest( + id=request_id, + params=types_v03.DeleteTaskPushNotificationConfigParams( + id=core_req.task_id, push_notification_config_id=core_req.id + ), + ) + + +def to_core_create_task_push_notification_config_request( + compat_req: types_v03.SetTaskPushNotificationConfigRequest, +) -> pb2_v10.TaskPushNotificationConfig: + """Convert create task push notification config request to v1.0 core type.""" + core_req = pb2_v10.TaskPushNotificationConfig( + task_id=compat_req.params.task_id + ) + if compat_req.params.push_notification_config: + core_req.MergeFrom( + to_core_push_notification_config( + compat_req.params.push_notification_config + ) + ) + return core_req + + +def to_compat_create_task_push_notification_config_request( + core_req: pb2_v10.TaskPushNotificationConfig, + request_id: str | int, +) -> types_v03.SetTaskPushNotificationConfigRequest: + """Convert create task push notification config request to v0.3 compat type.""" + return types_v03.SetTaskPushNotificationConfigRequest( + id=request_id, + params=types_v03.TaskPushNotificationConfig( + task_id=core_req.task_id, + push_notification_config=to_compat_push_notification_config( + core_req + ), + ), + ) + + +def to_core_subscribe_to_task_request( + compat_req: types_v03.TaskResubscriptionRequest, +) -> pb2_v10.SubscribeToTaskRequest: + """Convert subscribe to task request to v1.0 core type.""" + return pb2_v10.SubscribeToTaskRequest(id=compat_req.params.id) + + +def to_compat_subscribe_to_task_request( + core_req: pb2_v10.SubscribeToTaskRequest, request_id: str | int +) -> types_v03.TaskResubscriptionRequest: + """Convert subscribe to task request to v0.3 compat type.""" + return types_v03.TaskResubscriptionRequest( + id=request_id, params=types_v03.TaskIdParams(id=core_req.id) + ) + + +def to_core_list_task_push_notification_config_request( + compat_req: types_v03.ListTaskPushNotificationConfigRequest, +) -> pb2_v10.ListTaskPushNotificationConfigsRequest: + """Convert list task push notification config request to v1.0 core type.""" + core_req = pb2_v10.ListTaskPushNotificationConfigsRequest() + if compat_req.params.id: + core_req.task_id = compat_req.params.id + return core_req + + +def to_compat_list_task_push_notification_config_request( + core_req: pb2_v10.ListTaskPushNotificationConfigsRequest, + request_id: str | int, +) -> types_v03.ListTaskPushNotificationConfigRequest: + """Convert list task push notification config request to v0.3 compat type.""" + return types_v03.ListTaskPushNotificationConfigRequest( + id=request_id, + params=types_v03.ListTaskPushNotificationConfigParams( + id=core_req.task_id + ), + ) + + +def to_core_list_task_push_notification_config_response( + compat_res: types_v03.ListTaskPushNotificationConfigResponse, +) -> pb2_v10.ListTaskPushNotificationConfigsResponse: + """Convert list task push notification config response to v1.0 core type.""" + core_res = pb2_v10.ListTaskPushNotificationConfigsResponse() + root = compat_res.root + if isinstance( + root, types_v03.ListTaskPushNotificationConfigSuccessResponse + ): + for c in root.result: + core_res.configs.append(to_core_task_push_notification_config(c)) + return core_res + + +def to_compat_list_task_push_notification_config_response( + core_res: pb2_v10.ListTaskPushNotificationConfigsResponse, + request_id: str | int | None = None, +) -> types_v03.ListTaskPushNotificationConfigResponse: + """Convert list task push notification config response to v0.3 compat type.""" + return types_v03.ListTaskPushNotificationConfigResponse( + root=types_v03.ListTaskPushNotificationConfigSuccessResponse( + id=request_id, + result=[ + to_compat_task_push_notification_config(c) + for c in core_res.configs + ], + ) + ) + + +def to_core_send_message_response( + compat_res: types_v03.SendMessageResponse, +) -> pb2_v10.SendMessageResponse: + """Convert send message response to v1.0 core type.""" + core_res = pb2_v10.SendMessageResponse() + root = compat_res.root + if isinstance(root, types_v03.SendMessageSuccessResponse): + if isinstance(root.result, types_v03.Task): + core_res.task.CopyFrom(to_core_task(root.result)) + else: + core_res.message.CopyFrom(to_core_message(root.result)) + return core_res + + +def to_compat_send_message_response( + core_res: pb2_v10.SendMessageResponse, request_id: str | int | None = None +) -> types_v03.SendMessageResponse: + """Convert send message response to v0.3 compat type.""" + if core_res.HasField('task'): + result_task = to_compat_task(core_res.task) + return types_v03.SendMessageResponse( + root=types_v03.SendMessageSuccessResponse( + id=request_id, result=result_task + ) + ) + result_msg = to_compat_message(core_res.message) + return types_v03.SendMessageResponse( + root=types_v03.SendMessageSuccessResponse( + id=request_id, result=result_msg + ) + ) + + +def to_core_stream_response( + compat_res: types_v03.SendStreamingMessageSuccessResponse, +) -> pb2_v10.StreamResponse: + """Convert stream response to v1.0 core type.""" + core_res = pb2_v10.StreamResponse() + root = compat_res.result + + if isinstance(root, types_v03.Message): + core_res.message.CopyFrom(to_core_message(root)) + elif isinstance(root, types_v03.Task): + core_res.task.CopyFrom(to_core_task(root)) + elif isinstance(root, types_v03.TaskStatusUpdateEvent): + core_res.status_update.CopyFrom(to_core_task_status_update_event(root)) + elif isinstance(root, types_v03.TaskArtifactUpdateEvent): + core_res.artifact_update.CopyFrom( + to_core_task_artifact_update_event(root) + ) + + return core_res + + +def to_compat_stream_response( + core_res: pb2_v10.StreamResponse, request_id: str | int | None = None +) -> types_v03.SendStreamingMessageSuccessResponse: + """Convert stream response to v0.3 compat type.""" + which = core_res.WhichOneof('payload') + if which == 'message': + return types_v03.SendStreamingMessageSuccessResponse( + id=request_id, result=to_compat_message(core_res.message) + ) + if which == 'task': + return types_v03.SendStreamingMessageSuccessResponse( + id=request_id, result=to_compat_task(core_res.task) + ) + if which == 'status_update': + return types_v03.SendStreamingMessageSuccessResponse( + id=request_id, + result=to_compat_task_status_update_event(core_res.status_update), + ) + if which == 'artifact_update': + return types_v03.SendStreamingMessageSuccessResponse( + id=request_id, + result=to_compat_task_artifact_update_event( + core_res.artifact_update + ), + ) + + raise ValueError(f'Unknown stream response event type: {which}') + + +def to_core_get_extended_agent_card_request( + compat_req: types_v03.GetAuthenticatedExtendedCardRequest, +) -> pb2_v10.GetExtendedAgentCardRequest: + """Convert get extended agent card request to v1.0 core type.""" + return pb2_v10.GetExtendedAgentCardRequest() + + +def to_compat_get_extended_agent_card_request( + core_req: pb2_v10.GetExtendedAgentCardRequest, request_id: str | int +) -> types_v03.GetAuthenticatedExtendedCardRequest: + """Convert get extended agent card request to v0.3 compat type.""" + return types_v03.GetAuthenticatedExtendedCardRequest(id=request_id) diff --git a/src/a2a/compat/v0_3/extension_headers.py b/src/a2a/compat/v0_3/extension_headers.py new file mode 100644 index 000000000..e1421a0b0 --- /dev/null +++ b/src/a2a/compat/v0_3/extension_headers.py @@ -0,0 +1,27 @@ +"""Shared header name constants for v0.3 extension compatibility. + +The current spec uses ``A2A-Extensions``. v0.3 used the ``X-`` prefixed +``X-A2A-Extensions`` form. v0.3 compat servers and clients accept/emit both +names so they can interoperate with peers that only know the legacy one. +""" + +from a2a.client.service_parameters import ServiceParameters +from a2a.extensions.common import HTTP_EXTENSION_HEADER + + +LEGACY_HTTP_EXTENSION_HEADER = f'X-{HTTP_EXTENSION_HEADER}' + + +def add_legacy_extension_header(parameters: ServiceParameters) -> None: + """Mirrors the ``A2A-Extensions`` parameter under its legacy name in-place. + + Used by v0.3 compat client transports so that requests can be understood + by older v0.3 servers that only recognize ``X-A2A-Extensions``. + """ + if ( + HTTP_EXTENSION_HEADER in parameters + and LEGACY_HTTP_EXTENSION_HEADER not in parameters + ): + parameters[LEGACY_HTTP_EXTENSION_HEADER] = parameters[ + HTTP_EXTENSION_HEADER + ] diff --git a/src/a2a/compat/v0_3/grpc_handler.py b/src/a2a/compat/v0_3/grpc_handler.py new file mode 100644 index 000000000..b7bec26ea --- /dev/null +++ b/src/a2a/compat/v0_3/grpc_handler.py @@ -0,0 +1,362 @@ +# ruff: noqa: N802 +import logging + +from collections.abc import AsyncIterable, Awaitable, Callable +from typing import TypeVar + +import grpc +import grpc.aio + +from google.protobuf import empty_pb2 + +from a2a.compat.v0_3 import ( + a2a_v0_3_pb2, + a2a_v0_3_pb2_grpc, + proto_utils, +) +from a2a.compat.v0_3 import ( + types as types_v03, +) +from a2a.compat.v0_3.context_builders import V03GrpcServerCallContextBuilder +from a2a.compat.v0_3.request_handler import RequestHandler03 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.grpc_handler import ( + _ERROR_CODE_MAP, + DefaultGrpcServerCallContextBuilder, + GrpcServerCallContextBuilder, +) +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.utils.errors import A2AError, InvalidParamsError + + +logger = logging.getLogger(__name__) + +TResponse = TypeVar('TResponse') + + +class CompatGrpcHandler(a2a_v0_3_pb2_grpc.A2AServiceServicer): + """Backward compatible gRPC handler for A2A v0.3.""" + + def __init__( + self, + request_handler: RequestHandler, + context_builder: GrpcServerCallContextBuilder | None = None, + ): + """Initializes the CompatGrpcHandler. + + Args: + request_handler: The underlying `RequestHandler` instance to + delegate requests to. + context_builder: The CallContextBuilder object. If none the + DefaultCallContextBuilder is used. + """ + self.handler03 = RequestHandler03(request_handler=request_handler) + self._context_builder = V03GrpcServerCallContextBuilder( + context_builder or DefaultGrpcServerCallContextBuilder() + ) + + async def _handle_unary( + self, + context: grpc.aio.ServicerContext, + handler_func: Callable[[ServerCallContext], Awaitable[TResponse]], + default_response: TResponse, + ) -> TResponse: + """Centralized error handling and context management for unary calls.""" + try: + server_context = self._context_builder.build(context) + result = await handler_func(server_context) + except A2AError as e: + await self.abort_context(e, context) + else: + return result + return default_response + + async def _handle_stream( + self, + context: grpc.aio.ServicerContext, + handler_func: Callable[[ServerCallContext], AsyncIterable[TResponse]], + ) -> AsyncIterable[TResponse]: + """Centralized error handling and context management for streaming calls.""" + try: + server_context = self._context_builder.build(context) + async for item in handler_func(server_context): + yield item + except A2AError as e: + await self.abort_context(e, context) + + def _extract_task_id(self, resource_name: str) -> str: + """Extracts task_id from resource name.""" + m = proto_utils.TASK_NAME_MATCH.match(resource_name) + if not m: + raise InvalidParamsError(message=f'No task for {resource_name}') + return m.group(1) + + def _extract_task_and_config_id( + self, resource_name: str + ) -> tuple[str, str]: + """Extracts task_id and config_id from resource name.""" + m = proto_utils.TASK_PUSH_CONFIG_NAME_MATCH.match(resource_name) + if not m: + raise InvalidParamsError( + message=f'Bad resource name {resource_name}' + ) + return m.group(1), m.group(2) + + async def abort_context( + self, error: A2AError, context: grpc.aio.ServicerContext + ) -> None: + """Sets the grpc errors appropriately in the context.""" + code = _ERROR_CODE_MAP.get(type(error)) + if code: + await context.abort( + code, + f'{type(error).__name__}: {error.message}', + ) + else: + await context.abort( + grpc.StatusCode.UNKNOWN, + f'Unknown error type: {error}', + ) + + async def SendMessage( + self, + request: a2a_v0_3_pb2.SendMessageRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.SendMessageResponse: + """Handles the 'SendMessage' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.SendMessageResponse: + req_v03 = types_v03.SendMessageRequest( + id=0, params=proto_utils.FromProto.message_send_params(request) + ) + result = await self.handler03.on_message_send( + req_v03, server_context + ) + if isinstance(result, types_v03.Task): + return a2a_v0_3_pb2.SendMessageResponse( + task=proto_utils.ToProto.task(result) + ) + return a2a_v0_3_pb2.SendMessageResponse( + msg=proto_utils.ToProto.message(result) + ) + + return await self._handle_unary( + context, _handler, a2a_v0_3_pb2.SendMessageResponse() + ) + + async def SendStreamingMessage( + self, + request: a2a_v0_3_pb2.SendMessageRequest, + context: grpc.aio.ServicerContext, + ) -> AsyncIterable[a2a_v0_3_pb2.StreamResponse]: + """Handles the 'SendStreamingMessage' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> AsyncIterable[a2a_v0_3_pb2.StreamResponse]: + req_v03 = types_v03.SendMessageRequest( + id=0, params=proto_utils.FromProto.message_send_params(request) + ) + async for v03_stream_resp in self.handler03.on_message_send_stream( + req_v03, server_context + ): + yield proto_utils.ToProto.stream_response( + v03_stream_resp.result + ) + + async for item in self._handle_stream(context, _handler): + yield item + + async def GetTask( + self, + request: a2a_v0_3_pb2.GetTaskRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.Task: + """Handles the 'GetTask' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.Task: + req_v03 = types_v03.GetTaskRequest( + id=0, params=proto_utils.FromProto.task_query_params(request) + ) + task = await self.handler03.on_get_task(req_v03, server_context) + return proto_utils.ToProto.task(task) + + return await self._handle_unary(context, _handler, a2a_v0_3_pb2.Task()) + + async def CancelTask( + self, + request: a2a_v0_3_pb2.CancelTaskRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.Task: + """Handles the 'CancelTask' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.Task: + req_v03 = types_v03.CancelTaskRequest( + id=0, params=proto_utils.FromProto.task_id_params(request) + ) + task = await self.handler03.on_cancel_task(req_v03, server_context) + return proto_utils.ToProto.task(task) + + return await self._handle_unary(context, _handler, a2a_v0_3_pb2.Task()) + + async def TaskSubscription( + self, + request: a2a_v0_3_pb2.TaskSubscriptionRequest, + context: grpc.aio.ServicerContext, + ) -> AsyncIterable[a2a_v0_3_pb2.StreamResponse]: + """Handles the 'TaskSubscription' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> AsyncIterable[a2a_v0_3_pb2.StreamResponse]: + req_v03 = types_v03.TaskResubscriptionRequest( + id=0, params=proto_utils.FromProto.task_id_params(request) + ) + async for v03_stream_resp in self.handler03.on_subscribe_to_task( + req_v03, server_context + ): + yield proto_utils.ToProto.stream_response( + v03_stream_resp.result + ) + + async for item in self._handle_stream(context, _handler): + yield item + + async def CreateTaskPushNotificationConfig( + self, + request: a2a_v0_3_pb2.CreateTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.TaskPushNotificationConfig: + """Handles the 'CreateTaskPushNotificationConfig' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.TaskPushNotificationConfig: + req_v03 = types_v03.SetTaskPushNotificationConfigRequest( + id=0, + params=proto_utils.FromProto.task_push_notification_config_request( + request + ), + ) + res_v03 = ( + await self.handler03.on_create_task_push_notification_config( + req_v03, server_context + ) + ) + return proto_utils.ToProto.task_push_notification_config(res_v03) + + return await self._handle_unary( + context, _handler, a2a_v0_3_pb2.TaskPushNotificationConfig() + ) + + async def GetTaskPushNotificationConfig( + self, + request: a2a_v0_3_pb2.GetTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.TaskPushNotificationConfig: + """Handles the 'GetTaskPushNotificationConfig' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.TaskPushNotificationConfig: + task_id, config_id = self._extract_task_and_config_id(request.name) + req_v03 = types_v03.GetTaskPushNotificationConfigRequest( + id=0, + params=types_v03.GetTaskPushNotificationConfigParams( + id=task_id, push_notification_config_id=config_id + ), + ) + res_v03 = await self.handler03.on_get_task_push_notification_config( + req_v03, server_context + ) + return proto_utils.ToProto.task_push_notification_config(res_v03) + + return await self._handle_unary( + context, _handler, a2a_v0_3_pb2.TaskPushNotificationConfig() + ) + + async def ListTaskPushNotificationConfig( + self, + request: a2a_v0_3_pb2.ListTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.ListTaskPushNotificationConfigResponse: + """Handles the 'ListTaskPushNotificationConfig' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.ListTaskPushNotificationConfigResponse: + task_id = self._extract_task_id(request.parent) + req_v03 = types_v03.ListTaskPushNotificationConfigRequest( + id=0, + params=types_v03.ListTaskPushNotificationConfigParams( + id=task_id + ), + ) + res_v03 = ( + await self.handler03.on_list_task_push_notification_configs( + req_v03, server_context + ) + ) + + return a2a_v0_3_pb2.ListTaskPushNotificationConfigResponse( + configs=[ + proto_utils.ToProto.task_push_notification_config(c) + for c in res_v03 + ] + ) + + return await self._handle_unary( + context, + _handler, + a2a_v0_3_pb2.ListTaskPushNotificationConfigResponse(), + ) + + async def GetAgentCard( + self, + request: a2a_v0_3_pb2.GetAgentCardRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_v0_3_pb2.AgentCard: + """Get the extended agent card for the agent served (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_v0_3_pb2.AgentCard: + req_v03 = types_v03.GetAuthenticatedExtendedCardRequest(id=0) + res_v03 = await self.handler03.on_get_extended_agent_card( + req_v03, server_context + ) + return proto_utils.ToProto.agent_card(res_v03) + + return await self._handle_unary( + context, _handler, a2a_v0_3_pb2.AgentCard() + ) + + async def DeleteTaskPushNotificationConfig( + self, + request: a2a_v0_3_pb2.DeleteTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> empty_pb2.Empty: + """Handles the 'DeleteTaskPushNotificationConfig' gRPC method (v0.3).""" + + async def _handler( + server_context: ServerCallContext, + ) -> empty_pb2.Empty: + task_id, config_id = self._extract_task_and_config_id(request.name) + req_v03 = types_v03.DeleteTaskPushNotificationConfigRequest( + id=0, + params=types_v03.DeleteTaskPushNotificationConfigParams( + id=task_id, push_notification_config_id=config_id + ), + ) + await self.handler03.on_delete_task_push_notification_config( + req_v03, server_context + ) + return empty_pb2.Empty() + + return await self._handle_unary(context, _handler, empty_pb2.Empty()) diff --git a/src/a2a/compat/v0_3/grpc_transport.py b/src/a2a/compat/v0_3/grpc_transport.py new file mode 100644 index 000000000..95314e3f1 --- /dev/null +++ b/src/a2a/compat/v0_3/grpc_transport.py @@ -0,0 +1,370 @@ +import logging + +from collections.abc import AsyncGenerator, Callable +from functools import wraps +from typing import Any, NoReturn + +from a2a.client.errors import A2AClientError, A2AClientTimeoutError +from a2a.utils.errors import JSON_RPC_ERROR_CODE_MAP + + +try: + import grpc # type: ignore[reportMissingModuleSource] +except ImportError as e: + raise ImportError( + 'A2AGrpcClient requires grpcio and grpcio-tools to be installed. ' + 'Install with: ' + "'pip install a2a-sdk[grpc]'" + ) from e + + +from a2a.client.client import ClientCallContext, ClientConfig +from a2a.client.optionals import Channel +from a2a.client.transports.base import ClientTransport +from a2a.compat.v0_3 import ( + a2a_v0_3_pb2, + a2a_v0_3_pb2_grpc, + conversions, + proto_utils, +) +from a2a.compat.v0_3 import ( + types as types_v03, +) +from a2a.compat.v0_3.extension_headers import add_legacy_extension_header +from a2a.types import a2a_pb2 +from a2a.utils.constants import PROTOCOL_VERSION_0_3, VERSION_HEADER +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + +_A2A_ERROR_NAME_TO_CLS = { + error_type.__name__: error_type for error_type in JSON_RPC_ERROR_CODE_MAP +} + + +def _map_grpc_error(e: grpc.aio.AioRpcError) -> NoReturn: + if e.code() == grpc.StatusCode.DEADLINE_EXCEEDED: + raise A2AClientTimeoutError('Client Request timed out') from e + + details = e.details() + if isinstance(details, str) and ': ' in details: + error_type_name, error_message = details.split(': ', 1) + exception_cls = _A2A_ERROR_NAME_TO_CLS.get(error_type_name) + if exception_cls: + raise exception_cls(error_message) from e + raise A2AClientError(f'gRPC Error {e.code().name}: {e.details()}') from e + + +def _handle_grpc_exception(func: Callable[..., Any]) -> Callable[..., Any]: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + return await func(*args, **kwargs) + except grpc.aio.AioRpcError as e: + _map_grpc_error(e) + + return wrapper + + +def _handle_grpc_stream_exception( + func: Callable[..., Any], +) -> Callable[..., Any]: + @wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + async for item in func(*args, **kwargs): + yield item + except grpc.aio.AioRpcError as e: + _map_grpc_error(e) + + return wrapper + + +@trace_class(kind=SpanKind.CLIENT) +class CompatGrpcTransport(ClientTransport): + """A backward compatible gRPC transport for A2A v0.3.""" + + def __init__(self, channel: Channel, agent_card: a2a_pb2.AgentCard | None): + """Initializes the CompatGrpcTransport.""" + self.agent_card = agent_card + self.channel = channel + self.stub = a2a_v0_3_pb2_grpc.A2AServiceStub(channel) + + @classmethod + def create( + cls, + card: a2a_pb2.AgentCard, + url: str, + config: ClientConfig, + ) -> 'CompatGrpcTransport': + """Creates a gRPC transport for the A2A client.""" + if config.grpc_channel_factory is None: + raise ValueError('grpc_channel_factory is required when using gRPC') + return cls(config.grpc_channel_factory(url), card) + + @_handle_grpc_exception + async def send_message( + self, + request: a2a_pb2.SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.SendMessageResponse: + """Sends a non-streaming message request to the agent (v0.3).""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + req_proto = a2a_v0_3_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(req_v03.params.message), + configuration=proto_utils.ToProto.message_send_configuration( + req_v03.params.configuration + ), + metadata=proto_utils.ToProto.metadata(req_v03.params.metadata), + ) + + resp_proto = await self.stub.SendMessage( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + + which = resp_proto.WhichOneof('payload') + if which == 'task': + return a2a_pb2.SendMessageResponse( + task=conversions.to_core_task( + proto_utils.FromProto.task(resp_proto.task) + ) + ) + if which == 'msg': + return a2a_pb2.SendMessageResponse( + message=conversions.to_core_message( + proto_utils.FromProto.message(resp_proto.msg) + ) + ) + return a2a_pb2.SendMessageResponse() + + @_handle_grpc_stream_exception + async def send_message_streaming( + self, + request: a2a_pb2.SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[a2a_pb2.StreamResponse]: + """Sends a streaming message request to the agent (v0.3).""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + req_proto = a2a_v0_3_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(req_v03.params.message), + configuration=proto_utils.ToProto.message_send_configuration( + req_v03.params.configuration + ), + metadata=proto_utils.ToProto.metadata(req_v03.params.metadata), + ) + + stream = self.stub.SendStreamingMessage( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + while True: + response = await stream.read() + if response == grpc.aio.EOF: # type: ignore[attr-defined] + break + yield conversions.to_core_stream_response( + types_v03.SendStreamingMessageSuccessResponse( + result=proto_utils.FromProto.stream_response(response) + ) + ) + + @_handle_grpc_stream_exception + async def subscribe( + self, + request: a2a_pb2.SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[a2a_pb2.StreamResponse]: + """Reconnects to get task updates (v0.3).""" + req_proto = a2a_v0_3_pb2.TaskSubscriptionRequest( + name=f'tasks/{request.id}' + ) + + stream = self.stub.TaskSubscription( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + while True: + response = await stream.read() + if response == grpc.aio.EOF: # type: ignore[attr-defined] + break + yield conversions.to_core_stream_response( + types_v03.SendStreamingMessageSuccessResponse( + result=proto_utils.FromProto.stream_response(response) + ) + ) + + @_handle_grpc_exception + async def get_task( + self, + request: a2a_pb2.GetTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.Task: + """Retrieves the current state and history of a specific task (v0.3).""" + req_proto = a2a_v0_3_pb2.GetTaskRequest( + name=f'tasks/{request.id}', + history_length=request.history_length, + ) + resp_proto = await self.stub.GetTask( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + return conversions.to_core_task(proto_utils.FromProto.task(resp_proto)) + + @_handle_grpc_exception + async def list_tasks( + self, + request: a2a_pb2.ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.ListTasksResponse: + """Retrieves tasks for an agent (v0.3 - NOT SUPPORTED in v0.3).""" + # v0.3 proto doesn't have ListTasks. + raise NotImplementedError( + 'ListTasks is not supported in A2A v0.3 gRPC.' + ) + + @_handle_grpc_exception + async def cancel_task( + self, + request: a2a_pb2.CancelTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.Task: + """Requests the agent to cancel a specific task (v0.3).""" + req_proto = a2a_v0_3_pb2.CancelTaskRequest(name=f'tasks/{request.id}') + resp_proto = await self.stub.CancelTask( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + return conversions.to_core_task(proto_utils.FromProto.task(resp_proto)) + + @_handle_grpc_exception + async def create_task_push_notification_config( + self, + request: a2a_pb2.TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.TaskPushNotificationConfig: + """Sets or updates the push notification configuration (v0.3).""" + req_v03 = ( + conversions.to_compat_create_task_push_notification_config_request( + request, request_id=0 + ) + ) + req_proto = a2a_v0_3_pb2.CreateTaskPushNotificationConfigRequest( + parent=f'tasks/{request.task_id}', + config_id=req_v03.params.push_notification_config.id, + config=proto_utils.ToProto.task_push_notification_config( + req_v03.params + ), + ) + resp_proto = await self.stub.CreateTaskPushNotificationConfig( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + return conversions.to_core_task_push_notification_config( + proto_utils.FromProto.task_push_notification_config(resp_proto) + ) + + @_handle_grpc_exception + async def get_task_push_notification_config( + self, + request: a2a_pb2.GetTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.TaskPushNotificationConfig: + """Retrieves the push notification configuration (v0.3).""" + req_proto = a2a_v0_3_pb2.GetTaskPushNotificationConfigRequest( + name=f'tasks/{request.task_id}/pushNotificationConfigs/{request.id}' + ) + resp_proto = await self.stub.GetTaskPushNotificationConfig( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + return conversions.to_core_task_push_notification_config( + proto_utils.FromProto.task_push_notification_config(resp_proto) + ) + + @_handle_grpc_exception + async def list_task_push_notification_configs( + self, + request: a2a_pb2.ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task (v0.3).""" + req_proto = a2a_v0_3_pb2.ListTaskPushNotificationConfigRequest( + parent=f'tasks/{request.task_id}' + ) + resp_proto = await self.stub.ListTaskPushNotificationConfig( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + return conversions.to_core_list_task_push_notification_config_response( + proto_utils.FromProto.list_task_push_notification_config_response( + resp_proto + ) + ) + + @_handle_grpc_exception + async def delete_task_push_notification_config( + self, + request: a2a_pb2.DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration (v0.3).""" + req_proto = a2a_v0_3_pb2.DeleteTaskPushNotificationConfigRequest( + name=f'tasks/{request.task_id}/pushNotificationConfigs/{request.id}' + ) + await self.stub.DeleteTaskPushNotificationConfig( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + + @_handle_grpc_exception + async def get_extended_agent_card( + self, + request: a2a_pb2.GetExtendedAgentCardRequest, + *, + context: ClientCallContext | None = None, + ) -> a2a_pb2.AgentCard: + """Retrieves the agent's card (v0.3).""" + req_proto = a2a_v0_3_pb2.GetAgentCardRequest() + resp_proto = await self.stub.GetAgentCard( + req_proto, + metadata=self._get_grpc_metadata(context), + ) + card = conversions.to_core_agent_card( + proto_utils.FromProto.agent_card(resp_proto) + ) + + self.agent_card = card + return card + + async def close(self) -> None: + """Closes the gRPC channel.""" + await self.channel.close() + + def _get_grpc_metadata( + self, context: ClientCallContext | None = None + ) -> list[tuple[str, str]]: + """Creates gRPC metadata for extensions.""" + metadata = [(VERSION_HEADER.lower(), PROTOCOL_VERSION_0_3)] + + if context and context.service_parameters: + params = dict(context.service_parameters) + add_legacy_extension_header(params) + for key, value in params.items(): + metadata.append((key.lower(), value)) + + return metadata diff --git a/src/a2a/compat/v0_3/jsonrpc_adapter.py b/src/a2a/compat/v0_3/jsonrpc_adapter.py new file mode 100644 index 000000000..580034e9b --- /dev/null +++ b/src/a2a/compat/v0_3/jsonrpc_adapter.py @@ -0,0 +1,280 @@ +import logging + +from collections.abc import AsyncIterable, AsyncIterator +from typing import TYPE_CHECKING, Any + +from sse_starlette.sse import EventSourceResponse +from starlette.responses import JSONResponse + + +if TYPE_CHECKING: + from starlette.requests import Request + + from a2a.server.request_handlers.request_handler import RequestHandler + + _package_starlette_installed = True +else: + try: + from starlette.requests import Request + + _package_starlette_installed = True + except ImportError: + Request = Any + + _package_starlette_installed = False + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.context_builders import V03ServerCallContextBuilder +from a2a.compat.v0_3.request_handler import RequestHandler03 +from a2a.server.context import ServerCallContext +from a2a.server.jsonrpc_models import ( + InternalError as CoreInternalError, +) +from a2a.server.jsonrpc_models import ( + InvalidRequestError as CoreInvalidRequestError, +) +from a2a.server.jsonrpc_models import ( + JSONRPCError as CoreJSONRPCError, +) +from a2a.server.routes.common import ( + DefaultServerCallContextBuilder, + ServerCallContextBuilder, +) +from a2a.utils import constants +from a2a.utils.version_validator import validate_version + + +logger = logging.getLogger(__name__) + + +class JSONRPC03Adapter: + """Adapter to make RequestHandler work with v0.3 JSONRPC API.""" + + METHOD_TO_MODEL = { + 'message/send': types_v03.SendMessageRequest, + 'message/stream': types_v03.SendStreamingMessageRequest, + 'tasks/get': types_v03.GetTaskRequest, + 'tasks/cancel': types_v03.CancelTaskRequest, + 'tasks/pushNotificationConfig/set': types_v03.SetTaskPushNotificationConfigRequest, + 'tasks/pushNotificationConfig/get': types_v03.GetTaskPushNotificationConfigRequest, + 'tasks/pushNotificationConfig/list': types_v03.ListTaskPushNotificationConfigRequest, + 'tasks/pushNotificationConfig/delete': types_v03.DeleteTaskPushNotificationConfigRequest, + 'tasks/resubscribe': types_v03.TaskResubscriptionRequest, + 'agent/getAuthenticatedExtendedCard': types_v03.GetAuthenticatedExtendedCardRequest, + } + + def __init__( + self, + http_handler: 'RequestHandler', + context_builder: 'ServerCallContextBuilder | None' = None, + ): + self.handler = RequestHandler03( + request_handler=http_handler, + ) + self._context_builder = V03ServerCallContextBuilder( + context_builder or DefaultServerCallContextBuilder() + ) + + def supports_method(self, method: str) -> bool: + """Returns True if the v0.3 adapter supports the given method name.""" + return method in self.METHOD_TO_MODEL + + def _generate_error_response( + self, + request_id: 'str | int | None', + error: 'Exception | CoreJSONRPCError', + ) -> JSONResponse: + if isinstance(error, CoreJSONRPCError): + err_dict = error.model_dump(by_alias=True) + return JSONResponse( + {'jsonrpc': '2.0', 'id': request_id, 'error': err_dict} + ) + + internal_error = CoreInternalError(message=str(error)) + return JSONResponse( + { + 'jsonrpc': '2.0', + 'id': request_id, + 'error': internal_error.model_dump(by_alias=True), + } + ) + + async def handle_request( + self, + request_id: 'str | int | None', + method: str, + body: dict, + request: Request, + ) -> 'JSONResponse | EventSourceResponse': + """Handles v0.3 specific JSON-RPC requests.""" + try: + model_class = self.METHOD_TO_MODEL[method] + try: + specific_request = model_class.model_validate(body) # type: ignore[attr-defined] + except Exception as e: + logger.exception( + 'Failed to validate base JSON-RPC request for v0.3' + ) + + return self._generate_error_response( + request_id, + CoreInvalidRequestError(data=str(e)), + ) + + call_context = self._context_builder.build(request) + call_context.tenant = ( + getattr(specific_request.params, 'tenant', '') + if hasattr(specific_request, 'params') + else getattr(specific_request, 'tenant', '') + ) + call_context.state['method'] = method + call_context.state['request_id'] = request_id + + if method in ('message/stream', 'tasks/resubscribe'): + return await self._process_streaming_request( + request_id, specific_request, call_context + ) + + return await self._process_non_streaming_request( + request_id, specific_request, call_context + ) + except Exception as e: + logger.exception('Unhandled exception in v0.3 JSONRPCAdapter') + return self._generate_error_response( + request_id, CoreInternalError(message=str(e)) + ) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def _process_non_streaming_request( + self, + request_id: 'str | int | None', + request_obj: Any, + context: ServerCallContext, + ) -> JSONResponse: + method = request_obj.method + result: Any + if method == 'message/send': + res_msg = await self.handler.on_message_send(request_obj, context) + result = types_v03.SendMessageResponse( + root=types_v03.SendMessageSuccessResponse( + id=request_id, result=res_msg + ) + ) + elif method == 'tasks/get': + res_get = await self.handler.on_get_task(request_obj, context) + result = types_v03.GetTaskResponse( + root=types_v03.GetTaskSuccessResponse( + id=request_id, result=res_get + ) + ) + elif method == 'tasks/cancel': + res_cancel = await self.handler.on_cancel_task(request_obj, context) + result = types_v03.CancelTaskResponse( + root=types_v03.CancelTaskSuccessResponse( + id=request_id, result=res_cancel + ) + ) + elif method == 'tasks/pushNotificationConfig/get': + res_get_push = ( + await self.handler.on_get_task_push_notification_config( + request_obj, context + ) + ) + result = types_v03.GetTaskPushNotificationConfigResponse( + root=types_v03.GetTaskPushNotificationConfigSuccessResponse( + id=request_id, result=res_get_push + ) + ) + elif method == 'tasks/pushNotificationConfig/set': + res_set_push = ( + await self.handler.on_create_task_push_notification_config( + request_obj, context + ) + ) + result = types_v03.SetTaskPushNotificationConfigResponse( + root=types_v03.SetTaskPushNotificationConfigSuccessResponse( + id=request_id, result=res_set_push + ) + ) + elif method == 'tasks/pushNotificationConfig/list': + res_list_push = ( + await self.handler.on_list_task_push_notification_configs( + request_obj, context + ) + ) + result = types_v03.ListTaskPushNotificationConfigResponse( + root=types_v03.ListTaskPushNotificationConfigSuccessResponse( + id=request_id, result=res_list_push + ) + ) + elif method == 'tasks/pushNotificationConfig/delete': + await self.handler.on_delete_task_push_notification_config( + request_obj, context + ) + result = types_v03.DeleteTaskPushNotificationConfigResponse( + root=types_v03.DeleteTaskPushNotificationConfigSuccessResponse( + id=request_id, result=None + ) + ) + elif method == 'agent/getAuthenticatedExtendedCard': + res_card = await self.handler.on_get_extended_agent_card( + request_obj, context + ) + result = types_v03.GetAuthenticatedExtendedCardResponse( + root=types_v03.GetAuthenticatedExtendedCardSuccessResponse( + id=request_id, result=res_card + ) + ) + else: + raise ValueError(f'Unsupported method {method}') + + return JSONResponse( + content=result.model_dump( + mode='json', by_alias=True, exclude_none=True + ) + ) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def _process_streaming_request( + self, + request_id: 'str | int | None', + request_obj: Any, + context: ServerCallContext, + ) -> EventSourceResponse: + method = request_obj.method + if method == 'message/stream': + stream_gen = self.handler.on_message_send_stream( + request_obj, context + ) + elif method == 'tasks/resubscribe': + stream_gen = self.handler.on_subscribe_to_task(request_obj, context) + else: + raise ValueError(f'Unsupported streaming method {method}') + + async def event_generator( + stream: AsyncIterable[Any], + ) -> AsyncIterator[dict[str, str]]: + try: + async for item in stream: + yield { + 'data': item.model_dump_json( + by_alias=True, exclude_none=True + ) + } + except Exception as e: + logger.exception( + 'Error during stream generation in v0.3 JSONRPCAdapter' + ) + err = types_v03.InternalError(message=str(e)) + err_resp = types_v03.SendStreamingMessageResponse( + root=types_v03.JSONRPCErrorResponse( + id=request_id, error=err + ) + ) + yield { + 'data': err_resp.model_dump_json( + by_alias=True, exclude_none=True + ) + } + + return EventSourceResponse(event_generator(stream_gen)) diff --git a/src/a2a/compat/v0_3/jsonrpc_transport.py b/src/a2a/compat/v0_3/jsonrpc_transport.py new file mode 100644 index 000000000..caccd2811 --- /dev/null +++ b/src/a2a/compat/v0_3/jsonrpc_transport.py @@ -0,0 +1,500 @@ +import json +import logging + +from collections.abc import AsyncGenerator +from typing import Any, NoReturn +from uuid import uuid4 + +import httpx + +from jsonrpc.jsonrpc2 import JSONRPC20Request, JSONRPC20Response + +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.http_helpers import ( + get_http_args, + send_http_request, + send_http_stream_request, +) +from a2a.compat.v0_3 import conversions +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.extension_headers import add_legacy_extension_header +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, +) +from a2a.utils.constants import PROTOCOL_VERSION_0_3, VERSION_HEADER +from a2a.utils.errors import JSON_RPC_ERROR_CODE_MAP +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + +_JSON_RPC_ERROR_CODE_TO_A2A_ERROR = { + code: error_type for error_type, code in JSON_RPC_ERROR_CODE_MAP.items() +} + + +@trace_class(kind=SpanKind.CLIENT) +class CompatJsonRpcTransport(ClientTransport): + """A backward compatible JSON-RPC transport for A2A v0.3.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + agent_card: AgentCard | None, + url: str, + ): + """Initializes the CompatJsonRpcTransport.""" + self.url = url + self.httpx_client = httpx_client + self.agent_card = agent_card + + async def send_message( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> SendMessageResponse: + """Sends a non-streaming message request to the agent.""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + + rpc_request = JSONRPC20Request( + method='message/send', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + result_dict = json_rpc_response.result + if not isinstance(result_dict, dict): + return SendMessageResponse() + + kind = result_dict.get('kind') + + # Fallback for old servers that might omit kind + if not kind: + if 'messageId' in result_dict: + kind = 'message' + elif 'id' in result_dict: + kind = 'task' + + if kind == 'task': + return SendMessageResponse( + task=conversions.to_core_task( + types_v03.Task.model_validate(result_dict) + ) + ) + if kind == 'message': + return SendMessageResponse( + message=conversions.to_core_message( + types_v03.Message.model_validate(result_dict) + ) + ) + + return SendMessageResponse() + + async def send_message_streaming( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + + rpc_request = JSONRPC20Request( + method='message/stream', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + async for event in self._send_stream_request( + dict(rpc_request.data), + context, + ): + yield event + + async def get_task( + self, + request: GetTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + req_v03 = conversions.to_compat_get_task_request(request, request_id=0) + + rpc_request = JSONRPC20Request( + method='tasks/get', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + return conversions.to_core_task( + types_v03.Task.model_validate(json_rpc_response.result) + ) + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + raise NotImplementedError( + 'ListTasks is not supported in A2A v0.3 JSONRPC.' + ) + + async def cancel_task( + self, + request: CancelTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + req_v03 = conversions.to_compat_cancel_task_request( + request, request_id=0 + ) + + rpc_request = JSONRPC20Request( + method='tasks/cancel', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + return conversions.to_core_task( + types_v03.Task.model_validate(json_rpc_response.result) + ) + + async def create_task_push_notification_config( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + req_v03 = ( + conversions.to_compat_create_task_push_notification_config_request( + request, request_id=0 + ) + ) + rpc_request = JSONRPC20Request( + method='tasks/pushNotificationConfig/set', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + return conversions.to_core_task_push_notification_config( + types_v03.TaskPushNotificationConfig.model_validate( + json_rpc_response.result + ) + ) + + async def get_task_push_notification_config( + self, + request: GetTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + req_v03 = ( + conversions.to_compat_get_task_push_notification_config_request( + request, request_id=0 + ) + ) + rpc_request = JSONRPC20Request( + method='tasks/pushNotificationConfig/get', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + return conversions.to_core_task_push_notification_config( + types_v03.TaskPushNotificationConfig.model_validate( + json_rpc_response.result + ) + ) + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + req_v03 = ( + conversions.to_compat_list_task_push_notification_config_request( + request, request_id=0 + ) + ) + rpc_request = JSONRPC20Request( + method='tasks/pushNotificationConfig/list', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + configs_data = json_rpc_response.result + if not isinstance(configs_data, list): + return ListTaskPushNotificationConfigsResponse() + + response = ListTaskPushNotificationConfigsResponse() + for config_data in configs_data: + response.configs.append( + conversions.to_core_task_push_notification_config( + types_v03.TaskPushNotificationConfig.model_validate( + config_data + ) + ) + ) + return response + + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + req_v03 = ( + conversions.to_compat_delete_task_push_notification_config_request( + request, request_id=0 + ) + ) + rpc_request = JSONRPC20Request( + method='tasks/pushNotificationConfig/delete', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + if 'result' not in response_data and 'error' not in response_data: + response_data['result'] = None + + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + async def subscribe( + self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Reconnects to get task updates.""" + req_v03 = conversions.to_compat_subscribe_to_task_request( + request, request_id=0 + ) + rpc_request = JSONRPC20Request( + method='tasks/resubscribe', + params=req_v03.params.model_dump( + by_alias=True, exclude_none=True, mode='json' + ), + _id=str(uuid4()), + ) + async for event in self._send_stream_request( + dict(rpc_request.data), + context, + ): + yield event + + async def get_extended_agent_card( + self, + request: GetExtendedAgentCardRequest, + *, + context: ClientCallContext | None = None, + ) -> AgentCard: + """Retrieves the Extended AgentCard.""" + card = self.agent_card + if card and not card.capabilities.extended_agent_card: + return card + + rpc_request = JSONRPC20Request( + method='agent/getAuthenticatedExtendedCard', + params={}, + _id=str(uuid4()), + ) + response_data = await self._send_request( + dict(rpc_request.data), context + ) + json_rpc_response = JSONRPC20Response(**response_data) + if json_rpc_response.error: + raise self._create_jsonrpc_error(json_rpc_response.error) + + card = conversions.to_core_agent_card( + types_v03.AgentCard.model_validate(json_rpc_response.result) + ) + self.agent_card = card + return card + + async def close(self) -> None: + """Closes the httpx client.""" + await self.httpx_client.aclose() + + def _create_jsonrpc_error( + self, error_dict: dict[str, Any] + ) -> A2AClientError: + """Raises a specific error based on jsonrpc error code.""" + code = error_dict.get('code') + message = error_dict.get('message', 'Unknown Error') + + if isinstance(code, int): + error_class = _JSON_RPC_ERROR_CODE_TO_A2A_ERROR.get(code) + if error_class: + return error_class(message) # type: ignore[return-value] + + return A2AClientError(message) + + def _handle_http_error(self, e: httpx.HTTPStatusError) -> NoReturn: + """Handles HTTP errors for standard requests.""" + raise A2AClientError(f'HTTP Error: {e.response.status_code}') from e + + async def _send_stream_request( + self, + json_data: dict[str, Any], + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Sends an HTTP stream request.""" + http_kwargs = get_http_args(context) + http_kwargs.setdefault('headers', {}) + http_kwargs['headers'][VERSION_HEADER.lower()] = PROTOCOL_VERSION_0_3 + add_legacy_extension_header(http_kwargs['headers']) + + async for sse_data in send_http_stream_request( + self.httpx_client, + 'POST', + self.url, + self._handle_http_error, + json=json_data, + **http_kwargs, + ): + data = json.loads(sse_data) + if 'error' in data: + raise self._create_jsonrpc_error(data['error']) + + result_dict = data.get('result', {}) + if not isinstance(result_dict, dict): + continue + + kind = result_dict.get('kind') + + if not kind: + if 'taskId' in result_dict and 'final' in result_dict: + kind = 'status-update' + elif 'messageId' in result_dict: + kind = 'message' + elif 'id' in result_dict: + kind = 'task' + + result: ( + types_v03.Task + | types_v03.Message + | types_v03.TaskStatusUpdateEvent + | types_v03.TaskArtifactUpdateEvent + ) + if kind == 'task': + result = types_v03.Task.model_validate(result_dict) + elif kind == 'message': + result = types_v03.Message.model_validate(result_dict) + elif kind == 'status-update': + result = types_v03.TaskStatusUpdateEvent.model_validate( + result_dict + ) + elif kind == 'artifact-update': + result = types_v03.TaskArtifactUpdateEvent.model_validate( + result_dict + ) + else: + continue + + yield conversions.to_core_stream_response( + types_v03.SendStreamingMessageSuccessResponse(result=result) + ) + + async def _send_request( + self, + json_data: dict[str, Any], + context: ClientCallContext | None = None, + ) -> dict[str, Any]: + """Sends an HTTP request.""" + http_kwargs = get_http_args(context) + http_kwargs.setdefault('headers', {}) + http_kwargs['headers'][VERSION_HEADER.lower()] = PROTOCOL_VERSION_0_3 + add_legacy_extension_header(http_kwargs['headers']) + + request = self.httpx_client.build_request( + 'POST', + self.url, + json=json_data, + **http_kwargs, + ) + return await send_http_request( + self.httpx_client, request, self._handle_http_error + ) diff --git a/src/a2a/compat/v0_3/model_conversions.py b/src/a2a/compat/v0_3/model_conversions.py new file mode 100644 index 000000000..9b3cc44f8 --- /dev/null +++ b/src/a2a/compat/v0_3/model_conversions.py @@ -0,0 +1,92 @@ +"""Database model conversions for v0.3 compatibility.""" + +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from cryptography.fernet import Fernet + + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.conversions import ( + to_compat_push_notification_config, + to_compat_task, + to_core_task, + to_core_task_push_notification_config, +) +from a2a.server.models import PushNotificationConfigModel, TaskModel +from a2a.types import a2a_pb2 as pb2_v10 + + +def core_to_compat_task_model(task: pb2_v10.Task, owner: str) -> TaskModel: + """Converts a 1.0 core Task to a TaskModel using v0.3 JSON structure.""" + compat_task = to_compat_task(task) + data = compat_task.model_dump(mode='json') + + return TaskModel( + id=task.id, + context_id=task.context_id, + owner=owner, + status=data.get('status'), + history=data.get('history'), + artifacts=data.get('artifacts'), + task_metadata=data.get('metadata'), + protocol_version='0.3', + ) + + +def compat_task_model_to_core(task_model: TaskModel) -> pb2_v10.Task: + """Converts a TaskModel with v0.3 structure to a 1.0 core Task.""" + compat_task = types_v03.Task( + id=task_model.id, + context_id=task_model.context_id, + status=types_v03.TaskStatus.model_validate(task_model.status), + artifacts=( + [types_v03.Artifact.model_validate(a) for a in task_model.artifacts] + if task_model.artifacts + else [] + ), + history=( + [types_v03.Message.model_validate(h) for h in task_model.history] + if task_model.history + else [] + ), + metadata=task_model.task_metadata, + ) + return to_core_task(compat_task) + + +def core_to_compat_push_notification_config_model( + task_id: str, + config: pb2_v10.TaskPushNotificationConfig, + owner: str, + fernet: 'Fernet | None' = None, +) -> PushNotificationConfigModel: + """Converts a 1.0 core TaskPushNotificationConfig to a PushNotificationConfigModel using v0.3 JSON structure.""" + compat_config = to_compat_push_notification_config(config) + + json_payload = compat_config.model_dump_json().encode('utf-8') + data_to_store = fernet.encrypt(json_payload) if fernet else json_payload + + return PushNotificationConfigModel( + task_id=task_id, + config_id=config.id, + owner=owner, + config_data=data_to_store, + protocol_version='0.3', + ) + + +def compat_push_notification_config_model_to_core( + model_instance: str, task_id: str +) -> pb2_v10.TaskPushNotificationConfig: + """Converts a PushNotificationConfigModel with v0.3 structure back to a 1.0 core TaskPushNotificationConfig.""" + inner_config = types_v03.PushNotificationConfig.model_validate_json( + model_instance + ) + return to_core_task_push_notification_config( + types_v03.TaskPushNotificationConfig( + task_id=task_id, + push_notification_config=inner_config, + ) + ) diff --git a/src/a2a/compat/v0_3/proto_utils.py b/src/a2a/compat/v0_3/proto_utils.py new file mode 100644 index 000000000..d9c5688dc --- /dev/null +++ b/src/a2a/compat/v0_3/proto_utils.py @@ -0,0 +1,1099 @@ +# mypy: disable-error-code="arg-type" +"""This file was migrated from the a2a-python SDK version 0.3. + +It provides utilities for converting between legacy v0.3 Pydantic models and legacy v0.3 Protobuf definitions. +""" + +import json +import logging +import re + +from typing import Any + +from google.protobuf import json_format, struct_pb2 + +from a2a.compat.v0_3 import a2a_v0_3_pb2 as a2a_pb2 +from a2a.compat.v0_3 import types +from a2a.utils.errors import InvalidParamsError + + +logger = logging.getLogger(__name__) + + +# Regexp patterns for matching +TASK_NAME_MATCH = re.compile(r'tasks/([^/]+)') +TASK_PUSH_CONFIG_NAME_MATCH = re.compile( + r'tasks/([^/]+)/pushNotificationConfigs/([^/]+)' +) + + +def dict_to_struct(dictionary: dict[str, Any]) -> struct_pb2.Struct: + """Converts a Python dict to a Struct proto. + + Unfortunately, using `json_format.ParseDict` does not work because this + wants the dictionary to be an exact match of the Struct proto with fields + and keys and values, not the traditional Python dict structure. + + Args: + dictionary: The Python dict to convert. + + Returns: + The Struct proto. + """ + struct = struct_pb2.Struct() + for key, val in dictionary.items(): + if isinstance(val, dict): + struct[key] = dict_to_struct(val) + else: + struct[key] = val + return struct + + +def make_dict_serializable(value: Any) -> Any: + """Dict pre-processing utility: converts non-serializable values to serializable form. + + Use this when you want to normalize a dictionary before dict->Struct conversion. + + Args: + value: The value to convert. + + Returns: + A serializable value. + """ + if isinstance(value, str | int | float | bool) or value is None: + return value + if isinstance(value, dict): + return {k: make_dict_serializable(v) for k, v in value.items()} + if isinstance(value, list | tuple): + return [make_dict_serializable(item) for item in value] + return str(value) + + +def normalize_large_integers_to_strings( + value: Any, max_safe_digits: int = 15 +) -> Any: + """Integer preprocessing utility: converts large integers to strings. + + Use this when you want to convert large integers to strings considering + JavaScript's MAX_SAFE_INTEGER (2^53 - 1) limitation. + + Args: + value: The value to convert. + max_safe_digits: Maximum safe integer digits (default: 15). + + Returns: + A normalized value. + """ + max_safe_int = 10**max_safe_digits - 1 + + def _normalize(item: Any) -> Any: + if isinstance(item, int) and abs(item) > max_safe_int: + return str(item) + if isinstance(item, dict): + return {k: _normalize(v) for k, v in item.items()} + if isinstance(item, list | tuple): + return [_normalize(i) for i in item] + return item + + return _normalize(value) + + +def parse_string_integers_in_dict(value: Any, max_safe_digits: int = 15) -> Any: + """String post-processing utility: converts large integer strings back to integers. + + Use this when you want to restore large integer strings to integers + after Struct->dict conversion. + + Args: + value: The value to convert. + max_safe_digits: Maximum safe integer digits (default: 15). + + Returns: + A parsed value. + """ + if isinstance(value, dict): + return { + k: parse_string_integers_in_dict(v, max_safe_digits) + for k, v in value.items() + } + if isinstance(value, list | tuple): + return [ + parse_string_integers_in_dict(item, max_safe_digits) + for item in value + ] + if isinstance(value, str): + # Handle potential negative numbers. + stripped_value = value.lstrip('-') + if stripped_value.isdigit() and len(stripped_value) > max_safe_digits: + return int(value) + return value + + +class ToProto: + """Converts Python types to proto types.""" + + @classmethod + def message(cls, message: types.Message | None) -> a2a_pb2.Message | None: + if message is None: + return None + return a2a_pb2.Message( + message_id=message.message_id, + content=[cls.part(p) for p in message.parts], + context_id=message.context_id or '', + task_id=message.task_id or '', + role=cls.role(message.role), + metadata=cls.metadata(message.metadata), + extensions=message.extensions or [], + ) + + @classmethod + def metadata( + cls, metadata: dict[str, Any] | None + ) -> struct_pb2.Struct | None: + if metadata is None: + return None + return dict_to_struct(metadata) + + @classmethod + def part(cls, part: types.Part) -> a2a_pb2.Part: + if isinstance(part.root, types.TextPart): + return a2a_pb2.Part( + text=part.root.text, metadata=cls.metadata(part.root.metadata) + ) + if isinstance(part.root, types.FilePart): + return a2a_pb2.Part( + file=cls.file(part.root.file), + metadata=cls.metadata(part.root.metadata), + ) + if isinstance(part.root, types.DataPart): + return a2a_pb2.Part( + data=cls.data(part.root.data), + metadata=cls.metadata(part.root.metadata), + ) + raise ValueError(f'Unsupported part type: {part.root}') + + @classmethod + def data(cls, data: dict[str, Any]) -> a2a_pb2.DataPart: + return a2a_pb2.DataPart(data=dict_to_struct(data)) + + @classmethod + def file( + cls, file: types.FileWithUri | types.FileWithBytes + ) -> a2a_pb2.FilePart: + if isinstance(file, types.FileWithUri): + return a2a_pb2.FilePart( + file_with_uri=file.uri, mime_type=file.mime_type, name=file.name + ) + return a2a_pb2.FilePart( + file_with_bytes=file.bytes.encode('utf-8'), + mime_type=file.mime_type, + name=file.name, + ) + + @classmethod + def task(cls, task: types.Task) -> a2a_pb2.Task: + return a2a_pb2.Task( + id=task.id, + context_id=task.context_id, + status=cls.task_status(task.status), + artifacts=( + [cls.artifact(a) for a in task.artifacts] + if task.artifacts + else None + ), + history=( + [cls.message(h) for h in task.history] # type: ignore[misc] + if task.history + else None + ), + metadata=cls.metadata(task.metadata), + ) + + @classmethod + def task_status(cls, status: types.TaskStatus) -> a2a_pb2.TaskStatus: + return a2a_pb2.TaskStatus( + state=cls.task_state(status.state), + update=cls.message(status.message), + ) + + @classmethod + def task_state(cls, state: types.TaskState) -> a2a_pb2.TaskState: + match state: + case types.TaskState.submitted: + return a2a_pb2.TaskState.TASK_STATE_SUBMITTED + case types.TaskState.working: + return a2a_pb2.TaskState.TASK_STATE_WORKING + case types.TaskState.completed: + return a2a_pb2.TaskState.TASK_STATE_COMPLETED + case types.TaskState.canceled: + return a2a_pb2.TaskState.TASK_STATE_CANCELLED + case types.TaskState.failed: + return a2a_pb2.TaskState.TASK_STATE_FAILED + case types.TaskState.input_required: + return a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED + case types.TaskState.auth_required: + return a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED + case types.TaskState.rejected: + return a2a_pb2.TaskState.TASK_STATE_REJECTED + case _: + return a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + + @classmethod + def artifact(cls, artifact: types.Artifact) -> a2a_pb2.Artifact: + return a2a_pb2.Artifact( + artifact_id=artifact.artifact_id, + description=artifact.description, + metadata=cls.metadata(artifact.metadata), + name=artifact.name, + parts=[cls.part(p) for p in artifact.parts], + extensions=artifact.extensions or [], + ) + + @classmethod + def authentication_info( + cls, info: types.PushNotificationAuthenticationInfo + ) -> a2a_pb2.AuthenticationInfo: + return a2a_pb2.AuthenticationInfo( + schemes=info.schemes, + credentials=info.credentials, + ) + + @classmethod + def push_notification_config( + cls, config: types.PushNotificationConfig + ) -> a2a_pb2.PushNotificationConfig: + auth_info = ( + cls.authentication_info(config.authentication) + if config.authentication + else None + ) + return a2a_pb2.PushNotificationConfig( + id=config.id or '', + url=config.url, + token=config.token, + authentication=auth_info, + ) + + @classmethod + def task_artifact_update_event( + cls, event: types.TaskArtifactUpdateEvent + ) -> a2a_pb2.TaskArtifactUpdateEvent: + return a2a_pb2.TaskArtifactUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + artifact=cls.artifact(event.artifact), + metadata=cls.metadata(event.metadata), + append=event.append or False, + last_chunk=event.last_chunk or False, + ) + + @classmethod + def task_status_update_event( + cls, event: types.TaskStatusUpdateEvent + ) -> a2a_pb2.TaskStatusUpdateEvent: + return a2a_pb2.TaskStatusUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + status=cls.task_status(event.status), + metadata=cls.metadata(event.metadata), + final=event.final, + ) + + @classmethod + def message_send_configuration( + cls, config: types.MessageSendConfiguration | None + ) -> a2a_pb2.SendMessageConfiguration: + if not config: + return a2a_pb2.SendMessageConfiguration() + return a2a_pb2.SendMessageConfiguration( + accepted_output_modes=config.accepted_output_modes, + push_notification=cls.push_notification_config( + config.push_notification_config + ) + if config.push_notification_config + else None, + history_length=config.history_length, + blocking=config.blocking or False, + ) + + @classmethod + def update_event( + cls, + event: types.Task + | types.Message + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent, + ) -> a2a_pb2.StreamResponse: + """Converts a task, message, or task update event to a StreamResponse.""" + return cls.stream_response(event) + + @classmethod + def task_or_message( + cls, event: types.Task | types.Message + ) -> a2a_pb2.SendMessageResponse: + if isinstance(event, types.Message): + return a2a_pb2.SendMessageResponse( + msg=cls.message(event), + ) + return a2a_pb2.SendMessageResponse( + task=cls.task(event), + ) + + @classmethod + def stream_response( + cls, + event: ( + types.Message + | types.Task + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent + ), + ) -> a2a_pb2.StreamResponse: + if isinstance(event, types.Message): + return a2a_pb2.StreamResponse(msg=cls.message(event)) + if isinstance(event, types.Task): + return a2a_pb2.StreamResponse(task=cls.task(event)) + if isinstance(event, types.TaskStatusUpdateEvent): + return a2a_pb2.StreamResponse( + status_update=cls.task_status_update_event(event), + ) + if isinstance(event, types.TaskArtifactUpdateEvent): + return a2a_pb2.StreamResponse( + artifact_update=cls.task_artifact_update_event(event), + ) + raise ValueError(f'Unsupported event type: {type(event)}') + + @classmethod + def task_push_notification_config( + cls, config: types.TaskPushNotificationConfig + ) -> a2a_pb2.TaskPushNotificationConfig: + return a2a_pb2.TaskPushNotificationConfig( + name=f'tasks/{config.task_id}/pushNotificationConfigs/{config.push_notification_config.id}', + push_notification_config=cls.push_notification_config( + config.push_notification_config, + ), + ) + + @classmethod + def agent_card( + cls, + card: types.AgentCard, + ) -> a2a_pb2.AgentCard: + return a2a_pb2.AgentCard( + capabilities=cls.capabilities(card.capabilities), + default_input_modes=list(card.default_input_modes), + default_output_modes=list(card.default_output_modes), + description=card.description, + documentation_url=card.documentation_url, + name=card.name, + provider=cls.provider(card.provider), + security=cls.security(card.security), + security_schemes=cls.security_schemes(card.security_schemes), + skills=[cls.skill(x) for x in card.skills] if card.skills else [], + url=card.url, + version=card.version, + supports_authenticated_extended_card=bool( + card.supports_authenticated_extended_card + ), + preferred_transport=card.preferred_transport, + protocol_version=card.protocol_version, + additional_interfaces=[ + cls.agent_interface(x) for x in card.additional_interfaces + ] + if card.additional_interfaces + else None, + signatures=[cls.agent_card_signature(x) for x in card.signatures] + if card.signatures + else None, + ) + + @classmethod + def agent_card_signature( + cls, signature: types.AgentCardSignature + ) -> a2a_pb2.AgentCardSignature: + return a2a_pb2.AgentCardSignature( + protected=signature.protected, + signature=signature.signature, + header=dict_to_struct(signature.header) + if signature.header is not None + else None, + ) + + @classmethod + def agent_interface( + cls, + interface: types.AgentInterface, + ) -> a2a_pb2.AgentInterface: + return a2a_pb2.AgentInterface( + transport=interface.transport, + url=interface.url, + ) + + @classmethod + def capabilities( + cls, capabilities: types.AgentCapabilities + ) -> a2a_pb2.AgentCapabilities: + return a2a_pb2.AgentCapabilities( + streaming=bool(capabilities.streaming), + push_notifications=bool(capabilities.push_notifications), + extensions=[ + cls.extension(x) for x in capabilities.extensions or [] + ], + ) + + @classmethod + def extension( + cls, + extension: types.AgentExtension, + ) -> a2a_pb2.AgentExtension: + return a2a_pb2.AgentExtension( + uri=extension.uri, + description=extension.description, + params=dict_to_struct(extension.params) + if extension.params + else None, + required=extension.required, + ) + + @classmethod + def provider( + cls, provider: types.AgentProvider | None + ) -> a2a_pb2.AgentProvider | None: + if not provider: + return None + return a2a_pb2.AgentProvider( + organization=provider.organization, + url=provider.url, + ) + + @classmethod + def security( + cls, + security: list[dict[str, list[str]]] | None, + ) -> list[a2a_pb2.Security] | None: + if not security: + return None + return [ + a2a_pb2.Security( + schemes={k: a2a_pb2.StringList(list=v) for (k, v) in s.items()} + ) + for s in security + ] + + @classmethod + def security_schemes( + cls, + schemes: dict[str, types.SecurityScheme] | None, + ) -> dict[str, a2a_pb2.SecurityScheme] | None: + if not schemes: + return None + return {k: cls.security_scheme(v) for (k, v) in schemes.items()} + + @classmethod + def security_scheme( + cls, + scheme: types.SecurityScheme, + ) -> a2a_pb2.SecurityScheme: + if isinstance(scheme.root, types.APIKeySecurityScheme): + return a2a_pb2.SecurityScheme( + api_key_security_scheme=a2a_pb2.APIKeySecurityScheme( + description=scheme.root.description, + location=scheme.root.in_.value, + name=scheme.root.name, + ) + ) + if isinstance(scheme.root, types.HTTPAuthSecurityScheme): + return a2a_pb2.SecurityScheme( + http_auth_security_scheme=a2a_pb2.HTTPAuthSecurityScheme( + description=scheme.root.description, + scheme=scheme.root.scheme, + bearer_format=scheme.root.bearer_format, + ) + ) + if isinstance(scheme.root, types.OAuth2SecurityScheme): + return a2a_pb2.SecurityScheme( + oauth2_security_scheme=a2a_pb2.OAuth2SecurityScheme( + description=scheme.root.description, + flows=cls.oauth2_flows(scheme.root.flows), + ) + ) + if isinstance(scheme.root, types.MutualTLSSecurityScheme): + return a2a_pb2.SecurityScheme( + mtls_security_scheme=a2a_pb2.MutualTlsSecurityScheme( + description=scheme.root.description, + ) + ) + return a2a_pb2.SecurityScheme( + open_id_connect_security_scheme=a2a_pb2.OpenIdConnectSecurityScheme( + description=scheme.root.description, + open_id_connect_url=scheme.root.open_id_connect_url, + ) + ) + + @classmethod + def oauth2_flows(cls, flows: types.OAuthFlows) -> a2a_pb2.OAuthFlows: + if flows.authorization_code: + return a2a_pb2.OAuthFlows( + authorization_code=a2a_pb2.AuthorizationCodeOAuthFlow( + authorization_url=flows.authorization_code.authorization_url, + refresh_url=flows.authorization_code.refresh_url, + scopes=dict(flows.authorization_code.scopes.items()), + token_url=flows.authorization_code.token_url, + ), + ) + if flows.client_credentials: + return a2a_pb2.OAuthFlows( + client_credentials=a2a_pb2.ClientCredentialsOAuthFlow( + refresh_url=flows.client_credentials.refresh_url, + scopes=dict(flows.client_credentials.scopes.items()), + token_url=flows.client_credentials.token_url, + ), + ) + if flows.implicit: + return a2a_pb2.OAuthFlows( + implicit=a2a_pb2.ImplicitOAuthFlow( + authorization_url=flows.implicit.authorization_url, + refresh_url=flows.implicit.refresh_url, + scopes=dict(flows.implicit.scopes.items()), + ), + ) + if flows.password: + return a2a_pb2.OAuthFlows( + password=a2a_pb2.PasswordOAuthFlow( + refresh_url=flows.password.refresh_url, + scopes=dict(flows.password.scopes.items()), + token_url=flows.password.token_url, + ), + ) + raise ValueError('Unknown oauth flow definition') + + @classmethod + def skill(cls, skill: types.AgentSkill) -> a2a_pb2.AgentSkill: + return a2a_pb2.AgentSkill( + id=skill.id, + name=skill.name, + description=skill.description, + tags=skill.tags, + examples=skill.examples, + input_modes=skill.input_modes, + output_modes=skill.output_modes, + ) + + @classmethod + def role(cls, role: types.Role) -> a2a_pb2.Role: + match role: + case types.Role.user: + return a2a_pb2.Role.ROLE_USER + case types.Role.agent: + return a2a_pb2.Role.ROLE_AGENT + case _: + return a2a_pb2.Role.ROLE_UNSPECIFIED + + +class FromProto: + """Converts proto types to Python types.""" + + @classmethod + def message(cls, message: a2a_pb2.Message) -> types.Message: + return types.Message( + message_id=message.message_id, + parts=[cls.part(p) for p in message.content], + context_id=message.context_id or None, + task_id=message.task_id or None, + role=cls.role(message.role), + metadata=cls.metadata(message.metadata), + extensions=list(message.extensions) or None, + ) + + @classmethod + def metadata(cls, metadata: struct_pb2.Struct) -> dict[str, Any]: + if not metadata.fields: + return {} + return json_format.MessageToDict(metadata) + + @classmethod + def part(cls, part: a2a_pb2.Part) -> types.Part: + if part.HasField('text'): + return types.Part( + root=types.TextPart( + text=part.text, + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + if part.HasField('file'): + return types.Part( + root=types.FilePart( + file=cls.file(part.file), + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + if part.HasField('data'): + return types.Part( + root=types.DataPart( + data=cls.data(part.data), + metadata=cls.metadata(part.metadata) + if part.metadata + else None, + ), + ) + raise ValueError(f'Unsupported part type: {part}') + + @classmethod + def data(cls, data: a2a_pb2.DataPart) -> dict[str, Any]: + json_data = json_format.MessageToJson(data.data) + return json.loads(json_data) + + @classmethod + def file( + cls, file: a2a_pb2.FilePart + ) -> types.FileWithUri | types.FileWithBytes: + common_args = { + 'mime_type': file.mime_type or None, + 'name': file.name or None, + } + if file.HasField('file_with_uri'): + return types.FileWithUri( + uri=file.file_with_uri, + **common_args, + ) + return types.FileWithBytes( + bytes=file.file_with_bytes.decode('utf-8'), + **common_args, + ) + + @classmethod + def task_or_message( + cls, event: a2a_pb2.SendMessageResponse + ) -> types.Task | types.Message: + if event.HasField('msg'): + return cls.message(event.msg) + return cls.task(event.task) + + @classmethod + def task(cls, task: a2a_pb2.Task) -> types.Task: + return types.Task( + id=task.id, + context_id=task.context_id, + status=cls.task_status(task.status), + artifacts=[cls.artifact(a) for a in task.artifacts], + history=[cls.message(h) for h in task.history], + metadata=cls.metadata(task.metadata), + ) + + @classmethod + def task_status(cls, status: a2a_pb2.TaskStatus) -> types.TaskStatus: + return types.TaskStatus( + state=cls.task_state(status.state), + message=cls.message(status.update), + ) + + @classmethod + def task_state(cls, state: a2a_pb2.TaskState) -> types.TaskState: + match state: + case a2a_pb2.TaskState.TASK_STATE_SUBMITTED: + return types.TaskState.submitted + case a2a_pb2.TaskState.TASK_STATE_WORKING: + return types.TaskState.working + case a2a_pb2.TaskState.TASK_STATE_COMPLETED: + return types.TaskState.completed + case a2a_pb2.TaskState.TASK_STATE_CANCELLED: + return types.TaskState.canceled + case a2a_pb2.TaskState.TASK_STATE_FAILED: + return types.TaskState.failed + case a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED: + return types.TaskState.input_required + case a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED: + return types.TaskState.auth_required + case a2a_pb2.TaskState.TASK_STATE_REJECTED: + return types.TaskState.rejected + case _: + return types.TaskState.unknown + + @classmethod + def artifact(cls, artifact: a2a_pb2.Artifact) -> types.Artifact: + return types.Artifact( + artifact_id=artifact.artifact_id, + description=artifact.description, + metadata=cls.metadata(artifact.metadata), + name=artifact.name, + parts=[cls.part(p) for p in artifact.parts], + extensions=artifact.extensions or None, + ) + + @classmethod + def task_artifact_update_event( + cls, event: a2a_pb2.TaskArtifactUpdateEvent + ) -> types.TaskArtifactUpdateEvent: + return types.TaskArtifactUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + artifact=cls.artifact(event.artifact), + metadata=cls.metadata(event.metadata), + append=event.append, + last_chunk=event.last_chunk, + ) + + @classmethod + def task_status_update_event( + cls, event: a2a_pb2.TaskStatusUpdateEvent + ) -> types.TaskStatusUpdateEvent: + return types.TaskStatusUpdateEvent( + task_id=event.task_id, + context_id=event.context_id, + status=cls.task_status(event.status), + metadata=cls.metadata(event.metadata), + final=event.final, + ) + + @classmethod + def push_notification_config( + cls, config: a2a_pb2.PushNotificationConfig + ) -> types.PushNotificationConfig: + return types.PushNotificationConfig( + id=config.id, + url=config.url, + token=config.token, + authentication=cls.authentication_info(config.authentication) + if config.HasField('authentication') + else None, + ) + + @classmethod + def authentication_info( + cls, info: a2a_pb2.AuthenticationInfo + ) -> types.PushNotificationAuthenticationInfo: + return types.PushNotificationAuthenticationInfo( + schemes=list(info.schemes), + credentials=info.credentials, + ) + + @classmethod + def message_send_configuration( + cls, config: a2a_pb2.SendMessageConfiguration + ) -> types.MessageSendConfiguration: + return types.MessageSendConfiguration( + accepted_output_modes=list(config.accepted_output_modes), + push_notification_config=cls.push_notification_config( + config.push_notification + ) + if config.HasField('push_notification') + else None, + history_length=config.history_length, + blocking=config.blocking, + ) + + @classmethod + def message_send_params( + cls, request: a2a_pb2.SendMessageRequest + ) -> types.MessageSendParams: + return types.MessageSendParams( + configuration=cls.message_send_configuration(request.configuration), + message=cls.message(request.request), + metadata=cls.metadata(request.metadata), + ) + + @classmethod + def task_id_params( + cls, + request: ( + a2a_pb2.CancelTaskRequest + | a2a_pb2.TaskSubscriptionRequest + | a2a_pb2.GetTaskPushNotificationConfigRequest + ), + ) -> types.TaskIdParams: + if isinstance(request, a2a_pb2.GetTaskPushNotificationConfigRequest): + m = TASK_PUSH_CONFIG_NAME_MATCH.match(request.name) + if not m: + raise InvalidParamsError(message=f'No task for {request.name}') + return types.TaskIdParams(id=m.group(1)) + m = TASK_NAME_MATCH.match(request.name) + if not m: + raise InvalidParamsError(message=f'No task for {request.name}') + return types.TaskIdParams(id=m.group(1)) + + @classmethod + def task_push_notification_config_request( + cls, + request: a2a_pb2.CreateTaskPushNotificationConfigRequest, + ) -> types.TaskPushNotificationConfig: + m = TASK_NAME_MATCH.match(request.parent) + if not m: + raise InvalidParamsError(message=f'No task for {request.parent}') + return types.TaskPushNotificationConfig( + push_notification_config=cls.push_notification_config( + request.config.push_notification_config, + ), + task_id=m.group(1), + ) + + @classmethod + def task_push_notification_config( + cls, + config: a2a_pb2.TaskPushNotificationConfig, + ) -> types.TaskPushNotificationConfig: + m = TASK_PUSH_CONFIG_NAME_MATCH.match(config.name) + if not m: + raise InvalidParamsError( + message=f'Bad TaskPushNotificationConfig resource name {config.name}' + ) + return types.TaskPushNotificationConfig( + push_notification_config=cls.push_notification_config( + config.push_notification_config, + ), + task_id=m.group(1), + ) + + @classmethod + def agent_card( + cls, + card: a2a_pb2.AgentCard, + ) -> types.AgentCard: + return types.AgentCard( + capabilities=cls.capabilities(card.capabilities), + default_input_modes=list(card.default_input_modes), + default_output_modes=list(card.default_output_modes), + description=card.description, + documentation_url=card.documentation_url, + name=card.name, + provider=cls.provider(card.provider), + security=cls.security(list(card.security)), + security_schemes=cls.security_schemes(dict(card.security_schemes)), + skills=[cls.skill(x) for x in card.skills] if card.skills else [], + url=card.url, + version=card.version, + supports_authenticated_extended_card=card.supports_authenticated_extended_card, + preferred_transport=card.preferred_transport, + protocol_version=card.protocol_version, + additional_interfaces=[ + cls.agent_interface(x) for x in card.additional_interfaces + ] + if card.additional_interfaces + else None, + signatures=[cls.agent_card_signature(x) for x in card.signatures] + if card.signatures + else None, + ) + + @classmethod + def agent_card_signature( + cls, signature: a2a_pb2.AgentCardSignature + ) -> types.AgentCardSignature: + return types.AgentCardSignature( + protected=signature.protected, + signature=signature.signature, + header=json_format.MessageToDict(signature.header), + ) + + @classmethod + def agent_interface( + cls, + interface: a2a_pb2.AgentInterface, + ) -> types.AgentInterface: + return types.AgentInterface( + transport=interface.transport, + url=interface.url, + ) + + @classmethod + def task_query_params( + cls, + request: a2a_pb2.GetTaskRequest, + ) -> types.TaskQueryParams: + m = TASK_NAME_MATCH.match(request.name) + if not m: + raise InvalidParamsError(message=f'No task for {request.name}') + return types.TaskQueryParams( + history_length=request.history_length + if request.history_length + else None, + id=m.group(1), + metadata=None, + ) + + @classmethod + def capabilities( + cls, capabilities: a2a_pb2.AgentCapabilities + ) -> types.AgentCapabilities: + return types.AgentCapabilities( + streaming=capabilities.streaming, + push_notifications=capabilities.push_notifications, + extensions=[ + cls.agent_extension(x) for x in capabilities.extensions + ], + ) + + @classmethod + def agent_extension( + cls, + extension: a2a_pb2.AgentExtension, + ) -> types.AgentExtension: + return types.AgentExtension( + uri=extension.uri, + description=extension.description, + params=json_format.MessageToDict(extension.params), + required=extension.required, + ) + + @classmethod + def security( + cls, + security: list[a2a_pb2.Security] | None, + ) -> list[dict[str, list[str]]] | None: + if not security: + return None + return [ + {k: list(v.list) for (k, v) in s.schemes.items()} for s in security + ] + + @classmethod + def provider( + cls, provider: a2a_pb2.AgentProvider | None + ) -> types.AgentProvider | None: + if not provider: + return None + return types.AgentProvider( + organization=provider.organization, + url=provider.url, + ) + + @classmethod + def security_schemes( + cls, schemes: dict[str, a2a_pb2.SecurityScheme] + ) -> dict[str, types.SecurityScheme]: + return {k: cls.security_scheme(v) for (k, v) in schemes.items()} + + @classmethod + def security_scheme( + cls, + scheme: a2a_pb2.SecurityScheme, + ) -> types.SecurityScheme: + if scheme.HasField('api_key_security_scheme'): + return types.SecurityScheme( + root=types.APIKeySecurityScheme( + description=scheme.api_key_security_scheme.description, + name=scheme.api_key_security_scheme.name, + in_=types.In(scheme.api_key_security_scheme.location), # type: ignore[call-arg] + ) + ) + if scheme.HasField('http_auth_security_scheme'): + return types.SecurityScheme( + root=types.HTTPAuthSecurityScheme( + description=scheme.http_auth_security_scheme.description, + scheme=scheme.http_auth_security_scheme.scheme, + bearer_format=scheme.http_auth_security_scheme.bearer_format, + ) + ) + if scheme.HasField('oauth2_security_scheme'): + return types.SecurityScheme( + root=types.OAuth2SecurityScheme( + description=scheme.oauth2_security_scheme.description, + flows=cls.oauth2_flows(scheme.oauth2_security_scheme.flows), + ) + ) + if scheme.HasField('mtls_security_scheme'): + return types.SecurityScheme( + root=types.MutualTLSSecurityScheme( + description=scheme.mtls_security_scheme.description, + ) + ) + return types.SecurityScheme( + root=types.OpenIdConnectSecurityScheme( + description=scheme.open_id_connect_security_scheme.description, + open_id_connect_url=scheme.open_id_connect_security_scheme.open_id_connect_url, + ) + ) + + @classmethod + def oauth2_flows(cls, flows: a2a_pb2.OAuthFlows) -> types.OAuthFlows: + if flows.HasField('authorization_code'): + return types.OAuthFlows( + authorization_code=types.AuthorizationCodeOAuthFlow( + authorization_url=flows.authorization_code.authorization_url, + refresh_url=flows.authorization_code.refresh_url, + scopes=dict(flows.authorization_code.scopes.items()), + token_url=flows.authorization_code.token_url, + ), + ) + if flows.HasField('client_credentials'): + return types.OAuthFlows( + client_credentials=types.ClientCredentialsOAuthFlow( + refresh_url=flows.client_credentials.refresh_url, + scopes=dict(flows.client_credentials.scopes.items()), + token_url=flows.client_credentials.token_url, + ), + ) + if flows.HasField('implicit'): + return types.OAuthFlows( + implicit=types.ImplicitOAuthFlow( + authorization_url=flows.implicit.authorization_url, + refresh_url=flows.implicit.refresh_url, + scopes=dict(flows.implicit.scopes.items()), + ), + ) + return types.OAuthFlows( + password=types.PasswordOAuthFlow( + refresh_url=flows.password.refresh_url, + scopes=dict(flows.password.scopes.items()), + token_url=flows.password.token_url, + ), + ) + + @classmethod + def stream_response( + cls, + response: a2a_pb2.StreamResponse, + ) -> ( + types.Message + | types.Task + | types.TaskStatusUpdateEvent + | types.TaskArtifactUpdateEvent + ): + if response.HasField('msg'): + return cls.message(response.msg) + if response.HasField('task'): + return cls.task(response.task) + if response.HasField('status_update'): + return cls.task_status_update_event(response.status_update) + if response.HasField('artifact_update'): + return cls.task_artifact_update_event(response.artifact_update) + raise ValueError('Unsupported StreamResponse type') + + @classmethod + def list_task_push_notification_config_response( + cls, response: a2a_pb2.ListTaskPushNotificationConfigResponse + ) -> types.ListTaskPushNotificationConfigResponse: + return types.ListTaskPushNotificationConfigResponse( + root=types.ListTaskPushNotificationConfigSuccessResponse( + result=[ + cls.task_push_notification_config(c) + for c in response.configs + ], + id=None, + ) + ) + + @classmethod + def skill(cls, skill: a2a_pb2.AgentSkill) -> types.AgentSkill: + return types.AgentSkill( + id=skill.id, + name=skill.name, + description=skill.description, + tags=list(skill.tags), + examples=list(skill.examples), + input_modes=list(skill.input_modes), + output_modes=list(skill.output_modes), + ) + + @classmethod + def role(cls, role: a2a_pb2.Role) -> types.Role: + match role: + case a2a_pb2.Role.ROLE_USER: + return types.Role.user + case a2a_pb2.Role.ROLE_AGENT: + return types.Role.agent + case _: + return types.Role.agent diff --git a/src/a2a/compat/v0_3/request_handler.py b/src/a2a/compat/v0_3/request_handler.py new file mode 100644 index 000000000..d79a5cc5d --- /dev/null +++ b/src/a2a/compat/v0_3/request_handler.py @@ -0,0 +1,182 @@ +import logging +import typing + +from collections.abc import AsyncIterable + +from a2a.compat.v0_3 import conversions +from a2a.compat.v0_3 import types as types_v03 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import Task +from a2a.utils import proto_utils as core_proto_utils +from a2a.utils.errors import TaskNotFoundError + + +logger = logging.getLogger(__name__) + + +class RequestHandler03: + """A protocol-agnostic v0.3 RequestHandler that delegates to the v1.0 RequestHandler.""" + + def __init__(self, request_handler: RequestHandler): + self.request_handler = request_handler + + async def on_message_send( + self, + request: types_v03.SendMessageRequest, + context: ServerCallContext, + ) -> types_v03.Task | types_v03.Message: + """Sends a message using v0.3 protocol types.""" + v10_req = conversions.to_core_send_message_request(request) + task_or_message = await self.request_handler.on_message_send( + v10_req, context + ) + if isinstance(task_or_message, Task): + return conversions.to_compat_task(task_or_message) + return conversions.to_compat_message(task_or_message) + + async def on_message_send_stream( + self, + request: types_v03.SendMessageRequest, + context: ServerCallContext, + ) -> AsyncIterable[types_v03.SendStreamingMessageSuccessResponse]: + """Sends a message stream using v0.3 protocol types.""" + v10_req = conversions.to_core_send_message_request(request) + async for event in self.request_handler.on_message_send_stream( + v10_req, context + ): + v10_stream_resp = core_proto_utils.to_stream_response(event) + yield conversions.to_compat_stream_response( + v10_stream_resp, request.id + ) + + async def on_cancel_task( + self, + request: types_v03.CancelTaskRequest, + context: ServerCallContext, + ) -> types_v03.Task: + """Cancels a task using v0.3 protocol types.""" + v10_req = conversions.to_core_cancel_task_request(request) + v10_task = await self.request_handler.on_cancel_task(v10_req, context) + if v10_task: + return conversions.to_compat_task(v10_task) + raise TaskNotFoundError + + async def on_subscribe_to_task( + self, + request: types_v03.TaskResubscriptionRequest, + context: ServerCallContext, + ) -> AsyncIterable[types_v03.SendStreamingMessageSuccessResponse]: + """Subscribes to a task using v0.3 protocol types.""" + v10_req = conversions.to_core_subscribe_to_task_request(request) + async for event in self.request_handler.on_subscribe_to_task( + v10_req, context + ): + v10_stream_resp = core_proto_utils.to_stream_response(event) + yield conversions.to_compat_stream_response( + v10_stream_resp, request.id + ) + + async def on_get_task_push_notification_config( + self, + request: types_v03.GetTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> types_v03.TaskPushNotificationConfig: + """Gets a push notification config using v0.3 protocol types.""" + v10_req = conversions.to_core_get_task_push_notification_config_request( + request + ) + v10_config = ( + await self.request_handler.on_get_task_push_notification_config( + v10_req, context + ) + ) + return conversions.to_compat_task_push_notification_config(v10_config) + + async def on_create_task_push_notification_config( + self, + request: types_v03.SetTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> types_v03.TaskPushNotificationConfig: + """Creates a push notification config using v0.3 protocol types.""" + v10_req = ( + conversions.to_core_create_task_push_notification_config_request( + request + ) + ) + v10_config = ( + await self.request_handler.on_create_task_push_notification_config( + v10_req, context + ) + ) + return conversions.to_compat_task_push_notification_config(v10_config) + + async def on_get_task( + self, + request: types_v03.GetTaskRequest, + context: ServerCallContext, + ) -> types_v03.Task: + """Gets a task using v0.3 protocol types.""" + v10_req = conversions.to_core_get_task_request(request) + v10_task = await self.request_handler.on_get_task(v10_req, context) + if v10_task: + return conversions.to_compat_task(v10_task) + raise TaskNotFoundError + + async def on_list_task_push_notification_configs( + self, + request: types_v03.ListTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> list[types_v03.TaskPushNotificationConfig]: + """Lists push notification configs using v0.3 protocol types.""" + v10_req = ( + conversions.to_core_list_task_push_notification_config_request( + request + ) + ) + v10_resp = ( + await self.request_handler.on_list_task_push_notification_configs( + v10_req, context + ) + ) + v03_resp = ( + conversions.to_compat_list_task_push_notification_config_response( + v10_resp, request.id + ) + ) + if isinstance( + v03_resp.root, + types_v03.ListTaskPushNotificationConfigSuccessResponse, + ): + return typing.cast( + 'list[types_v03.TaskPushNotificationConfig]', + v03_resp.root.result, + ) + return [] + + async def on_delete_task_push_notification_config( + self, + request: types_v03.DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> None: + """Deletes a push notification config using v0.3 protocol types.""" + v10_req = ( + conversions.to_core_delete_task_push_notification_config_request( + request + ) + ) + await self.request_handler.on_delete_task_push_notification_config( + v10_req, context + ) + + async def on_get_extended_agent_card( + self, + request: types_v03.GetAuthenticatedExtendedCardRequest, + context: ServerCallContext, + ) -> types_v03.AgentCard: + """Gets the authenticated extended agent card using v0.3 protocol types.""" + v10_req = conversions.to_core_get_extended_agent_card_request(request) + v10_card = await self.request_handler.on_get_extended_agent_card( + v10_req, context + ) + return conversions.to_compat_agent_card(v10_card) diff --git a/src/a2a/compat/v0_3/rest_adapter.py b/src/a2a/compat/v0_3/rest_adapter.py new file mode 100644 index 000000000..38687054f --- /dev/null +++ b/src/a2a/compat/v0_3/rest_adapter.py @@ -0,0 +1,153 @@ +import functools +import json +import logging + +from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + from a2a.server.context import ServerCallContext + from a2a.server.request_handlers.request_handler import RequestHandler + + _package_starlette_installed = True +else: + try: + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + _package_starlette_installed = True + except ImportError: + EventSourceResponse = Any + Request = Any + JSONResponse = Any + Response = Any + + _package_starlette_installed = False + + +from a2a.compat.v0_3.context_builders import V03ServerCallContextBuilder +from a2a.compat.v0_3.rest_handler import REST03Handler +from a2a.server.routes.common import ( + DefaultServerCallContextBuilder, + ServerCallContextBuilder, +) +from a2a.utils.error_handlers import ( + rest_error_handler, + rest_stream_error_handler, +) +from a2a.utils.errors import ( + InvalidRequestError, +) + + +logger = logging.getLogger(__name__) + + +class REST03Adapter: + """Adapter to make RequestHandler work with v0.3 RESTful API. + + Defines v0.3 REST request processors and their routes, as well as managing response generation including Server-Sent Events (SSE). + """ + + def __init__( + self, + http_handler: 'RequestHandler', + context_builder: 'ServerCallContextBuilder | None' = None, + ): + self.handler = REST03Handler(request_handler=http_handler) + self._context_builder = V03ServerCallContextBuilder( + context_builder or DefaultServerCallContextBuilder() + ) + + @rest_error_handler + async def _handle_request( + self, + method: 'Callable[[Request, ServerCallContext], Awaitable[Any]]', + request: Request, + ) -> Response: + call_context = self._context_builder.build(request) + response = await method(request, call_context) + return JSONResponse(content=response) + + @rest_stream_error_handler + async def _handle_streaming_request( + self, + method: 'Callable[[Request, ServerCallContext], AsyncIterable[Any]]', + request: Request, + ) -> EventSourceResponse: + try: + await request.body() + except (ValueError, RuntimeError, OSError) as e: + raise InvalidRequestError( + message=f'Failed to pre-consume request body: {e}' + ) from e + + call_context = self._context_builder.build(request) + + async def event_generator( + stream: AsyncIterable[Any], + ) -> AsyncIterator[str]: + async for item in stream: + yield json.dumps(item) + + return EventSourceResponse( + event_generator(method(request, call_context)) + ) + + def routes(self) -> dict[tuple[str, str], Callable[[Request], Any]]: + """Constructs a dictionary of API routes and their corresponding handlers.""" + routes: dict[tuple[str, str], Callable[[Request], Any]] = { + ('/v1/message:send', 'POST'): functools.partial( + self._handle_request, self.handler.on_message_send + ), + ('/v1/message:stream', 'POST'): functools.partial( + self._handle_streaming_request, + self.handler.on_message_send_stream, + ), + ('/v1/tasks/{id}:cancel', 'POST'): functools.partial( + self._handle_request, self.handler.on_cancel_task + ), + ('/v1/tasks/{id}:subscribe', 'GET'): functools.partial( + self._handle_streaming_request, + self.handler.on_subscribe_to_task, + ), + ('/v1/tasks/{id}:subscribe', 'POST'): functools.partial( + self._handle_streaming_request, + self.handler.on_subscribe_to_task, + ), + ('/v1/tasks/{id}', 'GET'): functools.partial( + self._handle_request, self.handler.on_get_task + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs/{push_id}', + 'GET', + ): functools.partial( + self._handle_request, self.handler.get_push_notification + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs', + 'POST', + ): functools.partial( + self._handle_request, self.handler.set_push_notification + ), + ( + '/v1/tasks/{id}/pushNotificationConfigs', + 'GET', + ): functools.partial( + self._handle_request, self.handler.list_push_notifications + ), + ('/v1/tasks', 'GET'): functools.partial( + self._handle_request, self.handler.list_tasks + ), + ('/v1/card', 'GET'): functools.partial( + self._handle_request, self.handler.on_get_extended_agent_card + ), + } + + return routes diff --git a/src/a2a/compat/v0_3/rest_handler.py b/src/a2a/compat/v0_3/rest_handler.py new file mode 100644 index 000000000..bd5fcd2e6 --- /dev/null +++ b/src/a2a/compat/v0_3/rest_handler.py @@ -0,0 +1,313 @@ +import logging + +from collections.abc import AsyncIterator +from typing import TYPE_CHECKING, Any + +from google.protobuf.json_format import MessageToDict, Parse + + +if TYPE_CHECKING: + from starlette.requests import Request + + from a2a.server.request_handlers.request_handler import RequestHandler + + _package_starlette_installed = True +else: + try: + from starlette.requests import Request + + _package_starlette_installed = True + except ImportError: + Request = Any + + _package_starlette_installed = False + +from a2a.compat.v0_3 import a2a_v0_3_pb2 as pb2_v03 +from a2a.compat.v0_3 import proto_utils +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.request_handler import RequestHandler03 +from a2a.server.context import ServerCallContext +from a2a.utils import constants +from a2a.utils.telemetry import SpanKind, trace_class +from a2a.utils.version_validator import validate_version + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.SERVER) +class REST03Handler: + """Maps incoming REST-like (JSON+HTTP) requests to the appropriate request handler method and formats responses for v0.3 compatibility.""" + + def __init__( + self, + request_handler: 'RequestHandler', + ): + """Initializes the REST03Handler. + + Args: + request_handler: The underlying `RequestHandler` instance to delegate requests to (v1.0). + """ + self.handler03 = RequestHandler03(request_handler=request_handler) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_message_send( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'message/send' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `dict` containing the result (Task or Message) in v0.3 format. + """ + body = await request.body() + v03_pb_msg = pb2_v03.SendMessageRequest() + Parse(body, v03_pb_msg, ignore_unknown_fields=True) + v03_params_msg = proto_utils.FromProto.message_send_params(v03_pb_msg) + rpc_req = types_v03.SendMessageRequest(id='', params=v03_params_msg) + + v03_resp = await self.handler03.on_message_send(rpc_req, context) + + pb2_v03_resp = proto_utils.ToProto.task_or_message(v03_resp) + return MessageToDict(pb2_v03_resp) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_message_send_stream( + self, + request: Request, + context: ServerCallContext, + ) -> AsyncIterator[dict[str, Any]]: + """Handles the 'message/stream' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + JSON serialized objects containing streaming events in v0.3 format. + """ + body = await request.body() + v03_pb_msg = pb2_v03.SendMessageRequest() + Parse(body, v03_pb_msg, ignore_unknown_fields=True) + v03_params_msg = proto_utils.FromProto.message_send_params(v03_pb_msg) + rpc_req = types_v03.SendMessageRequest(id='', params=v03_params_msg) + + async for v03_stream_resp in self.handler03.on_message_send_stream( + rpc_req, context + ): + v03_pb_resp = proto_utils.ToProto.stream_response( + v03_stream_resp.result + ) + yield MessageToDict(v03_pb_resp) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_cancel_task( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/cancel' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `dict` containing the updated Task in v0.3 format. + """ + task_id = request.path_params['id'] + rpc_req = types_v03.CancelTaskRequest( + id='', + params=types_v03.TaskIdParams(id=task_id), + ) + + v03_resp = await self.handler03.on_cancel_task(rpc_req, context) + pb2_v03_task = proto_utils.ToProto.task(v03_resp) + return MessageToDict(pb2_v03_task) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_subscribe_to_task( + self, + request: Request, + context: ServerCallContext, + ) -> AsyncIterator[dict[str, Any]]: + """Handles the 'tasks/{id}:subscribe' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Yields: + JSON serialized objects containing streaming events in v0.3 format. + """ + task_id = request.path_params['id'] + rpc_req = types_v03.TaskResubscriptionRequest( + id='', + params=types_v03.TaskIdParams(id=task_id), + ) + + async for v03_stream_resp in self.handler03.on_subscribe_to_task( + rpc_req, context + ): + v03_pb_resp = proto_utils.ToProto.stream_response( + v03_stream_resp.result + ) + yield MessageToDict(v03_pb_resp) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def get_push_notification( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/get' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `dict` containing the config in v0.3 format. + """ + task_id = request.path_params['id'] + push_id = request.path_params['push_id'] + + rpc_req = types_v03.GetTaskPushNotificationConfigRequest( + id='', + params=types_v03.GetTaskPushNotificationConfigParams( + id=task_id, push_notification_config_id=push_id + ), + ) + + v03_resp = await self.handler03.on_get_task_push_notification_config( + rpc_req, context + ) + pb2_v03_config = proto_utils.ToProto.task_push_notification_config( + v03_resp + ) + return MessageToDict(pb2_v03_config) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def set_push_notification( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/set' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `dict` containing the config object in v0.3 format. + """ + task_id = request.path_params['id'] + body = await request.body() + + v03_pb_push = pb2_v03.CreateTaskPushNotificationConfigRequest() + Parse(body, v03_pb_push, ignore_unknown_fields=True) + + v03_params_push = ( + proto_utils.FromProto.task_push_notification_config_request( + v03_pb_push + ) + ) + v03_params_push.task_id = task_id + + rpc_req_push = types_v03.SetTaskPushNotificationConfigRequest( + id='', + params=v03_params_push, + ) + + v03_resp = await self.handler03.on_create_task_push_notification_config( + rpc_req_push, context + ) + pb2_v03_config = proto_utils.ToProto.task_push_notification_config( + v03_resp + ) + return MessageToDict(pb2_v03_config) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_get_task( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'v1/tasks/{id}' REST method. + + Args: + request: The incoming `Request` object. + context: Context provided by the server. + + Returns: + A `Task` object containing the Task in v0.3 format. + """ + task_id = request.path_params['id'] + history_length_str = request.query_params.get('historyLength') + history_length = int(history_length_str) if history_length_str else None + + rpc_req = types_v03.GetTaskRequest( + id='', + params=types_v03.TaskQueryParams( + id=task_id, history_length=history_length + ), + ) + + v03_resp = await self.handler03.on_get_task(rpc_req, context) + pb2_v03_task = proto_utils.ToProto.task(v03_resp) + return MessageToDict(pb2_v03_task) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def list_push_notifications( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/pushNotificationConfig/list' REST method.""" + task_id = request.path_params['id'] + + rpc_req = types_v03.ListTaskPushNotificationConfigRequest( + id='', + params=types_v03.ListTaskPushNotificationConfigParams(id=task_id), + ) + + v03_resp = await self.handler03.on_list_task_push_notification_configs( + rpc_req, context + ) + + pb2_v03_resp = pb2_v03.ListTaskPushNotificationConfigResponse( + configs=[ + proto_utils.ToProto.task_push_notification_config(c) + for c in v03_resp + ] + ) + + return MessageToDict(pb2_v03_resp) + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def list_tasks( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'tasks/list' REST method.""" + raise NotImplementedError('list tasks not implemented') + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def on_get_extended_agent_card( + self, + request: Request, + context: ServerCallContext, + ) -> dict[str, Any]: + """Handles the 'v1/agent/authenticatedExtendedAgentCard' REST method.""" + rpc_req = types_v03.GetAuthenticatedExtendedCardRequest(id=0) + v03_resp = await self.handler03.on_get_extended_agent_card( + rpc_req, context + ) + return v03_resp.model_dump(mode='json', exclude_none=True) diff --git a/src/a2a/compat/v0_3/rest_transport.py b/src/a2a/compat/v0_3/rest_transport.py new file mode 100644 index 000000000..bcaed2949 --- /dev/null +++ b/src/a2a/compat/v0_3/rest_transport.py @@ -0,0 +1,428 @@ +import contextlib +import json +import logging + +from collections.abc import AsyncGenerator +from typing import Any, NoReturn + +import httpx + +from google.protobuf.json_format import MessageToDict, Parse, ParseDict + +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.http_helpers import ( + get_http_args, + send_http_request, + send_http_stream_request, +) +from a2a.compat.v0_3 import ( + a2a_v0_3_pb2, + conversions, + proto_utils, +) +from a2a.compat.v0_3 import ( + types as types_v03, +) +from a2a.compat.v0_3.extension_headers import add_legacy_extension_header +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, +) +from a2a.utils.constants import PROTOCOL_VERSION_0_3, VERSION_HEADER +from a2a.utils.errors import JSON_RPC_ERROR_CODE_MAP, MethodNotFoundError +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + +_A2A_ERROR_NAME_TO_CLS = { + error_type.__name__: error_type for error_type in JSON_RPC_ERROR_CODE_MAP +} + + +@trace_class(kind=SpanKind.CLIENT) +class CompatRestTransport(ClientTransport): + """A backward compatible REST transport for A2A v0.3.""" + + def __init__( + self, + httpx_client: httpx.AsyncClient, + agent_card: AgentCard | None, + url: str, + subscribe_method_override: str | None = None, + ): + """Initializes the CompatRestTransport.""" + self.url = url.removesuffix('/') + self.httpx_client = httpx_client + self.agent_card = agent_card + self._subscribe_method_override = subscribe_method_override + self._subscribe_auto_method_override = subscribe_method_override is None + + async def send_message( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> SendMessageResponse: + """Sends a non-streaming message request to the agent.""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + req_proto = a2a_v0_3_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(req_v03.params.message), + configuration=proto_utils.ToProto.message_send_configuration( + req_v03.params.configuration + ), + metadata=proto_utils.ToProto.metadata(req_v03.params.metadata), + ) + + response_data = await self._execute_request( + 'POST', + '/v1/message:send', + context=context, + json=MessageToDict(req_proto, preserving_proto_field_name=True), + ) + + resp_proto = ParseDict( + response_data, + a2a_v0_3_pb2.SendMessageResponse(), + ignore_unknown_fields=True, + ) + which = resp_proto.WhichOneof('payload') + if which == 'task': + return SendMessageResponse( + task=conversions.to_core_task( + proto_utils.FromProto.task(resp_proto.task) + ) + ) + if which == 'msg': + return SendMessageResponse( + message=conversions.to_core_message( + proto_utils.FromProto.message(resp_proto.msg) + ) + ) + return SendMessageResponse() + + async def send_message_streaming( + self, + request: SendMessageRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Sends a streaming message request to the agent and yields responses as they arrive.""" + req_v03 = conversions.to_compat_send_message_request( + request, request_id=0 + ) + req_proto = a2a_v0_3_pb2.SendMessageRequest( + request=proto_utils.ToProto.message(req_v03.params.message), + configuration=proto_utils.ToProto.message_send_configuration( + req_v03.params.configuration + ), + metadata=proto_utils.ToProto.metadata(req_v03.params.metadata), + ) + + async for event in self._send_stream_request( + 'POST', + '/v1/message:stream', + context=context, + json=MessageToDict(req_proto, preserving_proto_field_name=True), + ): + yield event + + async def get_task( + self, + request: GetTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Retrieves the current state and history of a specific task.""" + params = {} + if request.HasField('history_length'): + params['historyLength'] = request.history_length + + response_data = await self._execute_request( + 'GET', + f'/v1/tasks/{request.id}', + context=context, + params=params, + ) + resp_proto = ParseDict( + response_data, a2a_v0_3_pb2.Task(), ignore_unknown_fields=True + ) + return conversions.to_core_task(proto_utils.FromProto.task(resp_proto)) + + async def list_tasks( + self, + request: ListTasksRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTasksResponse: + """Retrieves tasks for an agent.""" + raise NotImplementedError( + 'ListTasks is not supported in A2A v0.3 REST.' + ) + + async def cancel_task( + self, + request: CancelTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> Task: + """Requests the agent to cancel a specific task.""" + response_data = await self._execute_request( + 'POST', + f'/v1/tasks/{request.id}:cancel', + context=context, + ) + resp_proto = ParseDict( + response_data, a2a_v0_3_pb2.Task(), ignore_unknown_fields=True + ) + return conversions.to_core_task(proto_utils.FromProto.task(resp_proto)) + + async def create_task_push_notification_config( + self, + request: TaskPushNotificationConfig, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Sets or updates the push notification configuration for a specific task.""" + req_v03 = ( + conversions.to_compat_create_task_push_notification_config_request( + request, request_id=0 + ) + ) + req_proto = a2a_v0_3_pb2.CreateTaskPushNotificationConfigRequest( + parent=f'tasks/{request.task_id}', + config_id=req_v03.params.push_notification_config.id, + config=proto_utils.ToProto.task_push_notification_config( + req_v03.params + ), + ) + response_data = await self._execute_request( + 'POST', + f'/v1/tasks/{request.task_id}/pushNotificationConfigs', + context=context, + json=MessageToDict(req_proto, preserving_proto_field_name=True), + ) + resp_proto = ParseDict( + response_data, + a2a_v0_3_pb2.TaskPushNotificationConfig(), + ignore_unknown_fields=True, + ) + return conversions.to_core_task_push_notification_config( + proto_utils.FromProto.task_push_notification_config(resp_proto) + ) + + async def get_task_push_notification_config( + self, + request: GetTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> TaskPushNotificationConfig: + """Retrieves the push notification configuration for a specific task.""" + response_data = await self._execute_request( + 'GET', + f'/v1/tasks/{request.task_id}/pushNotificationConfigs/{request.id}', + context=context, + ) + resp_proto = ParseDict( + response_data, + a2a_v0_3_pb2.TaskPushNotificationConfig(), + ignore_unknown_fields=True, + ) + return conversions.to_core_task_push_notification_config( + proto_utils.FromProto.task_push_notification_config(resp_proto) + ) + + async def list_task_push_notification_configs( + self, + request: ListTaskPushNotificationConfigsRequest, + *, + context: ClientCallContext | None = None, + ) -> ListTaskPushNotificationConfigsResponse: + """Lists push notification configurations for a specific task.""" + raise NotImplementedError( + 'list_task_push_notification_configs not supported in v0.3 REST' + ) + + async def delete_task_push_notification_config( + self, + request: DeleteTaskPushNotificationConfigRequest, + *, + context: ClientCallContext | None = None, + ) -> None: + """Deletes the push notification configuration for a specific task.""" + raise NotImplementedError( + 'delete_task_push_notification_config not supported in v0.3 REST' + ) + + async def subscribe( + self, + request: SubscribeToTaskRequest, + *, + context: ClientCallContext | None = None, + ) -> AsyncGenerator[StreamResponse]: + """Reconnects to get task updates. + + This method implements backward compatibility logic for the subscribe + endpoint. It first attempts to use POST, which is the official method + for A2A subscribe endpoint. If the server returns 405 Method Not Allowed, + it falls back to GET and remembers this preference for future calls + on this transport instance. If both fail with 405, it will default back + to POST for next calls but will not retry again. + """ + subscribe_method = self._subscribe_method_override or 'POST' + try: + async for event in self._send_stream_request( + subscribe_method, + f'/v1/tasks/{request.id}:subscribe', + context=context, + ): + yield event + except A2AClientError as e: + # Check for 405 Method Not Allowed in the cause (httpx.HTTPStatusError) + cause = e.__cause__ + if ( + isinstance(cause, httpx.HTTPStatusError) + and cause.response.status_code == httpx.codes.METHOD_NOT_ALLOWED + ): + if self._subscribe_method_override: + if self._subscribe_auto_method_override: + self._subscribe_auto_method_override = False + self._subscribe_method_override = 'POST' + raise + else: + self._subscribe_method_override = 'GET' + async for event in self.subscribe(request, context=context): + yield event + else: + raise + + async def get_extended_agent_card( + self, + request: GetExtendedAgentCardRequest, + *, + context: ClientCallContext | None = None, + ) -> AgentCard: + """Retrieves the Extended AgentCard.""" + card = self.agent_card + if card and not card.capabilities.extended_agent_card: + return card + + response_data = await self._execute_request( + 'GET', '/v1/card', context=context + ) + resp_proto = ParseDict( + response_data, a2a_v0_3_pb2.AgentCard(), ignore_unknown_fields=True + ) + card = conversions.to_core_agent_card( + proto_utils.FromProto.agent_card(resp_proto) + ) + self.agent_card = card + return card + + async def close(self) -> None: + """Closes the httpx client.""" + await self.httpx_client.aclose() + + def _handle_http_error(self, e: httpx.HTTPStatusError) -> NoReturn: + """Handles HTTP status errors and raises the appropriate A2AError.""" + try: + with contextlib.suppress(httpx.StreamClosed): + e.response.read() + + try: + error_data = e.response.json() + except (json.JSONDecodeError, ValueError, httpx.ResponseNotRead): + error_data = {} + + error_type = error_data.get('type') + message = error_data.get('message', str(e)) + + if isinstance(error_type, str): + exception_cls = _A2A_ERROR_NAME_TO_CLS.get(error_type) + if exception_cls: + raise exception_cls(message) from e + except (json.JSONDecodeError, ValueError): + pass + + status_code = e.response.status_code + if status_code == httpx.codes.NOT_FOUND: + raise MethodNotFoundError( + f'Resource not found: {e.request.url}' + ) from e + + raise A2AClientError(f'HTTP Error {status_code}: {e}') from e + + async def _send_stream_request( + self, + method: str, + path: str, + context: ClientCallContext | None = None, + *, + json: dict[str, Any] | None = None, + ) -> AsyncGenerator[StreamResponse]: + http_kwargs = get_http_args(context) + http_kwargs.setdefault('headers', {}) + http_kwargs['headers'][VERSION_HEADER.lower()] = PROTOCOL_VERSION_0_3 + add_legacy_extension_header(http_kwargs['headers']) + + async for sse_data in send_http_stream_request( + self.httpx_client, + method, + f'{self.url}{path}', + self._handle_http_error, + json=json, + **http_kwargs, + ): + event_proto = a2a_v0_3_pb2.StreamResponse() + Parse(sse_data, event_proto, ignore_unknown_fields=True) + yield conversions.to_core_stream_response( + types_v03.SendStreamingMessageSuccessResponse( + result=proto_utils.FromProto.stream_response(event_proto) + ) + ) + + async def _send_request(self, request: httpx.Request) -> dict[str, Any]: + return await send_http_request( + self.httpx_client, request, self._handle_http_error + ) + + async def _execute_request( + self, + method: str, + path: str, + context: ClientCallContext | None = None, + *, + json: dict[str, Any] | None = None, + params: dict[str, Any] | None = None, + ) -> dict[str, Any]: + http_kwargs = get_http_args(context) + http_kwargs.setdefault('headers', {}) + http_kwargs['headers'][VERSION_HEADER.lower()] = PROTOCOL_VERSION_0_3 + add_legacy_extension_header(http_kwargs['headers']) + + request = self.httpx_client.build_request( + method, + f'{self.url}{path}', + json=json, + params=params, + **http_kwargs, + ) + return await self._send_request(request) diff --git a/src/a2a/types.py b/src/a2a/compat/v0_3/types.py similarity index 100% rename from src/a2a/types.py rename to src/a2a/compat/v0_3/types.py diff --git a/src/a2a/compat/v0_3/versions.py b/src/a2a/compat/v0_3/versions.py new file mode 100644 index 000000000..67808d5f2 --- /dev/null +++ b/src/a2a/compat/v0_3/versions.py @@ -0,0 +1,18 @@ +"""Utility functions for protocol version comparison and validation.""" + +from packaging.version import InvalidVersion, Version + +from a2a.utils.constants import PROTOCOL_VERSION_0_3, PROTOCOL_VERSION_1_0 + + +def is_legacy_version(version: str | None) -> bool: + """Determines if the given version is a legacy protocol version (>=0.3 and <1.0).""" + if not version: + return False + try: + v = Version(version) + return ( + Version(PROTOCOL_VERSION_0_3) <= v < Version(PROTOCOL_VERSION_1_0) + ) + except InvalidVersion: + return False diff --git a/src/a2a/contrib/tasks/vertex_task_converter.py b/src/a2a/contrib/tasks/vertex_task_converter.py index 16820a55f..9441d2153 100644 --- a/src/a2a/contrib/tasks/vertex_task_converter.py +++ b/src/a2a/contrib/tasks/vertex_task_converter.py @@ -14,7 +14,7 @@ from dataclasses import dataclass from typing import Any -from a2a.types import ( +from a2a.compat.v0_3.types import ( Artifact, DataPart, FilePart, @@ -173,7 +173,7 @@ def to_sdk_part( metadata=part_metadata, ) ) - if stored_part.file_data: + if stored_part.file_data and stored_part.file_data.file_uri: return Part( root=FilePart( file=FileWithUri( diff --git a/src/a2a/contrib/tasks/vertex_task_store.py b/src/a2a/contrib/tasks/vertex_task_store.py index 91f514af8..602d5c6fd 100644 --- a/src/a2a/contrib/tasks/vertex_task_store.py +++ b/src/a2a/contrib/tasks/vertex_task_store.py @@ -13,10 +13,12 @@ "'pip install a2a-sdk[vertex]'" ) from e +from a2a.compat.v0_3.conversions import to_compat_task, to_core_task +from a2a.compat.v0_3.types import Task as CompatTask from a2a.contrib.tasks import vertex_task_converter from a2a.server.context import ServerCallContext from a2a.server.tasks.task_store import TaskStore -from a2a.types import Task # Task is the Pydantic model +from a2a.types.a2a_pb2 import ListTasksRequest, ListTasksResponse, Task logger = logging.getLogger(__name__) @@ -30,7 +32,7 @@ class VertexTaskStore(TaskStore): def __init__( self, - client: vertexai.Client, + client: vertexai.Client, # type: ignore agent_engine_resource_id: str, ) -> None: """Initializes the VertexTaskStore. @@ -42,17 +44,16 @@ def __init__( self._client = client self._agent_engine_resource_id = agent_engine_resource_id - async def save( - self, task: Task, context: ServerCallContext | None = None - ) -> None: + async def save(self, task: Task, context: ServerCallContext) -> None: """Saves or updates a task in the store.""" - previous_task = await self._get_stored_task(task.id) + compat_task = to_compat_task(task) + previous_task = await self._get_stored_task(compat_task.id) if previous_task is None: - await self._create(task) + await self._create(compat_task) else: - await self._update(previous_task, task) + await self._update(previous_task, compat_task) - async def _create(self, sdk_task: Task) -> None: + async def _create(self, sdk_task: CompatTask) -> None: stored_task = vertex_task_converter.to_stored_task(sdk_task) await self._client.aio.agent_engines.a2a_tasks.create( name=self._agent_engine_resource_id, @@ -65,7 +66,10 @@ async def _create(self, sdk_task: Task) -> None: ) def _get_status_change_event( - self, previous_task: Task, task: Task, event_sequence_number: int + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, ) -> vertexai_types.TaskEvent | None: if task.status.state != previous_task.status.state: return vertexai_types.TaskEvent( @@ -82,8 +86,8 @@ def _get_status_change_event( def _get_status_details_change_event( self, - previous_task: Task, - task: Task, + previous_task: CompatTask, + task: CompatTask, event_sequence_number: int, ) -> vertexai_types.TaskEvent | None: if task.status.message != previous_task.status.message: @@ -107,7 +111,10 @@ def _get_status_details_change_event( return None def _get_metadata_change_event( - self, previous_task: Task, task: Task, event_sequence_number: int + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, ) -> vertexai_types.TaskEvent | None: # We generate metadata change events if the metadata was changed. # We don't generate events if the metadata was changed from @@ -126,7 +133,10 @@ def _get_metadata_change_event( return None def _get_artifacts_change_event( - self, previous_task: Task, task: Task, event_sequence_number: int + self, + previous_task: CompatTask, + task: CompatTask, + event_sequence_number: int, ) -> vertexai_types.TaskEvent | None: if task.artifacts != previous_task.artifacts: task_artifact_change = vertexai_types.TaskArtifactChange() @@ -176,7 +186,7 @@ def _get_artifacts_change_event( return None async def _update( - self, previous_stored_task: vertexai_types.A2aTask, task: Task + self, previous_stored_task: vertexai_types.A2aTask, task: CompatTask ) -> None: previous_task = vertex_task_converter.to_sdk_task(previous_stored_task) events = [] @@ -232,16 +242,22 @@ async def _get_stored_task( return a2a_task async def get( - self, task_id: str, context: ServerCallContext | None = None + self, task_id: str, context: ServerCallContext ) -> Task | None: """Retrieves a task from the database by ID.""" a2a_task = await self._get_stored_task(task_id) if a2a_task is None: return None - return vertex_task_converter.to_sdk_task(a2a_task) + return to_core_task(vertex_task_converter.to_sdk_task(a2a_task)) - async def delete( - self, task_id: str, context: ServerCallContext | None = None - ) -> None: + async def list( + self, + params: ListTasksRequest, + context: ServerCallContext, + ) -> ListTasksResponse: + """Retrieves a list of tasks from the store.""" + raise NotImplementedError + + async def delete(self, task_id: str, context: ServerCallContext) -> None: """The backend doesn't support deleting tasks, so this is not implemented.""" raise NotImplementedError diff --git a/src/a2a/extensions/common.py b/src/a2a/extensions/common.py index cba3517e4..06ccf8f40 100644 --- a/src/a2a/extensions/common.py +++ b/src/a2a/extensions/common.py @@ -1,9 +1,7 @@ -from typing import Any +from a2a.types.a2a_pb2 import AgentCard, AgentExtension -from a2a.types import AgentCard, AgentExtension - -HTTP_EXTENSION_HEADER = 'X-A2A-Extensions' +HTTP_EXTENSION_HEADER = 'A2A-Extensions' def get_requested_extensions(values: list[str]) -> set[str]: @@ -27,15 +25,3 @@ def find_extension_by_uri(card: AgentCard, uri: str) -> AgentExtension | None: return ext return None - - -def update_extension_header( - http_kwargs: dict[str, Any] | None, - extensions: list[str] | None, -) -> dict[str, Any]: - """Update the X-A2A-Extensions header with active extensions.""" - http_kwargs = http_kwargs or {} - if extensions is not None: - headers = http_kwargs.setdefault('headers', {}) - headers[HTTP_EXTENSION_HEADER] = ','.join(extensions) - return http_kwargs diff --git a/src/a2a/grpc/a2a_pb2.py b/src/a2a/grpc/a2a_pb2.py deleted file mode 100644 index 9b4b73013..000000000 --- a/src/a2a/grpc/a2a_pb2.py +++ /dev/null @@ -1,195 +0,0 @@ -# -*- coding: utf-8 -*- -# Generated by the protocol buffer compiler. DO NOT EDIT! -# NO CHECKED-IN PROTOBUF GENCODE -# source: a2a.proto -# Protobuf Python Version: 5.29.3 -"""Generated protocol buffer code.""" -from google.protobuf import descriptor as _descriptor -from google.protobuf import descriptor_pool as _descriptor_pool -from google.protobuf import runtime_version as _runtime_version -from google.protobuf import symbol_database as _symbol_database -from google.protobuf.internal import builder as _builder -_runtime_version.ValidateProtobufRuntimeVersion( - _runtime_version.Domain.PUBLIC, - 5, - 29, - 3, - '', - 'a2a.proto' -) -# @@protoc_insertion_point(imports) - -_sym_db = _symbol_database.Default() - - -from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 -from google.api import client_pb2 as google_dot_api_dot_client__pb2 -from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 -from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 -from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 -from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 - - -DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ta2a.proto\x12\x06\x61\x32\x61.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xde\x01\n\x18SendMessageConfiguration\x12\x32\n\x15\x61\x63\x63\x65pted_output_modes\x18\x01 \x03(\tR\x13\x61\x63\x63\x65ptedOutputModes\x12K\n\x11push_notification\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x10pushNotification\x12%\n\x0ehistory_length\x18\x03 \x01(\x05R\rhistoryLength\x12\x1a\n\x08\x62locking\x18\x04 \x01(\x08R\x08\x62locking\"\xf1\x01\n\x04Task\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 \x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12.\n\tartifacts\x18\x04 \x03(\x0b\x32\x10.a2a.v1.ArtifactR\tartifacts\x12)\n\x07history\x18\x05 \x03(\x0b\x32\x0f.a2a.v1.MessageR\x07history\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x99\x01\n\nTaskStatus\x12\'\n\x05state\x18\x01 \x01(\x0e\x32\x11.a2a.v1.TaskStateR\x05state\x12(\n\x06update\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageR\x07message\x12\x38\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ttimestamp\"\xa9\x01\n\x04Part\x12\x14\n\x04text\x18\x01 \x01(\tH\x00R\x04text\x12&\n\x04\x66ile\x18\x02 \x01(\x0b\x32\x10.a2a.v1.FilePartH\x00R\x04\x66ile\x12&\n\x04\x64\x61ta\x18\x03 \x01(\x0b\x32\x10.a2a.v1.DataPartH\x00R\x04\x64\x61ta\x12\x33\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadataB\x06\n\x04part\"\x93\x01\n\x08\x46ilePart\x12$\n\rfile_with_uri\x18\x01 \x01(\tH\x00R\x0b\x66ileWithUri\x12(\n\x0f\x66ile_with_bytes\x18\x02 \x01(\x0cH\x00R\rfileWithBytes\x12\x1b\n\tmime_type\x18\x03 \x01(\tR\x08mimeType\x12\x12\n\x04name\x18\x04 \x01(\tR\x04nameB\x06\n\x04\x66ile\"7\n\x08\x44\x61taPart\x12+\n\x04\x64\x61ta\x18\x01 \x01(\x0b\x32\x17.google.protobuf.StructR\x04\x64\x61ta\"\xff\x01\n\x07Message\x12\x1d\n\nmessage_id\x18\x01 \x01(\tR\tmessageId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12\x17\n\x07task_id\x18\x03 \x01(\tR\x06taskId\x12 \n\x04role\x18\x04 \x01(\x0e\x32\x0c.a2a.v1.RoleR\x04role\x12&\n\x07\x63ontent\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x07\x63ontent\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xda\x01\n\x08\x41rtifact\x12\x1f\n\x0b\x61rtifact_id\x18\x01 \x01(\tR\nartifactId\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x04 \x01(\tR\x0b\x64\x65scription\x12\"\n\x05parts\x18\x05 \x03(\x0b\x32\x0c.a2a.v1.PartR\x05parts\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\"\xc6\x01\n\x15TaskStatusUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12*\n\x06status\x18\x03 \x01(\x0b\x32\x12.a2a.v1.TaskStatusR\x06status\x12\x14\n\x05\x66inal\x18\x04 \x01(\x08R\x05\x66inal\x12\x33\n\x08metadata\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\xeb\x01\n\x17TaskArtifactUpdateEvent\x12\x17\n\x07task_id\x18\x01 \x01(\tR\x06taskId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12,\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x10.a2a.v1.ArtifactR\x08\x61rtifact\x12\x16\n\x06\x61ppend\x18\x04 \x01(\x08R\x06\x61ppend\x12\x1d\n\nlast_chunk\x18\x05 \x01(\x08R\tlastChunk\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\x94\x01\n\x16PushNotificationConfig\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x10\n\x03url\x18\x02 \x01(\tR\x03url\x12\x14\n\x05token\x18\x03 \x01(\tR\x05token\x12\x42\n\x0e\x61uthentication\x18\x04 \x01(\x0b\x32\x1a.a2a.v1.AuthenticationInfoR\x0e\x61uthentication\"P\n\x12\x41uthenticationInfo\x12\x18\n\x07schemes\x18\x01 \x03(\tR\x07schemes\x12 \n\x0b\x63redentials\x18\x02 \x01(\tR\x0b\x63redentials\"@\n\x0e\x41gentInterface\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\x1c\n\ttransport\x18\x02 \x01(\tR\ttransport\"\xc8\x07\n\tAgentCard\x12)\n\x10protocol_version\x18\x10 \x01(\tR\x0fprotocolVersion\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x10\n\x03url\x18\x03 \x01(\tR\x03url\x12/\n\x13preferred_transport\x18\x0e \x01(\tR\x12preferredTransport\x12K\n\x15\x61\x64\x64itional_interfaces\x18\x0f \x03(\x0b\x32\x16.a2a.v1.AgentInterfaceR\x14\x61\x64\x64itionalInterfaces\x12\x31\n\x08provider\x18\x04 \x01(\x0b\x32\x15.a2a.v1.AgentProviderR\x08provider\x12\x18\n\x07version\x18\x05 \x01(\tR\x07version\x12+\n\x11\x64ocumentation_url\x18\x06 \x01(\tR\x10\x64ocumentationUrl\x12=\n\x0c\x63\x61pabilities\x18\x07 \x01(\x0b\x32\x19.a2a.v1.AgentCapabilitiesR\x0c\x63\x61pabilities\x12Q\n\x10security_schemes\x18\x08 \x03(\x0b\x32&.a2a.v1.AgentCard.SecuritySchemesEntryR\x0fsecuritySchemes\x12,\n\x08security\x18\t \x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\x12.\n\x13\x64\x65\x66\x61ult_input_modes\x18\n \x03(\tR\x11\x64\x65\x66\x61ultInputModes\x12\x30\n\x14\x64\x65\x66\x61ult_output_modes\x18\x0b \x03(\tR\x12\x64\x65\x66\x61ultOutputModes\x12*\n\x06skills\x18\x0c \x03(\x0b\x32\x12.a2a.v1.AgentSkillR\x06skills\x12O\n$supports_authenticated_extended_card\x18\r \x01(\x08R!supportsAuthenticatedExtendedCard\x12:\n\nsignatures\x18\x11 \x03(\x0b\x32\x1a.a2a.v1.AgentCardSignatureR\nsignatures\x12\x19\n\x08icon_url\x18\x12 \x01(\tR\x07iconUrl\x1aZ\n\x14SecuritySchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12,\n\x05value\x18\x02 \x01(\x0b\x32\x16.a2a.v1.SecuritySchemeR\x05value:\x02\x38\x01\"E\n\rAgentProvider\x12\x10\n\x03url\x18\x01 \x01(\tR\x03url\x12\"\n\x0corganization\x18\x02 \x01(\tR\x0corganization\"\x98\x01\n\x11\x41gentCapabilities\x12\x1c\n\tstreaming\x18\x01 \x01(\x08R\tstreaming\x12-\n\x12push_notifications\x18\x02 \x01(\x08R\x11pushNotifications\x12\x36\n\nextensions\x18\x03 \x03(\x0b\x32\x16.a2a.v1.AgentExtensionR\nextensions\"\x91\x01\n\x0e\x41gentExtension\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08required\x18\x03 \x01(\x08R\x08required\x12/\n\x06params\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x06params\"\xf4\x01\n\nAgentSkill\x12\x0e\n\x02id\x18\x01 \x01(\tR\x02id\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x03 \x01(\tR\x0b\x64\x65scription\x12\x12\n\x04tags\x18\x04 \x03(\tR\x04tags\x12\x1a\n\x08\x65xamples\x18\x05 \x03(\tR\x08\x65xamples\x12\x1f\n\x0binput_modes\x18\x06 \x03(\tR\ninputModes\x12!\n\x0coutput_modes\x18\x07 \x03(\tR\x0boutputModes\x12,\n\x08security\x18\x08 \x03(\x0b\x32\x10.a2a.v1.SecurityR\x08security\"\x8b\x01\n\x12\x41gentCardSignature\x12!\n\tprotected\x18\x01 \x01(\tB\x03\xe0\x41\x02R\tprotected\x12!\n\tsignature\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tsignature\x12/\n\x06header\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x06header\"\x8a\x01\n\x1aTaskPushNotificationConfig\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\x12X\n\x18push_notification_config\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.PushNotificationConfigR\x16pushNotificationConfig\" \n\nStringList\x12\x12\n\x04list\x18\x01 \x03(\tR\x04list\"\x93\x01\n\x08Security\x12\x37\n\x07schemes\x18\x01 \x03(\x0b\x32\x1d.a2a.v1.Security.SchemesEntryR\x07schemes\x1aN\n\x0cSchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12(\n\x05value\x18\x02 \x01(\x0b\x32\x12.a2a.v1.StringListR\x05value:\x02\x38\x01\"\xe6\x03\n\x0eSecurityScheme\x12U\n\x17\x61pi_key_security_scheme\x18\x01 \x01(\x0b\x32\x1c.a2a.v1.APIKeySecuritySchemeH\x00R\x14\x61piKeySecurityScheme\x12[\n\x19http_auth_security_scheme\x18\x02 \x01(\x0b\x32\x1e.a2a.v1.HTTPAuthSecuritySchemeH\x00R\x16httpAuthSecurityScheme\x12T\n\x16oauth2_security_scheme\x18\x03 \x01(\x0b\x32\x1c.a2a.v1.OAuth2SecuritySchemeH\x00R\x14oauth2SecurityScheme\x12k\n\x1fopen_id_connect_security_scheme\x18\x04 \x01(\x0b\x32#.a2a.v1.OpenIdConnectSecuritySchemeH\x00R\x1bopenIdConnectSecurityScheme\x12S\n\x14mtls_security_scheme\x18\x05 \x01(\x0b\x32\x1f.a2a.v1.MutualTlsSecuritySchemeH\x00R\x12mtlsSecuritySchemeB\x08\n\x06scheme\"h\n\x14\x41PIKeySecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08location\x18\x02 \x01(\tR\x08location\x12\x12\n\x04name\x18\x03 \x01(\tR\x04name\"w\n\x16HTTPAuthSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x16\n\x06scheme\x18\x02 \x01(\tR\x06scheme\x12#\n\rbearer_format\x18\x03 \x01(\tR\x0c\x62\x65\x61rerFormat\"\x92\x01\n\x14OAuth2SecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12(\n\x05\x66lows\x18\x02 \x01(\x0b\x32\x12.a2a.v1.OAuthFlowsR\x05\x66lows\x12.\n\x13oauth2_metadata_url\x18\x03 \x01(\tR\x11oauth2MetadataUrl\"n\n\x1bOpenIdConnectSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12-\n\x13open_id_connect_url\x18\x02 \x01(\tR\x10openIdConnectUrl\";\n\x17MutualTlsSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\"\xb0\x02\n\nOAuthFlows\x12S\n\x12\x61uthorization_code\x18\x01 \x01(\x0b\x32\".a2a.v1.AuthorizationCodeOAuthFlowH\x00R\x11\x61uthorizationCode\x12S\n\x12\x63lient_credentials\x18\x02 \x01(\x0b\x32\".a2a.v1.ClientCredentialsOAuthFlowH\x00R\x11\x63lientCredentials\x12\x37\n\x08implicit\x18\x03 \x01(\x0b\x32\x19.a2a.v1.ImplicitOAuthFlowH\x00R\x08implicit\x12\x37\n\x08password\x18\x04 \x01(\x0b\x32\x19.a2a.v1.PasswordOAuthFlowH\x00R\x08passwordB\x06\n\x04\x66low\"\x8a\x02\n\x1a\x41uthorizationCodeOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1b\n\ttoken_url\x18\x02 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x03 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x04 \x03(\x0b\x32..a2a.v1.AuthorizationCodeOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdd\x01\n\x1a\x43lientCredentialsOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12\x46\n\x06scopes\x18\x03 \x03(\x0b\x32..a2a.v1.ClientCredentialsOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdb\x01\n\x11ImplicitOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 \x03(\x0b\x32%.a2a.v1.ImplicitOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xcb\x01\n\x11PasswordOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12=\n\x06scopes\x18\x03 \x03(\x0b\x32%.a2a.v1.PasswordOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xc1\x01\n\x12SendMessageRequest\x12.\n\x07request\x18\x01 \x01(\x0b\x32\x0f.a2a.v1.MessageB\x03\xe0\x41\x02R\x07message\x12\x46\n\rconfiguration\x18\x02 \x01(\x0b\x32 .a2a.v1.SendMessageConfigurationR\rconfiguration\x12\x33\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"P\n\x0eGetTaskRequest\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0ehistory_length\x18\x02 \x01(\x05R\rhistoryLength\"\'\n\x11\x43\x61ncelTaskRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\":\n$GetTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"=\n\'DeleteTaskPushNotificationConfigRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"\xa9\x01\n\'CreateTaskPushNotificationConfigRequest\x12\x1b\n\x06parent\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06parent\x12 \n\tconfig_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08\x63onfigId\x12?\n\x06\x63onfig\x18\x03 \x01(\x0b\x32\".a2a.v1.TaskPushNotificationConfigB\x03\xe0\x41\x02R\x06\x63onfig\"-\n\x17TaskSubscriptionRequest\x12\x12\n\x04name\x18\x01 \x01(\tR\x04name\"{\n%ListTaskPushNotificationConfigRequest\x12\x16\n\x06parent\x18\x01 \x01(\tR\x06parent\x12\x1b\n\tpage_size\x18\x02 \x01(\x05R\x08pageSize\x12\x1d\n\npage_token\x18\x03 \x01(\tR\tpageToken\"\x15\n\x13GetAgentCardRequest\"m\n\x13SendMessageResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07messageB\t\n\x07payload\"\xfa\x01\n\x0eStreamResponse\x12\"\n\x04task\x18\x01 \x01(\x0b\x32\x0c.a2a.v1.TaskH\x00R\x04task\x12\'\n\x03msg\x18\x02 \x01(\x0b\x32\x0f.a2a.v1.MessageH\x00R\x07message\x12\x44\n\rstatus_update\x18\x03 \x01(\x0b\x32\x1d.a2a.v1.TaskStatusUpdateEventH\x00R\x0cstatusUpdate\x12J\n\x0f\x61rtifact_update\x18\x04 \x01(\x0b\x32\x1f.a2a.v1.TaskArtifactUpdateEventH\x00R\x0e\x61rtifactUpdateB\t\n\x07payload\"\x8e\x01\n&ListTaskPushNotificationConfigResponse\x12<\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32\".a2a.v1.TaskPushNotificationConfigR\x07\x63onfigs\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken*\xfa\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x18\n\x14TASK_STATE_SUBMITTED\x10\x01\x12\x16\n\x12TASK_STATE_WORKING\x10\x02\x12\x18\n\x14TASK_STATE_COMPLETED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x18\n\x14TASK_STATE_CANCELLED\x10\x05\x12\x1d\n\x19TASK_STATE_INPUT_REQUIRED\x10\x06\x12\x17\n\x13TASK_STATE_REJECTED\x10\x07\x12\x1c\n\x18TASK_STATE_AUTH_REQUIRED\x10\x08*;\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\r\n\tROLE_USER\x10\x01\x12\x0e\n\nROLE_AGENT\x10\x02\x32\xbb\n\n\nA2AService\x12\x63\n\x0bSendMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x1b.a2a.v1.SendMessageResponse\"\x1b\x82\xd3\xe4\x93\x02\x15\"\x10/v1/message:send:\x01*\x12k\n\x14SendStreamingMessage\x12\x1a.a2a.v1.SendMessageRequest\x1a\x16.a2a.v1.StreamResponse\"\x1d\x82\xd3\xe4\x93\x02\x17\"\x12/v1/message:stream:\x01*0\x01\x12R\n\x07GetTask\x12\x16.a2a.v1.GetTaskRequest\x1a\x0c.a2a.v1.Task\"!\xda\x41\x04name\x82\xd3\xe4\x93\x02\x14\x12\x12/v1/{name=tasks/*}\x12[\n\nCancelTask\x12\x19.a2a.v1.CancelTaskRequest\x1a\x0c.a2a.v1.Task\"$\x82\xd3\xe4\x93\x02\x1e\"\x19/v1/{name=tasks/*}:cancel:\x01*\x12s\n\x10TaskSubscription\x12\x1f.a2a.v1.TaskSubscriptionRequest\x1a\x16.a2a.v1.StreamResponse\"$\x82\xd3\xe4\x93\x02\x1e\x12\x1c/v1/{name=tasks/*}:subscribe0\x01\x12\xc5\x01\n CreateTaskPushNotificationConfig\x12/.a2a.v1.CreateTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\"L\xda\x41\rparent,config\x82\xd3\xe4\x93\x02\x36\",/v1/{parent=tasks/*/pushNotificationConfigs}:\x06\x63onfig\x12\xae\x01\n\x1dGetTaskPushNotificationConfig\x12,.a2a.v1.GetTaskPushNotificationConfigRequest\x1a\".a2a.v1.TaskPushNotificationConfig\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.\x12,/v1/{name=tasks/*/pushNotificationConfigs/*}\x12\xbe\x01\n\x1eListTaskPushNotificationConfig\x12-.a2a.v1.ListTaskPushNotificationConfigRequest\x1a..a2a.v1.ListTaskPushNotificationConfigResponse\"=\xda\x41\x06parent\x82\xd3\xe4\x93\x02.\x12,/v1/{parent=tasks/*}/pushNotificationConfigs\x12P\n\x0cGetAgentCard\x12\x1b.a2a.v1.GetAgentCardRequest\x1a\x11.a2a.v1.AgentCard\"\x10\x82\xd3\xe4\x93\x02\n\x12\x08/v1/card\x12\xa8\x01\n DeleteTaskPushNotificationConfig\x12/.a2a.v1.DeleteTaskPushNotificationConfigRequest\x1a\x16.google.protobuf.Empty\";\xda\x41\x04name\x82\xd3\xe4\x93\x02.*,/v1/{name=tasks/*/pushNotificationConfigs/*}Bi\n\ncom.a2a.v1B\x08\x41\x32\x61ProtoP\x01Z\x18google.golang.org/a2a/v1\xa2\x02\x03\x41XX\xaa\x02\x06\x41\x32\x61.V1\xca\x02\x06\x41\x32\x61\\V1\xe2\x02\x12\x41\x32\x61\\V1\\GPBMetadata\xea\x02\x07\x41\x32\x61::V1b\x06proto3') - -_globals = globals() -_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) -_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'a2a_pb2', _globals) -if not _descriptor._USE_C_DESCRIPTORS: - _globals['DESCRIPTOR']._loaded_options = None - _globals['DESCRIPTOR']._serialized_options = b'\n\ncom.a2a.v1B\010A2aProtoP\001Z\030google.golang.org/a2a/v1\242\002\003AXX\252\002\006A2a.V1\312\002\006A2a\\V1\342\002\022A2a\\V1\\GPBMetadata\352\002\007A2a::V1' - _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._loaded_options = None - _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_options = b'8\001' - _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._loaded_options = None - _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._serialized_options = b'\340A\002' - _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._loaded_options = None - _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._serialized_options = b'\340A\002' - _globals['_SECURITY_SCHEMESENTRY']._loaded_options = None - _globals['_SECURITY_SCHEMESENTRY']._serialized_options = b'8\001' - _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._loaded_options = None - _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' - _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._loaded_options = None - _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' - _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._loaded_options = None - _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' - _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._loaded_options = None - _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' - _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._loaded_options = None - _globals['_SENDMESSAGEREQUEST'].fields_by_name['request']._serialized_options = b'\340A\002' - _globals['_GETTASKREQUEST'].fields_by_name['name']._loaded_options = None - _globals['_GETTASKREQUEST'].fields_by_name['name']._serialized_options = b'\340A\002' - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._loaded_options = None - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['parent']._serialized_options = b'\340A\002' - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._loaded_options = None - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config_id']._serialized_options = b'\340A\002' - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._loaded_options = None - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['config']._serialized_options = b'\340A\002' - _globals['_A2ASERVICE'].methods_by_name['SendMessage']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['SendMessage']._serialized_options = b'\202\323\344\223\002\025\"\020/v1/message:send:\001*' - _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._serialized_options = b'\202\323\344\223\002\027\"\022/v1/message:stream:\001*' - _globals['_A2ASERVICE'].methods_by_name['GetTask']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['GetTask']._serialized_options = b'\332A\004name\202\323\344\223\002\024\022\022/v1/{name=tasks/*}' - _globals['_A2ASERVICE'].methods_by_name['CancelTask']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['CancelTask']._serialized_options = b'\202\323\344\223\002\036\"\031/v1/{name=tasks/*}:cancel:\001*' - _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['TaskSubscription']._serialized_options = b'\202\323\344\223\002\036\022\034/v1/{name=tasks/*}:subscribe' - _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._serialized_options = b'\332A\rparent,config\202\323\344\223\0026\",/v1/{parent=tasks/*/pushNotificationConfigs}:\006config' - _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.\022,/v1/{name=tasks/*/pushNotificationConfigs/*}' - _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfig']._serialized_options = b'\332A\006parent\202\323\344\223\002.\022,/v1/{parent=tasks/*}/pushNotificationConfigs' - _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['GetAgentCard']._serialized_options = b'\202\323\344\223\002\n\022\010/v1/card' - _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._loaded_options = None - _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._serialized_options = b'\332A\004name\202\323\344\223\002.*,/v1/{name=tasks/*/pushNotificationConfigs/*}' - _globals['_TASKSTATE']._serialized_start=8066 - _globals['_TASKSTATE']._serialized_end=8316 - _globals['_ROLE']._serialized_start=8318 - _globals['_ROLE']._serialized_end=8377 - _globals['_SENDMESSAGECONFIGURATION']._serialized_start=202 - _globals['_SENDMESSAGECONFIGURATION']._serialized_end=424 - _globals['_TASK']._serialized_start=427 - _globals['_TASK']._serialized_end=668 - _globals['_TASKSTATUS']._serialized_start=671 - _globals['_TASKSTATUS']._serialized_end=824 - _globals['_PART']._serialized_start=827 - _globals['_PART']._serialized_end=996 - _globals['_FILEPART']._serialized_start=999 - _globals['_FILEPART']._serialized_end=1146 - _globals['_DATAPART']._serialized_start=1148 - _globals['_DATAPART']._serialized_end=1203 - _globals['_MESSAGE']._serialized_start=1206 - _globals['_MESSAGE']._serialized_end=1461 - _globals['_ARTIFACT']._serialized_start=1464 - _globals['_ARTIFACT']._serialized_end=1682 - _globals['_TASKSTATUSUPDATEEVENT']._serialized_start=1685 - _globals['_TASKSTATUSUPDATEEVENT']._serialized_end=1883 - _globals['_TASKARTIFACTUPDATEEVENT']._serialized_start=1886 - _globals['_TASKARTIFACTUPDATEEVENT']._serialized_end=2121 - _globals['_PUSHNOTIFICATIONCONFIG']._serialized_start=2124 - _globals['_PUSHNOTIFICATIONCONFIG']._serialized_end=2272 - _globals['_AUTHENTICATIONINFO']._serialized_start=2274 - _globals['_AUTHENTICATIONINFO']._serialized_end=2354 - _globals['_AGENTINTERFACE']._serialized_start=2356 - _globals['_AGENTINTERFACE']._serialized_end=2420 - _globals['_AGENTCARD']._serialized_start=2423 - _globals['_AGENTCARD']._serialized_end=3391 - _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_start=3301 - _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_end=3391 - _globals['_AGENTPROVIDER']._serialized_start=3393 - _globals['_AGENTPROVIDER']._serialized_end=3462 - _globals['_AGENTCAPABILITIES']._serialized_start=3465 - _globals['_AGENTCAPABILITIES']._serialized_end=3617 - _globals['_AGENTEXTENSION']._serialized_start=3620 - _globals['_AGENTEXTENSION']._serialized_end=3765 - _globals['_AGENTSKILL']._serialized_start=3768 - _globals['_AGENTSKILL']._serialized_end=4012 - _globals['_AGENTCARDSIGNATURE']._serialized_start=4015 - _globals['_AGENTCARDSIGNATURE']._serialized_end=4154 - _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_start=4157 - _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_end=4295 - _globals['_STRINGLIST']._serialized_start=4297 - _globals['_STRINGLIST']._serialized_end=4329 - _globals['_SECURITY']._serialized_start=4332 - _globals['_SECURITY']._serialized_end=4479 - _globals['_SECURITY_SCHEMESENTRY']._serialized_start=4401 - _globals['_SECURITY_SCHEMESENTRY']._serialized_end=4479 - _globals['_SECURITYSCHEME']._serialized_start=4482 - _globals['_SECURITYSCHEME']._serialized_end=4968 - _globals['_APIKEYSECURITYSCHEME']._serialized_start=4970 - _globals['_APIKEYSECURITYSCHEME']._serialized_end=5074 - _globals['_HTTPAUTHSECURITYSCHEME']._serialized_start=5076 - _globals['_HTTPAUTHSECURITYSCHEME']._serialized_end=5195 - _globals['_OAUTH2SECURITYSCHEME']._serialized_start=5198 - _globals['_OAUTH2SECURITYSCHEME']._serialized_end=5344 - _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_start=5346 - _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_end=5456 - _globals['_MUTUALTLSSECURITYSCHEME']._serialized_start=5458 - _globals['_MUTUALTLSSECURITYSCHEME']._serialized_end=5517 - _globals['_OAUTHFLOWS']._serialized_start=5520 - _globals['_OAUTHFLOWS']._serialized_end=5824 - _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_start=5827 - _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_end=6093 - _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 - _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 - _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_start=6096 - _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_end=6317 - _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 - _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 - _globals['_IMPLICITOAUTHFLOW']._serialized_start=6320 - _globals['_IMPLICITOAUTHFLOW']._serialized_end=6539 - _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 - _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 - _globals['_PASSWORDOAUTHFLOW']._serialized_start=6542 - _globals['_PASSWORDOAUTHFLOW']._serialized_end=6745 - _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_start=6036 - _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_end=6093 - _globals['_SENDMESSAGEREQUEST']._serialized_start=6748 - _globals['_SENDMESSAGEREQUEST']._serialized_end=6941 - _globals['_GETTASKREQUEST']._serialized_start=6943 - _globals['_GETTASKREQUEST']._serialized_end=7023 - _globals['_CANCELTASKREQUEST']._serialized_start=7025 - _globals['_CANCELTASKREQUEST']._serialized_end=7064 - _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7066 - _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7124 - _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7126 - _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7187 - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7190 - _globals['_CREATETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7359 - _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_start=7361 - _globals['_TASKSUBSCRIPTIONREQUEST']._serialized_end=7406 - _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=7408 - _globals['_LISTTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=7531 - _globals['_GETAGENTCARDREQUEST']._serialized_start=7533 - _globals['_GETAGENTCARDREQUEST']._serialized_end=7554 - _globals['_SENDMESSAGERESPONSE']._serialized_start=7556 - _globals['_SENDMESSAGERESPONSE']._serialized_end=7665 - _globals['_STREAMRESPONSE']._serialized_start=7668 - _globals['_STREAMRESPONSE']._serialized_end=7918 - _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_start=7921 - _globals['_LISTTASKPUSHNOTIFICATIONCONFIGRESPONSE']._serialized_end=8063 - _globals['_A2ASERVICE']._serialized_start=8380 - _globals['_A2ASERVICE']._serialized_end=9719 -# @@protoc_insertion_point(module_scope) diff --git a/src/a2a/helpers/__init__.py b/src/a2a/helpers/__init__.py new file mode 100644 index 000000000..c42429d43 --- /dev/null +++ b/src/a2a/helpers/__init__.py @@ -0,0 +1,34 @@ +"""Helper functions for the A2A Python SDK.""" + +from a2a.helpers.agent_card import display_agent_card +from a2a.helpers.proto_helpers import ( + get_artifact_text, + get_message_text, + get_stream_response_text, + get_text_parts, + new_artifact, + new_message, + new_task, + new_task_from_user_message, + new_text_artifact, + new_text_artifact_update_event, + new_text_message, + new_text_status_update_event, +) + + +__all__ = [ + 'display_agent_card', + 'get_artifact_text', + 'get_message_text', + 'get_stream_response_text', + 'get_text_parts', + 'new_artifact', + 'new_message', + 'new_task', + 'new_task_from_user_message', + 'new_text_artifact', + 'new_text_artifact_update_event', + 'new_text_message', + 'new_text_status_update_event', +] diff --git a/src/a2a/helpers/agent_card.py b/src/a2a/helpers/agent_card.py new file mode 100644 index 000000000..0962e67fb --- /dev/null +++ b/src/a2a/helpers/agent_card.py @@ -0,0 +1,76 @@ +"""Utility functions for inspecting AgentCard instances.""" + +from a2a.types.a2a_pb2 import AgentCard + + +def display_agent_card(card: AgentCard) -> None: + """Print a human-readable summary of an AgentCard to stdout. + + Args: + card: The AgentCard proto message to display. + """ + width = 52 + sep = '=' * width + thin = '-' * width + + lines: list[str] = [sep, 'AgentCard'.center(width), sep] + + lines += [ + '--- General ---', + f'Name : {card.name}', + f'Description : {card.description}', + f'Version : {card.version}', + ] + if card.documentation_url: + lines.append(f'Docs URL : {card.documentation_url}') + if card.icon_url: + lines.append(f'Icon URL : {card.icon_url}') + if card.HasField('provider'): + url_suffix = f' ({card.provider.url})' if card.provider.url else '' + lines.append(f'Provider : {card.provider.organization}{url_suffix}') + + lines += ['', '--- Interfaces ---'] + for i, iface in enumerate(card.supported_interfaces): + binding = f'{iface.protocol_binding} {iface.protocol_version}'.strip() + parts = [ + p + for p in [binding, f'tenant={iface.tenant}' if iface.tenant else ''] + if p + ] + suffix = f' ({", ".join(parts)})' if parts else '' + line = f' [{i}] {iface.url}{suffix}' + lines.append(line) + + lines += [ + '', + '--- Capabilities ---', + f'Streaming : {card.capabilities.streaming}', + f'Push notifications : {card.capabilities.push_notifications}', + f'Extended agent card : {card.capabilities.extended_agent_card}', + ] + + lines += [ + '', + '--- I/O Modes ---', + f'Input : {", ".join(card.default_input_modes) or "(none)"}', + f'Output : {", ".join(card.default_output_modes) or "(none)"}', + ] + + lines += ['', '--- Skills ---'] + if card.skills: + for skill in card.skills: + lines += [ + thin, + f' ID : {skill.id}', + f' Name : {skill.name}', + f' Description : {skill.description}', + f' Tags : {", ".join(skill.tags) or "(none)"}', + ] + if skill.examples: + for ex in skill.examples: + lines.append(f' Example : {ex}') + else: + lines.append(' (none)') + + lines.append(sep) + print('\n'.join(lines)) diff --git a/src/a2a/helpers/proto_helpers.py b/src/a2a/helpers/proto_helpers.py new file mode 100644 index 000000000..79e1f739d --- /dev/null +++ b/src/a2a/helpers/proto_helpers.py @@ -0,0 +1,214 @@ +"""Unified helper functions for creating and handling A2A types.""" + +import uuid + +from collections.abc import Sequence + +from a2a.types.a2a_pb2 import ( + Artifact, + Message, + Part, + Role, + StreamResponse, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) + + +# --- Message Helpers --- + + +def new_message( + parts: list[Part], + role: Role = Role.ROLE_AGENT, + context_id: str | None = None, + task_id: str | None = None, +) -> Message: + """Creates a new message containing a list of Parts.""" + return Message( + role=role, + parts=parts, + message_id=str(uuid.uuid4()), + task_id=task_id, + context_id=context_id, + ) + + +def new_text_message( + text: str, + context_id: str | None = None, + task_id: str | None = None, + role: Role = Role.ROLE_AGENT, +) -> Message: + """Creates a new message containing a single text Part.""" + return new_message( + parts=[Part(text=text)], + role=role, + task_id=task_id, + context_id=context_id, + ) + + +def get_message_text(message: Message, delimiter: str = '\n') -> str: + """Extracts and joins all text content from a Message's parts.""" + return delimiter.join(get_text_parts(message.parts)) + + +# --- Artifact Helpers --- + + +def new_artifact( + parts: list[Part], + name: str, + description: str | None = None, + artifact_id: str | None = None, +) -> Artifact: + """Creates a new Artifact object.""" + return Artifact( + artifact_id=artifact_id or str(uuid.uuid4()), + parts=parts, + name=name, + description=description, + ) + + +def new_text_artifact( + name: str, + text: str, + description: str | None = None, + artifact_id: str | None = None, +) -> Artifact: + """Creates a new Artifact object containing only a single text Part.""" + return new_artifact( + [Part(text=text)], + name, + description, + artifact_id=artifact_id, + ) + + +def get_artifact_text(artifact: Artifact, delimiter: str = '\n') -> str: + """Extracts and joins all text content from an Artifact's parts.""" + return delimiter.join(get_text_parts(artifact.parts)) + + +# --- Task Helpers --- + + +def new_task_from_user_message(user_message: Message) -> Task: + """Creates a new Task object from an initial user message.""" + if user_message.role != Role.ROLE_USER: + raise ValueError('Message must be from a user') + if not user_message.parts: + raise ValueError('Message parts cannot be empty') + for part in user_message.parts: + if part.HasField('text') and not part.text: + raise ValueError('Message.text cannot be empty') + + return Task( + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + id=user_message.task_id or str(uuid.uuid4()), + context_id=user_message.context_id or str(uuid.uuid4()), + history=[user_message], + ) + + +def new_task( + task_id: str, + context_id: str, + state: TaskState, + artifacts: list[Artifact] | None = None, + history: list[Message] | None = None, +) -> Task: + """Creates a Task object with a specified status.""" + if history is None: + history = [] + if artifacts is None: + artifacts = [] + + return Task( + status=TaskStatus(state=state), + id=task_id, + context_id=context_id, + artifacts=artifacts, + history=history, + ) + + +# --- Part Helpers --- + + +def get_text_parts(parts: Sequence[Part]) -> list[str]: + """Extracts text content from all text Parts.""" + return [part.text for part in parts if part.HasField('text')] + + +# --- Event & Stream Helpers --- + + +def new_text_status_update_event( + task_id: str, + context_id: str, + state: TaskState, + text: str, +) -> TaskStatusUpdateEvent: + """Creates a TaskStatusUpdateEvent with a single text message.""" + return TaskStatusUpdateEvent( + task_id=task_id, + context_id=context_id, + status=TaskStatus( + state=state, + message=new_text_message( + text=text, + role=Role.ROLE_AGENT, + context_id=context_id, + task_id=task_id, + ), + ), + ) + + +def new_text_artifact_update_event( # noqa: PLR0913 + task_id: str, + context_id: str, + name: str, + text: str, + append: bool = False, + last_chunk: bool = False, + artifact_id: str | None = None, +) -> TaskArtifactUpdateEvent: + """Creates a TaskArtifactUpdateEvent with a single text artifact.""" + return TaskArtifactUpdateEvent( + task_id=task_id, + context_id=context_id, + artifact=new_text_artifact( + name=name, text=text, artifact_id=artifact_id + ), + append=append, + last_chunk=last_chunk, + ) + + +def get_stream_response_text( + response: StreamResponse, delimiter: str = '\n' +) -> str: + """Extracts text content from a StreamResponse.""" + if response.HasField('message'): + return get_message_text(response.message, delimiter) + if response.HasField('task'): + texts = [ + get_artifact_text(a, delimiter) for a in response.task.artifacts + ] + return delimiter.join(t for t in texts if t) + if response.HasField('status_update'): + if response.status_update.status.HasField('message'): + return get_message_text( + response.status_update.status.message, delimiter + ) + return '' + if response.HasField('artifact_update'): + return get_artifact_text(response.artifact_update.artifact, delimiter) + return '' diff --git a/src/a2a/migrations/README.md b/src/a2a/migrations/README.md new file mode 100644 index 000000000..00b99f6fb --- /dev/null +++ b/src/a2a/migrations/README.md @@ -0,0 +1,123 @@ +# A2A SDK Database Migrations + +This directory handles the database schema updates for the A2A SDK. It uses a bundled CLI tool to simplify the migration process. + +## Installation + +To use the `a2a-db` migration tool, install the `a2a-sdk` with the `db-cli` extra. + +| Extra | `uv` Command | `pip` Command | +| :--- | :--- | :--- | +| **CLI Only** | `uv add "a2a-sdk[db-cli]"` | `pip install "a2a-sdk[db-cli]"` | +| **All Extras** | `uv add "a2a-sdk[all]"` | `pip install "a2a-sdk[all]"` | + + +## User Guide for Integrators + +When you install the `a2a-sdk`, you get a built-in command `a2a-db` which updates your database to make it compatible with the latest version of the SDK. + +### 1. Recommended: Back up your database + +Before running migrations, it is recommended to back up your database. + +### 2. Set your Database URL +Migrations require the `DATABASE_URL` environment variable to be set with an `async-compatible` driver. +You can set it globally with `export DATABASE_URL`. Examples for SQLite, PostgreSQL and MySQL, respectively: + +```bash +export DATABASE_URL="sqlite+aiosqlite://user:pass@host:port/your_database_name" + +export DATABASE_URL="postgresql+asyncpg://user:pass@localhost/your_database_name" + +export DATABASE_URL="mysql+aiomysql://user:pass@localhost/your_database_name" +``` + +Or you can use the `--database-url` flag to specify the database URL for a single command. + + +### 3. Apply Migrations +Always run this command after installing or upgrading the SDK to ensure your database matches the required schema. This will upgrade the tables `tasks` and `push_notification_configs` in your database by adding columns `owner` and `last_updated` and an index `(owner, last_updated)` to the `tasks` table and a column `owner` to the `push_notification_configs` table. + +```bash +uv run a2a-db +``` + +### 4. Customizing Defaults with Flags +#### --add_columns_owner_last_updated-default-owner +Allows you to pass custom values for the new `owner` column. If not set, it will default to the value `legacy_v03_no_user_info`. + +```bash +uv run a2a-db --add_columns_owner_last_updated-default-owner "admin_user" +``` +#### --database-url +You can use the `--database-url` flag to specify the database URL for a single command. + +```bash +uv run a2a-db --database-url "sqlite+aiosqlite:///my_database.db" +``` +#### --tasks-table / --push-notification-configs-table +Custom tasks and push notification configs tables to update. If not set, the default are `tasks` and `push_notification_configs`. + +```bash +uv run a2a-db --tasks-table "my_tasks" --push-notification-configs-table "my_configs" +``` +#### -v / --verbose +Enables verbose output by setting `sqlalchemy.engine` logging to `INFO`. + +```bash +uv run a2a-db -v +``` +#### --sql +Enables running migrations in `offline` mode. Migrations are generated as SQL scripts and printed to stdout instead of being run against the database. + +```bash +uv run a2a-db --sql +``` + +### 5. Rolling Back +If you need to revert a change: + +```bash +# Step back one version +uv run a2a-db downgrade -1 + +# Downgrade to a specific revision ID +uv run a2a-db downgrade + +# Revert all migrations +uv run a2a-db downgrade base + +# Revert all migrations in offline mode +uv run a2a-db downgrade head:base --sql +``` + +> [!NOTE] +> All flags except `--add_columns_owner_last_updated-default-owner` can be used during rollbacks. + +### 6. Verifying Current Status +To see the current revision applied to your database: + +```bash +uv run a2a-db current + +# To see more details (like revision dates, if available) +uv run a2a-db current -v +``` +--- + +## Developer Guide for SDK Contributors + +If you are modifying the SDK models and need to generate new migration files, use the following workflow. + +### Creating a New Migration +Developers should use the raw `alembic` command locally to generate migrations. Ensure you are in the project root. + +```bash +# Detect changes in models.py and generate a script +uv run alembic revision --autogenerate -m "description of changes" +``` + +### Internal Layout +- `env.py`: Configures the migration engine and applies the mandatory `DATABASE_URL` check. +- `versions/`: Contains the migration history. +- `script.py.mako`: The template for all new migration files. diff --git a/src/a2a/migrations/__init__.py b/src/a2a/migrations/__init__.py new file mode 100644 index 000000000..7b55fb93e --- /dev/null +++ b/src/a2a/migrations/__init__.py @@ -0,0 +1 @@ +"Alembic database migration package." diff --git a/src/a2a/migrations/env.py b/src/a2a/migrations/env.py new file mode 100644 index 000000000..448d39e87 --- /dev/null +++ b/src/a2a/migrations/env.py @@ -0,0 +1,127 @@ +import asyncio +import logging +import os + +from logging.config import fileConfig + +from sqlalchemy import Connection, pool +from sqlalchemy.ext.asyncio import async_engine_from_config + +from a2a.server.models import Base + +try: + from alembic import context +except ImportError as e: + raise ImportError( + "Migrations require Alembic. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + + +# this is the Alembic Config object, which provides +# access to the values within the .ini file in use. +config = context.config + +# Mandatory database configuration +db_url = os.getenv('DATABASE_URL') +if not db_url: + raise RuntimeError( + 'DATABASE_URL environment variable is not set. ' + "Please set it (e.g., export DATABASE_URL='sqlite+aiosqlite:///./my-database.db') before running migrations " + 'or use the --database-url flag.' + ) +config.set_main_option('sqlalchemy.url', db_url) + +# Interpret the config file for Python logging. +# This line sets up loggers basically. +if ( + config.config_file_name is not None + and os.path.exists(config.config_file_name) + and config.config_file_name.endswith('.ini') +): + fileConfig(config.config_file_name) + +if config.get_main_option('verbose') == 'true': + logging.getLogger('sqlalchemy.engine').setLevel(logging.INFO) + +# add your model's MetaData object here for 'autogenerate' support +target_metadata = Base.metadata + +# Version table name to avoid conflicts with existing alembic_version tables in the database. +# This ensures that the migration history for A2A is tracked separately. +VERSION_TABLE = 'a2a_alembic_version' + + +def run_migrations_offline() -> None: + """Run migrations in 'offline' mode. + + This configures the context with just a URL + and not an Engine, though an Engine is acceptable + here as well. By skipping the Engine creation + we don't even need a DBAPI to be available. + + Calls to context.execute() here emit the given string to the + script output. + + """ + url = config.get_main_option('sqlalchemy.url') + context.configure( + url=url, + target_metadata=target_metadata, + literal_binds=True, + dialect_opts={'paramstyle': 'named'}, + version_table=VERSION_TABLE, + ) + + with context.begin_transaction(): + context.run_migrations() + + +def do_run_migrations(connection: Connection) -> None: + """Run migrations in 'online' mode. + + This function is called within a synchronous context (via run_sync) + to configure the migration context with the provided connection + and target metadata, then execute the migrations within a transaction. + + Args: + connection: The SQLAlchemy connection to use for the migrations. + """ + context.configure( + connection=connection, + target_metadata=target_metadata, + version_table=VERSION_TABLE, + ) + + with context.begin_transaction(): + context.run_migrations() + + +async def run_async_migrations() -> None: + """Run migrations using an Engine. + + In this scenario we need to create an Engine + and associate a connection with the context. + """ + connectable = async_engine_from_config( + config.get_section(config.config_ini_section, {}), + prefix='sqlalchemy.', + poolclass=pool.NullPool, + ) + + async with connectable.connect() as connection: + await connection.run_sync(do_run_migrations) + + await connectable.dispose() + + +def run_migrations_online() -> None: + """Run migrations in 'online' mode.""" + asyncio.run(run_async_migrations()) + + +if context.is_offline_mode(): + logging.info('Running migrations in offline mode.') + run_migrations_offline() +else: + logging.info('Running migrations in online mode.') + run_migrations_online() diff --git a/src/a2a/migrations/migration_utils.py b/src/a2a/migrations/migration_utils.py new file mode 100644 index 000000000..4a09ede91 --- /dev/null +++ b/src/a2a/migrations/migration_utils.py @@ -0,0 +1,110 @@ +"""Utility functions for Alembic migrations.""" + +import logging +from typing import Any + +import sqlalchemy as sa + +try: + from alembic import context, op +except ImportError as e: + raise ImportError( + "A2A migrations require the 'db-cli' extra. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + + +def _get_inspector() -> sa.engine.reflection.Inspector: + """Get the current database inspector.""" + bind = op.get_bind() + inspector = sa.inspect(bind) + return inspector + + +def table_exists(table_name: str) -> bool: + """Check if a table exists in the database.""" + if context.is_offline_mode(): + return True + inspector = _get_inspector() + return table_name in inspector.get_table_names() + + +def column_exists( + table_name: str, column_name: str, downgrade_mode: bool = False +) -> bool: + """Check if a column exists in a table.""" + if context.is_offline_mode(): + return downgrade_mode + + inspector = _get_inspector() + columns = [c['name'] for c in inspector.get_columns(table_name)] + return column_name in columns + + +def index_exists( + table_name: str, index_name: str, downgrade_mode: bool = False +) -> bool: + """Check if an index exists on a table.""" + if context.is_offline_mode(): + return downgrade_mode + + inspector = _get_inspector() + indexes = [i['name'] for i in inspector.get_indexes(table_name)] + return index_name in indexes + + +def add_column( + table: str, + column_name: str, + nullable: bool, + type_: sa.types.TypeEngine, + default: Any | None = None, +) -> None: + """Add a column to a table if it doesn't already exist.""" + if not column_exists(table, column_name): + op.add_column( + table, + sa.Column( + column_name, + type_, + nullable=nullable, + server_default=default, + ), + ) + else: + logging.info( + f"Column '{column_name}' already exists in table '{table}'. Skipping." + ) + + +def drop_column(table: str, column_name: str) -> None: + """Drop a column from a table if it exists.""" + if column_exists(table, column_name, True): + op.drop_column(table, column_name) + else: + logging.info( + f"Column '{column_name}' does not exist in table '{table}'. Skipping." + ) + + +def add_index(table: str, index_name: str, columns: list[str]) -> None: + """Create an index on a table if it doesn't already exist.""" + if not index_exists(table, index_name): + op.create_index( + index_name, + table, + columns, + ) + else: + logging.info( + f"Index '{index_name}' already exists on table '{table}'. Skipping." + ) + + +def drop_index(table: str, index_name: str) -> None: + """Drop an index from a table if it exists.""" + if index_exists(table, index_name, True): + op.drop_index(index_name, table_name=table) + else: + logging.info( + f"Index '{index_name}' does not exist on table '{table}'. Skipping." + ) diff --git a/src/a2a/migrations/script.py.mako b/src/a2a/migrations/script.py.mako new file mode 100644 index 000000000..9caa81d6a --- /dev/null +++ b/src/a2a/migrations/script.py.mako @@ -0,0 +1,35 @@ +"""${message} + +Revision ID: ${up_revision} +Revises: ${down_revision | comma,n} +Create Date: ${create_date} + +""" +from typing import Sequence, Union + +import sqlalchemy as sa + +try: + from alembic import op +except ImportError as e: + raise ImportError( + "A2A migrations require the 'db-cli' extra. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + +${imports if imports else ""} + +# revision identifiers, used by Alembic. +revision: str = ${repr(up_revision)} +down_revision: Union[str, Sequence[str], None] = ${repr(down_revision)} +branch_labels: Union[str, Sequence[str], None] = ${repr(branch_labels)} +depends_on: Union[str, Sequence[str], None] = ${repr(depends_on)} + + +def upgrade() -> None: + """Upgrade schema.""" + ${upgrades if upgrades else "pass"} + + +def downgrade() -> None: + """Downgrade schema.""" + ${downgrades if downgrades else "pass"} diff --git a/src/a2a/migrations/versions/38ce57e08137_add_column_protocol_version.py b/src/a2a/migrations/versions/38ce57e08137_add_column_protocol_version.py new file mode 100644 index 000000000..58948aa8c --- /dev/null +++ b/src/a2a/migrations/versions/38ce57e08137_add_column_protocol_version.py @@ -0,0 +1,78 @@ +"""add column protocol version + +Revision ID: 38ce57e08137 +Revises: 6419d2d130f6 +Create Date: 2026-03-09 12:07:16.998955 + +""" + +import logging +from collections.abc import Sequence +from typing import Union + +import sqlalchemy as sa + +try: + from alembic import context +except ImportError as e: + raise ImportError( + "A2A migrations require the 'db-cli' extra. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + +from a2a.migrations.migration_utils import table_exists, add_column, drop_column + + +# revision identifiers, used by Alembic. +revision: str = '38ce57e08137' +down_revision: Union[str, Sequence[str], None] = '6419d2d130f6' +branch_labels: Union[str, Sequence[str], None] = None +depends_on: Union[str, Sequence[str], None] = None + + +def upgrade() -> None: + """Upgrade schema.""" + tasks_table = context.config.get_main_option('tasks_table', 'tasks') + push_notification_configs_table = context.config.get_main_option( + 'push_notification_configs_table', 'push_notification_configs' + ) + + if table_exists(tasks_table): + add_column(tasks_table, 'protocol_version', True, sa.String(16)) + else: + logging.warning( + f"Table '{tasks_table}' does not exist. Skipping upgrade for this table." + ) + + if table_exists(push_notification_configs_table): + add_column( + push_notification_configs_table, + 'protocol_version', + True, + sa.String(16), + ) + else: + logging.warning( + f"Table '{push_notification_configs_table}' does not exist. Skipping upgrade for this table." + ) + + +def downgrade() -> None: + """Downgrade schema.""" + tasks_table = context.config.get_main_option('tasks_table', 'tasks') + push_notification_configs_table = context.config.get_main_option( + 'push_notification_configs_table', 'push_notification_configs' + ) + + if table_exists(tasks_table): + drop_column(tasks_table, 'protocol_version') + else: + logging.warning( + f"Table '{tasks_table}' does not exist. Skipping downgrade for this table." + ) + + if table_exists(push_notification_configs_table): + drop_column(push_notification_configs_table, 'protocol_version') + else: + logging.warning( + f"Table '{push_notification_configs_table}' does not exist. Skipping downgrade for this table." + ) diff --git a/src/a2a/migrations/versions/6419d2d130f6_add_columns_owner_last_updated.py b/src/a2a/migrations/versions/6419d2d130f6_add_columns_owner_last_updated.py new file mode 100644 index 000000000..fc0f1097e --- /dev/null +++ b/src/a2a/migrations/versions/6419d2d130f6_add_columns_owner_last_updated.py @@ -0,0 +1,109 @@ +"""add_columns_owner_last_updated. + +Revision ID: 6419d2d130f6 +Revises: +Create Date: 2026-02-17 09:23:06.758085 + +""" + +import logging +from collections.abc import Sequence + +import sqlalchemy as sa + +try: + from alembic import context +except ImportError as e: + raise ImportError( + "'Add columns owner and last_updated to database tables' migration requires Alembic. Install with: 'pip install a2a-sdk[db-cli]'." + ) from e + +from a2a.migrations.migration_utils import ( + table_exists, + add_column, + add_index, + drop_column, + drop_index, +) + + +# revision identifiers, used by Alembic. +revision: str = '6419d2d130f6' +down_revision: str | Sequence[str] | None = None +branch_labels: str | Sequence[str] | None = None +depends_on: str | Sequence[str] | None = None + + +def upgrade() -> None: + """Upgrade schema.""" + # Get the default value from the config (passed via CLI) + owner = context.config.get_main_option( + 'add_columns_owner_last_updated_default_owner', + 'legacy_v03_no_user_info', + ) + tasks_table = context.config.get_main_option('tasks_table', 'tasks') + push_notification_configs_table = context.config.get_main_option( + 'push_notification_configs_table', 'push_notification_configs' + ) + + if table_exists(tasks_table): + add_column(tasks_table, 'owner', True, sa.String(255), owner) + add_column(tasks_table, 'last_updated', True, sa.DateTime()) + add_index( + tasks_table, + f'idx_{tasks_table}_owner_last_updated', + ['owner', 'last_updated'], + ) + else: + logging.warning( + f"Table '{tasks_table}' does not exist. Skipping upgrade for this table." + ) + + if table_exists(push_notification_configs_table): + add_column( + push_notification_configs_table, + 'owner', + True, + sa.String(255), + owner, + ) + add_index( + push_notification_configs_table, + f'ix_{push_notification_configs_table}_owner', + ['owner'], + ) + else: + logging.warning( + f"Table '{push_notification_configs_table}' does not exist. Skipping upgrade for this table." + ) + + +def downgrade() -> None: + """Downgrade schema.""" + tasks_table = context.config.get_main_option('tasks_table', 'tasks') + push_notification_configs_table = context.config.get_main_option( + 'push_notification_configs_table', 'push_notification_configs' + ) + + if table_exists(tasks_table): + drop_index( + tasks_table, + f'idx_{tasks_table}_owner_last_updated', + ) + drop_column(tasks_table, 'owner') + drop_column(tasks_table, 'last_updated') + else: + logging.warning( + f"Table '{tasks_table}' does not exist. Skipping downgrade for this table." + ) + + if table_exists(push_notification_configs_table): + drop_index( + push_notification_configs_table, + f'ix_{push_notification_configs_table}_owner', + ) + drop_column(push_notification_configs_table, 'owner') + else: + logging.warning( + f"Table '{push_notification_configs_table}' does not exist. Skipping downgrade for this table." + ) diff --git a/src/a2a/migrations/versions/__init__.py b/src/a2a/migrations/versions/__init__.py new file mode 100644 index 000000000..574828c67 --- /dev/null +++ b/src/a2a/migrations/versions/__init__.py @@ -0,0 +1 @@ +"""Alembic migrations scripts for the A2A project.""" diff --git a/src/a2a/server/agent_execution/active_task.py b/src/a2a/server/agent_execution/active_task.py new file mode 100644 index 000000000..5479a38c1 --- /dev/null +++ b/src/a2a/server/agent_execution/active_task.py @@ -0,0 +1,754 @@ +# ruff: noqa: TRY301, SLF001 +from __future__ import annotations + +import asyncio +import logging +import uuid + +from typing import TYPE_CHECKING, Any, cast + +from a2a.server.agent_execution.context import RequestContext + + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Callable + + from a2a.server.agent_execution.agent_executor import AgentExecutor + from a2a.server.context import ServerCallContext + from a2a.server.tasks.push_notification_sender import ( + PushNotificationSender, + ) + from a2a.server.tasks.task_manager import TaskManager + +from a2a.server.events.event_queue_v2 import ( + AsyncQueue, + Event, + EventQueueSource, + QueueShutDown, + _create_async_queue, +) +from a2a.server.tasks import PushNotificationEvent +from a2a.types.a2a_pb2 import ( + Message, + Task, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) +from a2a.utils.errors import ( + InvalidAgentResponseError, + InvalidParamsError, + TaskNotFoundError, +) + + +logger = logging.getLogger(__name__) + + +TERMINAL_TASK_STATES = { + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, +} +INTERRUPTED_TASK_STATES = { + TaskState.TASK_STATE_AUTH_REQUIRED, + TaskState.TASK_STATE_INPUT_REQUIRED, +} + + +class _RequestStarted: + def __init__(self, request_id: uuid.UUID, request_context: RequestContext): + self.request_id = request_id + self.request_context = request_context + + +class _RequestCompleted: + def __init__(self, request_id: uuid.UUID): + self.request_id = request_id + + +class ActiveTask: + """Manages the lifecycle and execution of an active A2A task. + + It coordinates between the agent's execution (the producer), the + persistence and state management (the TaskManager), and the event + distribution to subscribers (the consumer). + + Concurrency Guarantees: + - This class is designed to be highly concurrent. It manages an internal + producer-consumer model using `asyncio.Task`s. + - `self._lock` (asyncio.Lock) ensures mutually exclusive access for critical + lifecycle state changes, such as starting the task, subscribing, and + determining if cleanup is safe to trigger. + + mutation to the observable result state (like `_exception`, + or `_is_finished`) notifies waiting coroutines (like `wait()`). + - `self._is_finished` (asyncio.Event) provides a thread-safe, non-blocking way + for external observers and internal loops to check if the ActiveTask has + permanently ceased execution and closed its queues. + """ + + def __init__( + self, + agent_executor: AgentExecutor, + task_id: str, + task_manager: TaskManager, + push_sender: PushNotificationSender | None = None, + on_cleanup: Callable[[ActiveTask], None] | None = None, + ) -> None: + """Initializes the ActiveTask. + + Args: + agent_executor: The executor to run the agent logic (producer). + task_id: The unique identifier of the task being managed. + task_manager: The manager for task state and database persistence. + push_sender: Optional sender for out-of-band push notifications. + on_cleanup: Optional callback triggered when the task is fully finished + and the last subscriber has disconnected. Used to prune + the task from the ActiveTaskRegistry. + """ + # --- Core Dependencies --- + self._agent_executor = agent_executor + self._task_id = task_id + self._event_queue_agent = EventQueueSource() + self._event_queue_subscribers = EventQueueSource( + create_default_sink=False + ) + self._task_manager = task_manager + self._push_sender = push_sender + self._on_cleanup = on_cleanup + + # --- Synchronization Primitives --- + # `_lock` protects structural lifecycle changes: start(), subscribe() counting, + # and _maybe_cleanup() race conditions. + self._lock = asyncio.Lock() + + # `_request_lock` protects parallel request processing. + self._request_lock = asyncio.Lock() + + # _task_created is set when initial version of task is stored in DB. + self._task_created = asyncio.Event() + + # `_is_finished` is set EXACTLY ONCE when the consumer loop exits, signifying + # the absolute end of the task's active lifecycle. + self._is_finished = asyncio.Event() + + # --- Lifecycle State --- + # The background task executing the agent logic. + self._producer_task: asyncio.Task[None] | None = None + # The background task reading from _event_queue and updating the DB. + self._consumer_task: asyncio.Task[None] | None = None + + # Tracks how many active SSE/gRPC streams are currently tailing this task. + # Protected by `_lock`. + self._reference_count = 0 + + # Holds any fatal exception that crashed the producer or consumer. + # TODO: Synchronize exception handling (ideally mix it in the queue). + self._exception: Exception | None = None + + # Queue for incoming requests + self._request_queue: AsyncQueue[tuple[RequestContext, uuid.UUID]] = ( + _create_async_queue() + ) + + @property + def task_id(self) -> str: + """The ID of the task.""" + return self._task_id + + async def enqueue_request( + self, request_context: RequestContext + ) -> uuid.UUID: + """Enqueues a request for the active task to process.""" + request_id = uuid.uuid4() + await self._request_queue.put((request_context, request_id)) + return request_id + + async def start( + self, + call_context: ServerCallContext, + create_task_if_missing: bool = False, + ) -> None: + """Starts the active task background processes. + + Concurrency Guarantee: + Uses `self._lock` to ensure the producer and consumer tasks are strictly + singleton instances for the lifetime of this ActiveTask. + """ + logger.debug('ActiveTask[%s]: Starting', self._task_id) + async with self._lock: + if self._is_finished.is_set(): + raise InvalidParamsError( + f'Task {self._task_id} is already completed. Cannot start it again.' + ) + + if ( + self._producer_task is not None + and self._consumer_task is not None + ): + logger.debug( + 'ActiveTask[%s]: Already started, ignoring start request', + self._task_id, + ) + return + + logger.debug( + 'ActiveTask[%s]: Executing setup (call_context: %s, create_task_if_missing: %s)', + self._task_id, + call_context, + create_task_if_missing, + ) + try: + self._task_manager._call_context = call_context + task = await self._task_manager.get_task() + logger.debug('TASK (start): %s', task) + + if task: + self._task_created.set() + if task.status.state in TERMINAL_TASK_STATES: + raise InvalidParamsError( + message=f'Task {task.id} is in terminal state: {task.status.state}' + ) + elif not create_task_if_missing: + raise TaskNotFoundError + + except Exception: + logger.debug( + 'ActiveTask[%s]: Setup failed, cleaning up', + self._task_id, + ) + self._is_finished.set() + if self._reference_count == 0 and self._on_cleanup: + self._on_cleanup(self) + raise + + # Spawn the background tasks that drive the lifecycle. + self._reference_count += 1 + self._producer_task = asyncio.create_task( + self._run_producer(), name=f'producer:{self._task_id}' + ) + self._consumer_task = asyncio.create_task( + self._run_consumer(), name=f'consumer:{self._task_id}' + ) + logger.debug( + 'ActiveTask[%s]: Background tasks created', self._task_id + ) + + async def _run_producer(self) -> None: + """Executes the agent logic. + + This method encapsulates the external `AgentExecutor.execute` call. It ensures + that regardless of how the agent finishes (success, unhandled exception, or + cancellation), the underlying `_event_queue` is safely closed, which signals + the consumer to wind down. + + Concurrency Guarantee: + Runs as a detached asyncio.Task. Safe to cancel. + """ + logger.debug('Producer[%s]: Started', self._task_id) + request_context = None + try: + while True: + ( + request_context, + request_id, + ) = await self._request_queue.get() + await self._request_lock.acquire() + # TODO: Should we create task manager every time? + self._task_manager._call_context = request_context.call_context + + request_context.current_task = ( + await self._task_manager.get_task() + ) + + logger.debug( + 'Producer[%s]: Executing agent task %s', + self._task_id, + request_context.current_task, + ) + + try: + await self._event_queue_agent.enqueue_event( + cast( + 'Event', + _RequestStarted(request_id, request_context), + ) + ) + + await self._agent_executor.execute( + request_context, self._event_queue_agent + ) + logger.debug( + 'Producer[%s]: Execution finished successfully', + self._task_id, + ) + finally: + logger.debug( + 'Producer[%s]: Enqueuing request completed event', + self._task_id, + ) + await self._event_queue_agent.enqueue_event( + cast('Event', _RequestCompleted(request_id)) + ) + self._request_queue.task_done() + except asyncio.CancelledError: + logger.debug('Producer[%s]: Cancelled', self._task_id) + + except QueueShutDown: + logger.debug('Producer[%s]: Queue shut down', self._task_id) + + except Exception as e: + logger.exception( + 'Producer[%s]: Execution failed', + self._task_id, + ) + # Create task and mark as failed. + if request_context: + await self._task_manager.ensure_task_id( + self._task_id, + request_context.context_id or '', + ) + self._task_created.set() + async with self._lock: + await self._mark_task_as_failed(e) + + finally: + self._request_queue.shutdown(immediate=True) + await self._event_queue_agent.close(immediate=False) + await self._event_queue_subscribers.close(immediate=False) + logger.debug('Producer[%s]: Completed', self._task_id) + + async def _run_consumer(self) -> None: # noqa: PLR0915, PLR0912 + """Consumes events from the agent and updates system state. + + This continuous loop dequeues events emitted by the producer, updates the + database via `TaskManager`, and intercepts critical task states (e.g., + INPUT_REQUIRED, COMPLETED, FAILED) to cache the final result. + + Concurrency Guarantee: + Runs as a detached asyncio.Task. The loop ends gracefully when the producer + closes the queue (raising `QueueShutDown`). Upon termination, it formally sets + `_is_finished`, unblocking all global subscribers and wait() calls. + """ + logger.debug('Consumer[%s]: Started', self._task_id) + task_mode = None + message_to_save = None + # TODO: Make helper methods + # TODO: Support Task enqueue + try: + try: + try: + while True: + # Dequeue event. This raises QueueShutDown when finished. + logger.debug( + 'Consumer[%s]: Waiting for event', + self._task_id, + ) + new_task = None + event = await self._event_queue_agent.dequeue_event() + logger.debug( + 'Consumer[%s]: Dequeued event %s', + self._task_id, + type(event).__name__, + ) + + try: + if isinstance(event, _RequestCompleted): + logger.debug( + 'Consumer[%s]: Request completed', + self._task_id, + ) + self._request_lock.release() + elif isinstance(event, _RequestStarted): + logger.debug( + 'Consumer[%s]: Request started', + self._task_id, + ) + message_to_save = event.request_context.message + + elif isinstance(event, Message): + if task_mode is not None: + if task_mode: + raise InvalidAgentResponseError( + 'Received Message object in task mode. Use TaskStatusUpdateEvent or TaskArtifactUpdateEvent instead.' + ) + raise InvalidAgentResponseError( + 'Multiple Message objects received.' + ) + task_mode = False + logger.debug( + 'Consumer[%s]: Setting result to Message: %s', + self._task_id, + event, + ) + else: + if task_mode is False: + raise InvalidAgentResponseError( + f'Received {type(event).__name__} in message mode. Use Task with TaskStatusUpdateEvent and TaskArtifactUpdateEvent instead.' + ) + + if isinstance(event, Task): + existing_task = ( + await self._task_manager.get_task() + ) + if existing_task: + logger.error( + 'Task %s already exists. Ignoring task replacement.', + self._task_id, + ) + else: + await ( + self._task_manager.save_task_event( + event + ) + ) + # Initial task should already contain the message. + message_to_save = None + else: + if ( + isinstance(event, TaskStatusUpdateEvent) + and not self._task_created.is_set() + ): + task = ( + await self._task_manager.get_task() + ) + if task is None: + raise InvalidAgentResponseError( + f'Agent should enqueue Task before {type(event).__name__} event' + ) + + new_task = ( + await self._task_manager.ensure_task_id( + self._task_id, + event.context_id, + ) + ) + + if message_to_save is not None: + new_task = self._task_manager.update_with_message( + message_to_save, + new_task, + ) + await ( + self._task_manager.save_task_event( + new_task + ) + ) + message_to_save = None + + task_mode = True + # Save structural events (like TaskStatusUpdate) to DB. + + self._task_manager.context_id = event.context_id + if not isinstance(event, Task): + await self._task_manager.process(event) + + # Check for AUTH_REQUIRED or INPUT_REQUIRED or TERMINAL states + new_task = await self._task_manager.get_task() + if new_task is None: + raise RuntimeError( + f'Task {self.task_id} not found' + ) + if isinstance(event, Task): + event = new_task + is_interrupted = ( + new_task.status.state + in INTERRUPTED_TASK_STATES + ) + is_terminal = ( + new_task.status.state + in TERMINAL_TASK_STATES + ) + + # If we hit a breakpoint or terminal state, lock in the result. + if is_interrupted or is_terminal: + logger.debug( + 'Consumer[%s]: Setting first result as Task (state=%s)', + self._task_id, + new_task.status.state, + ) + + if is_terminal: + logger.debug( + 'Consumer[%s]: Reached terminal state %s', + self._task_id, + new_task.status.state, + ) + if not self._is_finished.is_set(): + async with self._lock: + # TODO: what about _reference_count when task is failing? + self._reference_count -= 1 + # _maybe_cleanup() is called in finally block. + + # Terminate the ActiveTask globally. + self._is_finished.set() + self._request_queue.shutdown(immediate=True) + + if is_interrupted: + logger.debug( + 'Consumer[%s]: Interrupted with state %s', + self._task_id, + new_task.status.state, + ) + + if ( + self._push_sender + and self._task_id + and isinstance(event, PushNotificationEvent) + ): + logger.debug( + 'Consumer[%s]: Sending push notification', + self._task_id, + ) + await self._push_sender.send_notification( + self._task_id, event + ) + + self._task_created.set() + + finally: + if new_task is not None: + new_task_copy = Task() + new_task_copy.CopyFrom(new_task) + new_task = new_task_copy + if isinstance(event, Task): + new_task_copy = Task() + new_task_copy.CopyFrom(event) + event = new_task_copy + + logger.debug( + 'Consumer[%s]: Enqueuing\nEvent: %s\nNew Task: %s\n', + self._task_id, + event, + new_task, + ) + await self._event_queue_subscribers.enqueue_event( + cast('Any', (event, new_task)) + ) + self._event_queue_agent.task_done() + except QueueShutDown: + logger.debug( + 'Consumer[%s]: Event queue shut down', self._task_id + ) + except Exception as e: + logger.exception('Consumer[%s]: Failed', self._task_id) + # TODO: Make the task in database as failed. + async with self._lock: + await self._mark_task_as_failed(e) + finally: + # The consumer is dead. The ActiveTask is permanently finished. + self._is_finished.set() + self._request_queue.shutdown(immediate=True) + await self._event_queue_agent.close(immediate=True) + + logger.debug('Consumer[%s]: Finishing', self._task_id) + await self._maybe_cleanup() + finally: + logger.debug('Consumer[%s]: Completed', self._task_id) + + async def subscribe( # noqa: PLR0912, PLR0915 + self, + *, + request: RequestContext | None = None, + include_initial_task: bool = False, + replace_status_update_with_task: bool = False, + ) -> AsyncGenerator[Event, None]: + """Creates a queue tap and yields events as they are produced. + + Concurrency Guarantee: + Uses `_lock` to safely increment and decrement `_reference_count`. + Safely detaches its queue tap when the client disconnects or the task finishes, + triggering `_maybe_cleanup()` to potentially garbage collect the ActiveTask. + """ + logger.debug('Subscribe[%s]: New subscriber', self._task_id) + + async with self._lock: + if self._exception: + logger.debug( + 'Subscribe[%s]: Failed, exception already set', + self._task_id, + ) + raise self._exception + if self._is_finished.is_set(): + raise InvalidParamsError( + f'Task {self._task_id} is already completed.' + ) + self._reference_count += 1 + logger.debug( + 'Subscribe[%s]: Subscribers count: %d', + self._task_id, + self._reference_count, + ) + + tapped_queue = await self._event_queue_subscribers.tap() + request_id = await self.enqueue_request(request) if request else None + + try: + if include_initial_task: + logger.debug( + 'Subscribe[%s]: Including initial task', + self._task_id, + ) + task = await self.get_task() + yield task + + while True: + try: + if self._exception: + raise self._exception + + dequeued = await tapped_queue.dequeue_event() + event, updated_task = cast('Any', dequeued) + logger.debug( + 'Subscriber[%s]\nDequeued event %s\nUpdated task %s\n', + self._task_id, + event, + updated_task, + ) + if replace_status_update_with_task and isinstance( + event, TaskStatusUpdateEvent + ): + logger.debug( + 'Subscriber[%s]: Replacing TaskStatusUpdateEvent with Task: %s', + self._task_id, + updated_task, + ) + event = updated_task + if self._exception: + raise self._exception from None + if isinstance(event, _RequestCompleted): + if ( + request_id is not None + and event.request_id == request_id + ): + logger.debug( + 'Subscriber[%s]: Request completed', + self._task_id, + ) + return + continue + elif isinstance(event, _RequestStarted): + logger.debug( + 'Subscriber[%s]: Request started', + self._task_id, + ) + continue + try: + yield event + finally: + tapped_queue.task_done() + except (QueueShutDown, asyncio.CancelledError): + if self._exception: + raise self._exception from None + break + finally: + logger.debug('Subscribe[%s]: Unsubscribing', self._task_id) + await tapped_queue.close(immediate=True) + async with self._lock: + self._reference_count -= 1 + # Evaluate if this was the last subscriber on a finished task. + await self._maybe_cleanup() + + async def cancel(self, call_context: ServerCallContext) -> Task: + """Cancels the running active task. + + Concurrency Guarantee: + Uses `_lock` to ensure we don't attempt to cancel a producer that is + already winding down or hasn't started. It fires the cancellation signal + and blocks until the consumer processes the cancellation events. + """ + logger.debug('Cancel[%s]: Cancelling task', self._task_id) + + # TODO: Conflicts with call_context on the pending request. + self._task_manager._call_context = call_context + + task = await self._task_manager.get_task() + request_context = RequestContext( + call_context=call_context, + task_id=self._task_id, + context_id=task.context_id if task else None, + task=task, + ) + + async with self._lock: + if not self._is_finished.is_set() and self._producer_task: + logger.debug( + 'Cancel[%s]: Cancelling producer task', self._task_id + ) + self._producer_task.cancel() + try: + await self._agent_executor.cancel( + request_context, self._event_queue_agent + ) + except Exception as e: + logger.exception( + 'Cancel[%s]: Agent cancel failed', self._task_id + ) + await self._mark_task_as_failed(e) + raise + else: + logger.debug( + 'Cancel[%s]: Task already finished [%s] or producer not started [%s], not cancelling', + self._task_id, + self._is_finished.is_set(), + self._producer_task, + ) + + await self._is_finished.wait() + task = await self._task_manager.get_task() + if not task: + raise RuntimeError('Task should have been created') + return task + + async def _maybe_cleanup(self) -> None: + """Triggers cleanup if task is finished and has no subscribers. + + Concurrency Guarantee: + Protected by `_lock` to prevent race conditions where a new subscriber + attaches at the exact moment the task decides to garbage collect itself. + """ + async with self._lock: + logger.debug( + 'Cleanup[%s]: Subscribers count: %d is_finished: %s', + self._task_id, + self._reference_count, + self._is_finished.is_set(), + ) + + if ( + self._is_finished.is_set() + and self._reference_count == 0 + and self._on_cleanup + ): + logger.debug('Cleanup[%s]: Triggering cleanup', self._task_id) + self._on_cleanup(self) + + async def _mark_task_as_failed(self, exception: Exception) -> None: + if self._exception is None: + self._exception = exception + if self._task_created.is_set(): + try: + task = await self._task_manager.get_task() + if task is not None: + await self._event_queue_agent.enqueue_event( + TaskStatusUpdateEvent( + task_id=task.id, + context_id=task.context_id, + status=TaskStatus( + state=TaskState.TASK_STATE_FAILED, + ), + ) + ) + except QueueShutDown: + pass + + async def get_task(self) -> Task: + """Get task from db.""" + # TODO: THERE IS ZERO CONCURRENCY SAFETY HERE (Except inital task creation). + await self._task_created.wait() + task = await self._task_manager.get_task() + if not task: + raise RuntimeError('Task should have been created') + return task diff --git a/src/a2a/server/agent_execution/active_task_registry.py b/src/a2a/server/agent_execution/active_task_registry.py new file mode 100644 index 000000000..9c1299ab3 --- /dev/null +++ b/src/a2a/server/agent_execution/active_task_registry.py @@ -0,0 +1,88 @@ +from __future__ import annotations + +import asyncio +import logging + +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from a2a.server.agent_execution.agent_executor import AgentExecutor + from a2a.server.context import ServerCallContext + from a2a.server.tasks.push_notification_sender import PushNotificationSender + from a2a.server.tasks.task_store import TaskStore + +from a2a.server.agent_execution.active_task import ActiveTask +from a2a.server.tasks.task_manager import TaskManager + + +logger = logging.getLogger(__name__) + + +class ActiveTaskRegistry: + """A registry for active ActiveTask instances.""" + + def __init__( + self, + agent_executor: AgentExecutor, + task_store: TaskStore, + push_sender: PushNotificationSender | None = None, + ): + self._agent_executor = agent_executor + self._task_store = task_store + self._push_sender = push_sender + self._active_tasks: dict[str, ActiveTask] = {} + self._lock = asyncio.Lock() + self._cleanup_tasks: set[asyncio.Task[None]] = set() + + async def get_or_create( + self, + task_id: str, + call_context: ServerCallContext, + context_id: str | None = None, + create_task_if_missing: bool = False, + ) -> ActiveTask: + """Retrieves an existing ActiveTask or creates a new one.""" + async with self._lock: + if task_id in self._active_tasks: + return self._active_tasks[task_id] + + task_manager = TaskManager( + task_id=task_id, + context_id=context_id, + task_store=self._task_store, + initial_message=None, + context=call_context, + ) + + active_task = ActiveTask( + agent_executor=self._agent_executor, + task_id=task_id, + task_manager=task_manager, + push_sender=self._push_sender, + on_cleanup=self._on_active_task_cleanup, + ) + self._active_tasks[task_id] = active_task + + await active_task.start( + call_context=call_context, + create_task_if_missing=create_task_if_missing, + ) + return active_task + + def _on_active_task_cleanup(self, active_task: ActiveTask) -> None: + """Called by ActiveTask when it's finished and has no subscribers.""" + logger.debug('Active task %s cleanup scheduled', active_task.task_id) + task = asyncio.create_task(self._remove_task(active_task.task_id)) + self._cleanup_tasks.add(task) + task.add_done_callback(self._cleanup_tasks.discard) + + async def _remove_task(self, task_id: str) -> None: + async with self._lock: + self._active_tasks.pop(task_id, None) + logger.debug('Removed active task for %s from registry', task_id) + + async def get(self, task_id: str) -> ActiveTask | None: + """Retrieves an existing task.""" + async with self._lock: + return self._active_tasks.get(task_id) diff --git a/src/a2a/server/agent_execution/agent_executor.py b/src/a2a/server/agent_execution/agent_executor.py index 38be9c11c..1c3866047 100644 --- a/src/a2a/server/agent_execution/agent_executor.py +++ b/src/a2a/server/agent_execution/agent_executor.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from a2a.server.agent_execution.context import RequestContext -from a2a.server.events.event_queue import EventQueue +from a2a.server.events.event_queue_v2 import EventQueue class AgentExecutor(ABC): @@ -23,6 +23,44 @@ async def execute( return once the agent's execution for this request is complete or yields control (e.g., enters an input-required state). + Request Lifecycle & AgentExecutor Responsibilities: + - **Concurrency**: The framework guarantees single execution per request; + `execute()` will not be called concurrently for the same request context. + - **Exception Handling**: Unhandled exceptions raised by `execute()` will be + caught by the framework and result in the task transitioning to + `TaskState.TASK_STATE_ERROR`. + - **Post-Completion**: Once `execute()` completes (returns or raises), the + executor must not access the `context` or `event_queue` anymore. + - **Terminal States**: Before completing the call normally, the executor + SHOULD publish a `TaskStatusUpdateEvent` to transition the task to a + terminal state (e.g., `TASK_STATE_COMPLETED`) or an interrupted state + (`TASK_STATE_INPUT_REQUIRED` or `TASK_STATE_AUTH_REQUIRED`). + - **Interrupted Workflows**: + - `TASK_STATE_INPUT_REQUIRED`: The executor publishes a `TaskStatusUpdateEvent` with + `TaskState.TASK_STATE_INPUT_REQUIRED` and returns to yield control. + The request will resume once user input is provided. + - `TASK_STATE_AUTH_REQUIRED`: There are in-bound and out-of-bound auth models. + In both scenarios, the agent publishes a `TaskStatusUpdateEvent` with + `TaskState.TASK_STATE_AUTH_REQUIRED`. + - In-bound: The agent should return from `execute()`. The framework will + call `execute()` again once the user response is received. + - Out-of-bound: The agent should not return from `execute()`. It should wait + for the out-of-band auth provider to complete the authentication and then + continue execution. + + - **Cancellation Workflow**: When a cancellation request is received, the + async task running `execute()` is cancelled (raising an `asyncio.CancelledError`), + and `cancel()` is explicitly called by the framework. + + Allowed Workflows: + - Immediate response: Enqueue a SINGLE `Message` object. + - Asynchronous/Long-running: Enqueue a `Task` object, perform work, and emit + multiple `TaskStatusUpdateEvent` / `TaskArtifactUpdateEvent` objects over time. + + Note that the framework waits with response to the send_message request with + `return_immediately=True` parameter until the first event (Message or Task) + is enqueued by AgentExecutor. + Args: context: The request context containing the message, task ID, etc. event_queue: The queue to publish events to. @@ -36,7 +74,7 @@ async def cancel( The agent should attempt to stop the task identified by the task_id in the context and publish a `TaskStatusUpdateEvent` with state - `TaskState.canceled` to the `event_queue`. + `TaskState.TASK_STATE_CANCELED` to the `event_queue`. Args: context: The request context containing the task ID to cancel. diff --git a/src/a2a/server/agent_execution/context.py b/src/a2a/server/agent_execution/context.py index cd9f8f973..5fcdf8697 100644 --- a/src/a2a/server/agent_execution/context.py +++ b/src/a2a/server/agent_execution/context.py @@ -1,20 +1,19 @@ from typing import Any +from a2a.helpers.proto_helpers import get_message_text from a2a.server.context import ServerCallContext from a2a.server.id_generator import ( IDGenerator, IDGeneratorContext, UUIDGenerator, ) -from a2a.types import ( - InvalidParamsError, +from a2a.types.a2a_pb2 import ( Message, - MessageSendConfiguration, - MessageSendParams, + SendMessageConfiguration, + SendMessageRequest, Task, ) -from a2a.utils import get_message_text -from a2a.utils.errors import ServerError +from a2a.utils.errors import InvalidParamsError class RequestContext: @@ -27,35 +26,35 @@ class RequestContext: def __init__( # noqa: PLR0913 self, - request: MessageSendParams | None = None, + call_context: ServerCallContext, + request: SendMessageRequest | None = None, task_id: str | None = None, context_id: str | None = None, task: Task | None = None, related_tasks: list[Task] | None = None, - call_context: ServerCallContext | None = None, task_id_generator: IDGenerator | None = None, context_id_generator: IDGenerator | None = None, ): """Initializes the RequestContext. Args: - request: The incoming `MessageSendParams` request payload. + call_context: The server call context associated with this request. + request: The incoming `SendMessageRequest` request payload. task_id: The ID of the task explicitly provided in the request or path. context_id: The ID of the context explicitly provided in the request or path. task: The existing `Task` object retrieved from the store, if any. related_tasks: A list of other tasks related to the current request (e.g., for tool use). - call_context: The server call context associated with this request. task_id_generator: ID generator for new task IDs. Defaults to UUID generator. context_id_generator: ID generator for new context IDs. Defaults to UUID generator. """ if related_tasks is None: related_tasks = [] + self._call_context = call_context self._params = request self._task_id = task_id self._context_id = context_id self._current_task = task self._related_tasks = related_tasks - self._call_context = call_context self._task_id_generator = ( task_id_generator if task_id_generator else UUIDGenerator() ) @@ -68,15 +67,13 @@ def __init__( # noqa: PLR0913 if task_id: self._params.message.task_id = task_id if task and task.id != task_id: - raise ServerError(InvalidParamsError(message='bad task id')) + raise InvalidParamsError(message='bad task id') else: self._check_or_generate_task_id() if context_id: self._params.message.context_id = context_id if task and task.context_id != context_id: - raise ServerError( - InvalidParamsError(message='bad context id') - ) + raise InvalidParamsError(message='bad context id') else: self._check_or_generate_context_id() @@ -123,7 +120,7 @@ def current_task(self) -> Task | None: return self._current_task @current_task.setter - def current_task(self, task: Task) -> None: + def current_task(self, task: Task | None) -> None: """Sets the current task object.""" self._current_task = task @@ -138,37 +135,31 @@ def context_id(self) -> str | None: return self._context_id @property - def configuration(self) -> MessageSendConfiguration | None: - """The `MessageSendConfiguration` from the request, if available.""" + def configuration(self) -> SendMessageConfiguration | None: + """The `SendMessageConfiguration` from the request, if available.""" return self._params.configuration if self._params else None @property - def call_context(self) -> ServerCallContext | None: + def call_context(self) -> ServerCallContext: """The server call context associated with this request.""" return self._call_context @property def metadata(self) -> dict[str, Any]: """Metadata associated with the request, if available.""" - return self._params.metadata or {} if self._params else {} + if self._params and self._params.metadata: + return dict(self._params.metadata) + return {} - def add_activated_extension(self, uri: str) -> None: - """Add an extension to the set of activated extensions for this request. - - This causes the extension to be indicated back to the client in the - response. - """ - if self._call_context: - self._call_context.activated_extensions.add(uri) + @property + def tenant(self) -> str: + """The tenant associated with this request.""" + return self._call_context.tenant @property def requested_extensions(self) -> set[str]: - """Extensions that the client requested to activate.""" - return ( - self._call_context.requested_extensions - if self._call_context - else set() - ) + """Extensions that the client requested for this interaction.""" + return self._call_context.requested_extensions def _check_or_generate_task_id(self) -> None: """Ensures a task ID is present, generating one if necessary.""" diff --git a/src/a2a/server/agent_execution/request_context_builder.py b/src/a2a/server/agent_execution/request_context_builder.py index 2a3ad4db5..cab82b401 100644 --- a/src/a2a/server/agent_execution/request_context_builder.py +++ b/src/a2a/server/agent_execution/request_context_builder.py @@ -2,7 +2,7 @@ from a2a.server.agent_execution import RequestContext from a2a.server.context import ServerCallContext -from a2a.types import MessageSendParams, Task +from a2a.types.a2a_pb2 import SendMessageRequest, Task class RequestContextBuilder(ABC): @@ -11,10 +11,10 @@ class RequestContextBuilder(ABC): @abstractmethod async def build( self, - params: MessageSendParams | None = None, + context: ServerCallContext, + params: SendMessageRequest | None = None, task_id: str | None = None, context_id: str | None = None, task: Task | None = None, - context: ServerCallContext | None = None, ) -> RequestContext: pass diff --git a/src/a2a/server/agent_execution/simple_request_context_builder.py b/src/a2a/server/agent_execution/simple_request_context_builder.py index 876b6561e..5f2b7c521 100644 --- a/src/a2a/server/agent_execution/simple_request_context_builder.py +++ b/src/a2a/server/agent_execution/simple_request_context_builder.py @@ -4,7 +4,7 @@ from a2a.server.context import ServerCallContext from a2a.server.id_generator import IDGenerator from a2a.server.tasks import TaskStore -from a2a.types import MessageSendParams, Task +from a2a.types.a2a_pb2 import SendMessageRequest, Task class SimpleRequestContextBuilder(RequestContextBuilder): @@ -35,11 +35,11 @@ def __init__( async def build( self, - params: MessageSendParams | None = None, + context: ServerCallContext, + params: SendMessageRequest | None = None, task_id: str | None = None, context_id: str | None = None, task: Task | None = None, - context: ServerCallContext | None = None, ) -> RequestContext: """Builds the request context for an agent execution. @@ -48,11 +48,11 @@ async def build( referenced in `params.message.reference_task_ids` from the `task_store`. Args: + context: The server call context, containing metadata about the call. params: The parameters of the incoming message send request. task_id: The ID of the task being executed. context_id: The ID of the current execution context. task: The primary task object associated with the request. - context: The server call context, containing metadata about the call. Returns: An instance of RequestContext populated with the provided information @@ -68,19 +68,19 @@ async def build( ): tasks = await asyncio.gather( *[ - self._task_store.get(task_id) + self._task_store.get(task_id, context) for task_id in params.message.reference_task_ids ] ) related_tasks = [x for x in tasks if x is not None] return RequestContext( + call_context=context, request=params, task_id=task_id, context_id=context_id, task=task, related_tasks=related_tasks, - call_context=context, task_id_generator=self._task_id_generator, context_id_generator=self._context_id_generator, ) diff --git a/src/a2a/server/apps/__init__.py b/src/a2a/server/apps/__init__.py deleted file mode 100644 index 579deaa54..000000000 --- a/src/a2a/server/apps/__init__.py +++ /dev/null @@ -1,18 +0,0 @@ -"""HTTP application components for the A2A server.""" - -from a2a.server.apps.jsonrpc import ( - A2AFastAPIApplication, - A2AStarletteApplication, - CallContextBuilder, - JSONRPCApplication, -) -from a2a.server.apps.rest import A2ARESTFastAPIApplication - - -__all__ = [ - 'A2AFastAPIApplication', - 'A2ARESTFastAPIApplication', - 'A2AStarletteApplication', - 'CallContextBuilder', - 'JSONRPCApplication', -] diff --git a/src/a2a/server/apps/jsonrpc/__init__.py b/src/a2a/server/apps/jsonrpc/__init__.py deleted file mode 100644 index 1121fdbc3..000000000 --- a/src/a2a/server/apps/jsonrpc/__init__.py +++ /dev/null @@ -1,20 +0,0 @@ -"""A2A JSON-RPC Applications.""" - -from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication -from a2a.server.apps.jsonrpc.jsonrpc_app import ( - CallContextBuilder, - DefaultCallContextBuilder, - JSONRPCApplication, - StarletteUserProxy, -) -from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication - - -__all__ = [ - 'A2AFastAPIApplication', - 'A2AStarletteApplication', - 'CallContextBuilder', - 'DefaultCallContextBuilder', - 'JSONRPCApplication', - 'StarletteUserProxy', -] diff --git a/src/a2a/server/apps/jsonrpc/fastapi_app.py b/src/a2a/server/apps/jsonrpc/fastapi_app.py deleted file mode 100644 index dfd92d87c..000000000 --- a/src/a2a/server/apps/jsonrpc/fastapi_app.py +++ /dev/null @@ -1,187 +0,0 @@ -import logging - -from collections.abc import Awaitable, Callable -from typing import TYPE_CHECKING, Any - - -if TYPE_CHECKING: - from fastapi import FastAPI - - _package_fastapi_installed = True -else: - try: - from fastapi import FastAPI - - _package_fastapi_installed = True - except ImportError: - FastAPI = Any - - _package_fastapi_installed = False - -from a2a.server.apps.jsonrpc.jsonrpc_app import ( - CallContextBuilder, - JSONRPCApplication, -) -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.jsonrpc_handler import RequestHandler -from a2a.types import A2ARequest, AgentCard -from a2a.utils.constants import ( - AGENT_CARD_WELL_KNOWN_PATH, - DEFAULT_RPC_URL, - EXTENDED_AGENT_CARD_PATH, - PREV_AGENT_CARD_WELL_KNOWN_PATH, -) - - -logger = logging.getLogger(__name__) - - -class A2AFastAPI(FastAPI): - """A FastAPI application that adds A2A-specific OpenAPI components.""" - - _a2a_components_added: bool = False - - def openapi(self) -> dict[str, Any]: - """Generates the OpenAPI schema for the application.""" - openapi_schema = super().openapi() - if not self._a2a_components_added: - a2a_request_schema = A2ARequest.model_json_schema( - ref_template='#/components/schemas/{model}' - ) - defs = a2a_request_schema.pop('$defs', {}) - component_schemas = openapi_schema.setdefault( - 'components', {} - ).setdefault('schemas', {}) - component_schemas.update(defs) - component_schemas['A2ARequest'] = a2a_request_schema - self._a2a_components_added = True - return openapi_schema - - -class A2AFastAPIApplication(JSONRPCApplication): - """A FastAPI application implementing the A2A protocol server endpoints. - - Handles incoming JSON-RPC requests, routes them to the appropriate - handler methods, and manages response generation including Server-Sent Events - (SSE). - """ - - def __init__( # noqa: PLR0913 - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - max_content_length: int | None = 10 * 1024 * 1024, # 10MB - ) -> None: - """Initializes the A2AFastAPIApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - max_content_length: The maximum allowed content length for incoming - requests. Defaults to 10MB. Set to None for unbounded maximum. - """ - if not _package_fastapi_installed: - raise ImportError( - 'The `fastapi` package is required to use the `A2AFastAPIApplication`.' - ' It can be added as a part of `a2a-sdk` optional dependencies,' - ' `a2a-sdk[http-server]`.' - ) - super().__init__( - agent_card=agent_card, - http_handler=http_handler, - extended_agent_card=extended_agent_card, - context_builder=context_builder, - card_modifier=card_modifier, - extended_card_modifier=extended_card_modifier, - max_content_length=max_content_length, - ) - - def add_routes_to_app( - self, - app: FastAPI, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - ) -> None: - """Adds the routes to the FastAPI application. - - Args: - app: The FastAPI application to add the routes to. - agent_card_url: The URL for the agent card endpoint. - rpc_url: The URL for the A2A JSON-RPC endpoint. - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - """ - app.post( - rpc_url, - openapi_extra={ - 'requestBody': { - 'content': { - 'application/json': { - 'schema': { - '$ref': '#/components/schemas/A2ARequest' - } - } - }, - 'required': True, - 'description': 'A2ARequest', - } - }, - )(self._handle_requests) - app.get(agent_card_url)(self._handle_get_agent_card) - - if agent_card_url == AGENT_CARD_WELL_KNOWN_PATH: - # For backward compatibility, serve the agent card at the deprecated path as well. - # TODO: remove in a future release - app.get(PREV_AGENT_CARD_WELL_KNOWN_PATH)( - self._handle_get_agent_card - ) - - if self.agent_card.supports_authenticated_extended_card: - app.get(extended_agent_card_url)( - self._handle_get_authenticated_extended_agent_card - ) - - def build( - self, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - **kwargs: Any, - ) -> FastAPI: - """Builds and returns the FastAPI application instance. - - Args: - agent_card_url: The URL for the agent card endpoint. - rpc_url: The URL for the A2A JSON-RPC endpoint. - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - **kwargs: Additional keyword arguments to pass to the FastAPI constructor. - - Returns: - A configured FastAPI application instance. - """ - app = A2AFastAPI(**kwargs) - - self.add_routes_to_app( - app, agent_card_url, rpc_url, extended_agent_card_url - ) - - return app diff --git a/src/a2a/server/apps/jsonrpc/jsonrpc_app.py b/src/a2a/server/apps/jsonrpc/jsonrpc_app.py deleted file mode 100644 index c6f78d119..000000000 --- a/src/a2a/server/apps/jsonrpc/jsonrpc_app.py +++ /dev/null @@ -1,667 +0,0 @@ -import contextlib -import json -import logging -import traceback - -from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator, Awaitable, Callable -from typing import TYPE_CHECKING, Any - -from pydantic import ValidationError - -from a2a.auth.user import UnauthenticatedUser -from a2a.auth.user import User as A2AUser -from a2a.extensions.common import ( - HTTP_EXTENSION_HEADER, - get_requested_extensions, -) -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.jsonrpc_handler import JSONRPCHandler -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import ( - A2AError, - A2ARequest, - AgentCard, - CancelTaskRequest, - DeleteTaskPushNotificationConfigRequest, - GetAuthenticatedExtendedCardRequest, - GetTaskPushNotificationConfigRequest, - GetTaskRequest, - InternalError, - InvalidParamsError, - InvalidRequestError, - JSONParseError, - JSONRPCError, - JSONRPCErrorResponse, - JSONRPCRequest, - JSONRPCResponse, - ListTaskPushNotificationConfigRequest, - MethodNotFoundError, - SendMessageRequest, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SetTaskPushNotificationConfigRequest, - TaskResubscriptionRequest, - UnsupportedOperationError, -) -from a2a.utils.constants import ( - AGENT_CARD_WELL_KNOWN_PATH, - DEFAULT_RPC_URL, - EXTENDED_AGENT_CARD_PATH, - PREV_AGENT_CARD_WELL_KNOWN_PATH, -) -from a2a.utils.errors import MethodNotImplementedError -from a2a.utils.helpers import maybe_await - - -logger = logging.getLogger(__name__) - -if TYPE_CHECKING: - from fastapi import FastAPI - from sse_starlette.sse import EventSourceResponse - from starlette.applications import Starlette - from starlette.authentication import BaseUser - from starlette.exceptions import HTTPException - from starlette.requests import Request - from starlette.responses import JSONResponse, Response - - try: - # Starlette v0.48.0 - from starlette.status import HTTP_413_CONTENT_TOO_LARGE - except ImportError: - from starlette.status import ( # type: ignore[no-redef] - HTTP_413_REQUEST_ENTITY_TOO_LARGE as HTTP_413_CONTENT_TOO_LARGE, - ) - - _package_starlette_installed = True -else: - FastAPI = Any - try: - from sse_starlette.sse import EventSourceResponse - from starlette.applications import Starlette - from starlette.authentication import BaseUser - from starlette.exceptions import HTTPException - from starlette.requests import Request - from starlette.responses import JSONResponse, Response - - try: - # Starlette v0.48.0 - from starlette.status import HTTP_413_CONTENT_TOO_LARGE - except ImportError: - from starlette.status import ( - HTTP_413_REQUEST_ENTITY_TOO_LARGE as HTTP_413_CONTENT_TOO_LARGE, - ) - - _package_starlette_installed = True - except ImportError: - _package_starlette_installed = False - # Provide placeholder types for runtime type hinting when dependencies are not installed. - # These will not be used if the code path that needs them is guarded by _http_server_installed. - EventSourceResponse = Any - Starlette = Any - BaseUser = Any - HTTPException = Any - Request = Any - JSONResponse = Any - Response = Any - HTTP_413_CONTENT_TOO_LARGE = Any - - -class StarletteUserProxy(A2AUser): - """Adapts the Starlette User class to the A2A user representation.""" - - def __init__(self, user: BaseUser): - self._user = user - - @property - def is_authenticated(self) -> bool: - """Returns whether the current user is authenticated.""" - return self._user.is_authenticated - - @property - def user_name(self) -> str: - """Returns the user name of the current user.""" - return self._user.display_name - - -class CallContextBuilder(ABC): - """A class for building ServerCallContexts using the Starlette Request.""" - - @abstractmethod - def build(self, request: Request) -> ServerCallContext: - """Builds a ServerCallContext from a Starlette Request.""" - - -class DefaultCallContextBuilder(CallContextBuilder): - """A default implementation of CallContextBuilder.""" - - def build(self, request: Request) -> ServerCallContext: - """Builds a ServerCallContext from a Starlette Request. - - Args: - request: The incoming Starlette Request object. - - Returns: - A ServerCallContext instance populated with user and state - information from the request. - """ - user: A2AUser = UnauthenticatedUser() - state = {} - with contextlib.suppress(Exception): - user = StarletteUserProxy(request.user) - state['auth'] = request.auth - state['headers'] = dict(request.headers) - return ServerCallContext( - user=user, - state=state, - requested_extensions=get_requested_extensions( - request.headers.getlist(HTTP_EXTENSION_HEADER) - ), - ) - - -class JSONRPCApplication(ABC): - """Base class for A2A JSONRPC applications. - - Handles incoming JSON-RPC requests, routes them to the appropriate - handler methods, and manages response generation including Server-Sent Events - (SSE). - """ - - # Method-to-model mapping for centralized routing - A2ARequestModel = ( - SendMessageRequest - | SendStreamingMessageRequest - | GetTaskRequest - | CancelTaskRequest - | SetTaskPushNotificationConfigRequest - | GetTaskPushNotificationConfigRequest - | ListTaskPushNotificationConfigRequest - | DeleteTaskPushNotificationConfigRequest - | TaskResubscriptionRequest - | GetAuthenticatedExtendedCardRequest - ) - - METHOD_TO_MODEL: dict[str, type[A2ARequestModel]] = { - model.model_fields['method'].default: model - for model in A2ARequestModel.__args__ - } - - def __init__( # noqa: PLR0913 - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - max_content_length: int | None = 10 * 1024 * 1024, # 10MB - ) -> None: - """Initializes the JSONRPCApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - max_content_length: The maximum allowed content length for incoming - requests. Defaults to 10MB. Set to None for unbounded maximum. - """ - if not _package_starlette_installed: - raise ImportError( - 'Packages `starlette` and `sse-starlette` are required to use the' - ' `JSONRPCApplication`. They can be added as a part of `a2a-sdk`' - ' optional dependencies, `a2a-sdk[http-server]`.' - ) - self.agent_card = agent_card - self.extended_agent_card = extended_agent_card - self.card_modifier = card_modifier - self.extended_card_modifier = extended_card_modifier - self.handler = JSONRPCHandler( - agent_card=agent_card, - request_handler=http_handler, - extended_agent_card=extended_agent_card, - extended_card_modifier=extended_card_modifier, - ) - self._context_builder = context_builder or DefaultCallContextBuilder() - self._max_content_length = max_content_length - - def _generate_error_response( - self, request_id: str | int | None, error: JSONRPCError | A2AError - ) -> JSONResponse: - """Creates a Starlette JSONResponse for a JSON-RPC error. - - Logs the error based on its type. - - Args: - request_id: The ID of the request that caused the error. - error: The `JSONRPCError` or `A2AError` object. - - Returns: - A `JSONResponse` object formatted as a JSON-RPC error response. - """ - error_resp = JSONRPCErrorResponse( - id=request_id, - error=error if isinstance(error, JSONRPCError) else error.root, - ) - - log_level = ( - logging.ERROR - if not isinstance(error, A2AError) - or isinstance(error.root, InternalError) - else logging.WARNING - ) - logger.log( - log_level, - "Request Error (ID: %s): Code=%s, Message='%s'%s", - request_id, - error_resp.error.code, - error_resp.error.message, - ', Data=' + str(error_resp.error.data) - if error_resp.error.data - else '', - ) - return JSONResponse( - error_resp.model_dump(mode='json', exclude_none=True), - status_code=200, - ) - - def _allowed_content_length(self, request: Request) -> bool: - """Checks if the request content length is within the allowed maximum. - - Args: - request: The incoming Starlette Request object. - - Returns: - False if the content length is larger than the allowed maximum, True otherwise. - """ - if self._max_content_length is not None: - with contextlib.suppress(ValueError): - content_length = int(request.headers.get('content-length', '0')) - if content_length and content_length > self._max_content_length: - return False - return True - - async def _handle_requests(self, request: Request) -> Response: # noqa: PLR0911 - """Handles incoming POST requests to the main A2A endpoint. - - Parses the request body as JSON, validates it against A2A request types, - dispatches it to the appropriate handler method, and returns the response. - Handles JSON parsing errors, validation errors, and other exceptions, - returning appropriate JSON-RPC error responses. - - Args: - request: The incoming Starlette Request object. - - Returns: - A Starlette Response object (JSONResponse or EventSourceResponse). - - Raises: - (Implicitly handled): Various exceptions are caught and converted - into JSON-RPC error responses by this method. - """ - request_id = None - body = None - - try: - body = await request.json() - if isinstance(body, dict): - request_id = body.get('id') - # Ensure request_id is valid for JSON-RPC response (str/int/None only) - if request_id is not None and not isinstance( - request_id, str | int - ): - request_id = None - # Treat payloads lager than allowed as invalid request (-32600) before routing - if not self._allowed_content_length(request): - return self._generate_error_response( - request_id, - A2AError( - root=InvalidRequestError(message='Payload too large') - ), - ) - logger.debug('Request body: %s', body) - # 1) Validate base JSON-RPC structure only (-32600 on failure) - try: - base_request = JSONRPCRequest.model_validate(body) - except ValidationError as e: - logger.exception('Failed to validate base JSON-RPC request') - return self._generate_error_response( - request_id, - A2AError( - root=InvalidRequestError(data=json.loads(e.json())) - ), - ) - - # 2) Route by method name; unknown -> -32601, known -> validate params (-32602 on failure) - method = base_request.method - - model_class = self.METHOD_TO_MODEL.get(method) - if not model_class: - return self._generate_error_response( - request_id, A2AError(root=MethodNotFoundError()) - ) - try: - specific_request = model_class.model_validate(body) - except ValidationError as e: - logger.exception('Failed to validate base JSON-RPC request') - return self._generate_error_response( - request_id, - A2AError( - root=InvalidParamsError(data=json.loads(e.json())) - ), - ) - - # 3) Build call context and wrap the request for downstream handling - call_context = self._context_builder.build(request) - call_context.state['method'] = method - - request_id = specific_request.id - a2a_request = A2ARequest(root=specific_request) - request_obj = a2a_request.root - - if isinstance( - request_obj, - TaskResubscriptionRequest | SendStreamingMessageRequest, - ): - return await self._process_streaming_request( - request_id, a2a_request, call_context - ) - - return await self._process_non_streaming_request( - request_id, a2a_request, call_context - ) - except MethodNotImplementedError: - traceback.print_exc() - return self._generate_error_response( - request_id, A2AError(root=UnsupportedOperationError()) - ) - except json.decoder.JSONDecodeError as e: - traceback.print_exc() - return self._generate_error_response( - None, A2AError(root=JSONParseError(message=str(e))) - ) - except HTTPException as e: - if e.status_code == HTTP_413_CONTENT_TOO_LARGE: - return self._generate_error_response( - request_id, - A2AError( - root=InvalidRequestError(message='Payload too large') - ), - ) - raise e - except Exception as e: - logger.exception('Unhandled exception') - return self._generate_error_response( - request_id, A2AError(root=InternalError(message=str(e))) - ) - - async def _process_streaming_request( - self, - request_id: str | int | None, - a2a_request: A2ARequest, - context: ServerCallContext, - ) -> Response: - """Processes streaming requests (message/stream or tasks/resubscribe). - - Args: - request_id: The ID of the request. - a2a_request: The validated A2ARequest object. - context: The ServerCallContext for the request. - - Returns: - An `EventSourceResponse` object to stream results to the client. - """ - request_obj = a2a_request.root - handler_result: Any = None - if isinstance( - request_obj, - SendStreamingMessageRequest, - ): - handler_result = self.handler.on_message_send_stream( - request_obj, context - ) - elif isinstance(request_obj, TaskResubscriptionRequest): - handler_result = self.handler.on_resubscribe_to_task( - request_obj, context - ) - - return self._create_response(context, handler_result) - - async def _process_non_streaming_request( - self, - request_id: str | int | None, - a2a_request: A2ARequest, - context: ServerCallContext, - ) -> Response: - """Processes non-streaming requests (message/send, tasks/get, tasks/cancel, tasks/pushNotificationConfig/*). - - Args: - request_id: The ID of the request. - a2a_request: The validated A2ARequest object. - context: The ServerCallContext for the request. - - Returns: - A `JSONResponse` object containing the result or error. - """ - request_obj = a2a_request.root - handler_result: Any = None - match request_obj: - case SendMessageRequest(): - handler_result = await self.handler.on_message_send( - request_obj, context - ) - case CancelTaskRequest(): - handler_result = await self.handler.on_cancel_task( - request_obj, context - ) - case GetTaskRequest(): - handler_result = await self.handler.on_get_task( - request_obj, context - ) - case SetTaskPushNotificationConfigRequest(): - handler_result = ( - await self.handler.set_push_notification_config( - request_obj, - context, - ) - ) - case GetTaskPushNotificationConfigRequest(): - handler_result = ( - await self.handler.get_push_notification_config( - request_obj, - context, - ) - ) - case ListTaskPushNotificationConfigRequest(): - handler_result = ( - await self.handler.list_push_notification_config( - request_obj, - context, - ) - ) - case DeleteTaskPushNotificationConfigRequest(): - handler_result = ( - await self.handler.delete_push_notification_config( - request_obj, - context, - ) - ) - case GetAuthenticatedExtendedCardRequest(): - handler_result = ( - await self.handler.get_authenticated_extended_card( - request_obj, - context, - ) - ) - case _: - logger.error( - 'Unhandled validated request type: %s', type(request_obj) - ) - error = UnsupportedOperationError( - message=f'Request type {type(request_obj).__name__} is unknown.' - ) - handler_result = JSONRPCErrorResponse( - id=request_id, error=error - ) - - return self._create_response(context, handler_result) - - def _create_response( - self, - context: ServerCallContext, - handler_result: ( - AsyncGenerator[SendStreamingMessageResponse] - | JSONRPCErrorResponse - | JSONRPCResponse - ), - ) -> Response: - """Creates a Starlette Response based on the result from the request handler. - - Handles: - - AsyncGenerator for Server-Sent Events (SSE). - - JSONRPCErrorResponse for explicit errors returned by handlers. - - Pydantic RootModels (like GetTaskResponse) containing success or error - payloads. - - Args: - context: The ServerCallContext provided to the request handler. - handler_result: The result from a request handler method. Can be an - async generator for streaming or a Pydantic model for non-streaming. - - Returns: - A Starlette JSONResponse or EventSourceResponse. - """ - headers = {} - if exts := context.activated_extensions: - headers[HTTP_EXTENSION_HEADER] = ', '.join(sorted(exts)) - if isinstance(handler_result, AsyncGenerator): - # Result is a stream of SendStreamingMessageResponse objects - async def event_generator( - stream: AsyncGenerator[SendStreamingMessageResponse], - ) -> AsyncGenerator[dict[str, str]]: - async for item in stream: - yield {'data': item.root.model_dump_json(exclude_none=True)} - - return EventSourceResponse( - event_generator(handler_result), headers=headers - ) - if isinstance(handler_result, JSONRPCErrorResponse): - return JSONResponse( - handler_result.model_dump( - mode='json', - exclude_none=True, - ), - headers=headers, - ) - - return JSONResponse( - handler_result.root.model_dump(mode='json', exclude_none=True), - headers=headers, - ) - - async def _handle_get_agent_card(self, request: Request) -> JSONResponse: - """Handles GET requests for the agent card endpoint. - - Args: - request: The incoming Starlette Request object. - - Returns: - A JSONResponse containing the agent card data. - """ - if request.url.path == PREV_AGENT_CARD_WELL_KNOWN_PATH: - logger.warning( - "Deprecated agent card endpoint '%s' accessed. " - "Please use '%s' instead. This endpoint will be removed in a future version.", - PREV_AGENT_CARD_WELL_KNOWN_PATH, - AGENT_CARD_WELL_KNOWN_PATH, - ) - - card_to_serve = self.agent_card - if self.card_modifier: - card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) - - return JSONResponse( - card_to_serve.model_dump( - exclude_none=True, - by_alias=True, - ) - ) - - async def _handle_get_authenticated_extended_agent_card( - self, request: Request - ) -> JSONResponse: - """Handles GET requests for the authenticated extended agent card.""" - logger.warning( - 'HTTP GET for authenticated extended card has been called by a client. ' - 'This endpoint is deprecated in favor of agent/authenticatedExtendedCard JSON-RPC method and will be removed in a future release.' - ) - if not self.agent_card.supports_authenticated_extended_card: - return JSONResponse( - {'error': 'Extended agent card not supported or not enabled.'}, - status_code=404, - ) - - card_to_serve = self.extended_agent_card - - if self.extended_card_modifier: - context = self._context_builder.build(request) - # If no base extended card is provided, pass the public card to the modifier - base_card = card_to_serve if card_to_serve else self.agent_card - card_to_serve = await maybe_await( - self.extended_card_modifier(base_card, context) - ) - - if card_to_serve: - return JSONResponse( - card_to_serve.model_dump( - exclude_none=True, - by_alias=True, - ) - ) - # If supports_authenticated_extended_card is true, but no - # extended_agent_card was provided, and no modifier produced a card, - # return a 404. - return JSONResponse( - { - 'error': 'Authenticated extended agent card is supported but not configured on the server.' - }, - status_code=404, - ) - - @abstractmethod - def build( - self, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - **kwargs: Any, - ) -> FastAPI | Starlette: - """Builds and returns the JSONRPC application instance. - - Args: - agent_card_url: The URL for the agent card endpoint. - rpc_url: The URL for the A2A JSON-RPC endpoint. - extended_agent_card_url: The URL for the authenticated extended - agent card endpoint. - **kwargs: Additional keyword arguments to pass to the FastAPI constructor. - - Returns: - A configured JSONRPC application instance. - """ - raise NotImplementedError( - 'Subclasses must implement the build method to create the application instance.' - ) diff --git a/src/a2a/server/apps/jsonrpc/starlette_app.py b/src/a2a/server/apps/jsonrpc/starlette_app.py deleted file mode 100644 index ceaf5ced1..000000000 --- a/src/a2a/server/apps/jsonrpc/starlette_app.py +++ /dev/null @@ -1,201 +0,0 @@ -import logging - -from collections.abc import Awaitable, Callable -from typing import TYPE_CHECKING, Any - - -if TYPE_CHECKING: - from starlette.applications import Starlette - from starlette.routing import Route - - _package_starlette_installed = True - -else: - try: - from starlette.applications import Starlette - from starlette.routing import Route - - _package_starlette_installed = True - except ImportError: - Starlette = Any - Route = Any - - _package_starlette_installed = False - -from a2a.server.apps.jsonrpc.jsonrpc_app import ( - CallContextBuilder, - JSONRPCApplication, -) -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.jsonrpc_handler import RequestHandler -from a2a.types import AgentCard -from a2a.utils.constants import ( - AGENT_CARD_WELL_KNOWN_PATH, - DEFAULT_RPC_URL, - EXTENDED_AGENT_CARD_PATH, - PREV_AGENT_CARD_WELL_KNOWN_PATH, -) - - -logger = logging.getLogger(__name__) - - -class A2AStarletteApplication(JSONRPCApplication): - """A Starlette application implementing the A2A protocol server endpoints. - - Handles incoming JSON-RPC requests, routes them to the appropriate - handler methods, and manages response generation including Server-Sent Events - (SSE). - """ - - def __init__( # noqa: PLR0913 - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - max_content_length: int | None = 10 * 1024 * 1024, # 10MB - ) -> None: - """Initializes the A2AStarletteApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - max_content_length: The maximum allowed content length for incoming - requests. Defaults to 10MB. Set to None for unbounded maximum. - """ - if not _package_starlette_installed: - raise ImportError( - 'Packages `starlette` and `sse-starlette` are required to use the' - ' `A2AStarletteApplication`. It can be added as a part of `a2a-sdk`' - ' optional dependencies, `a2a-sdk[http-server]`.' - ) - super().__init__( - agent_card=agent_card, - http_handler=http_handler, - extended_agent_card=extended_agent_card, - context_builder=context_builder, - card_modifier=card_modifier, - extended_card_modifier=extended_card_modifier, - max_content_length=max_content_length, - ) - - def routes( - self, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - ) -> list[Route]: - """Returns the Starlette Routes for handling A2A requests. - - Args: - agent_card_url: The URL path for the agent card endpoint. - rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - - Returns: - A list of Starlette Route objects. - """ - app_routes = [ - Route( - rpc_url, - self._handle_requests, - methods=['POST'], - name='a2a_handler', - ), - Route( - agent_card_url, - self._handle_get_agent_card, - methods=['GET'], - name='agent_card', - ), - ] - - if agent_card_url == AGENT_CARD_WELL_KNOWN_PATH: - # For backward compatibility, serve the agent card at the deprecated path as well. - # TODO: remove in a future release - app_routes.append( - Route( - PREV_AGENT_CARD_WELL_KNOWN_PATH, - self._handle_get_agent_card, - methods=['GET'], - name='deprecated_agent_card', - ) - ) - - # TODO: deprecated endpoint to be removed in a future release - if self.agent_card.supports_authenticated_extended_card: - app_routes.append( - Route( - extended_agent_card_url, - self._handle_get_authenticated_extended_agent_card, - methods=['GET'], - name='authenticated_extended_agent_card', - ) - ) - return app_routes - - def add_routes_to_app( - self, - app: Starlette, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - ) -> None: - """Adds the routes to the Starlette application. - - Args: - app: The Starlette application to add the routes to. - agent_card_url: The URL path for the agent card endpoint. - rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - """ - routes = self.routes( - agent_card_url=agent_card_url, - rpc_url=rpc_url, - extended_agent_card_url=extended_agent_card_url, - ) - app.routes.extend(routes) - - def build( - self, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = DEFAULT_RPC_URL, - extended_agent_card_url: str = EXTENDED_AGENT_CARD_PATH, - **kwargs: Any, - ) -> Starlette: - """Builds and returns the Starlette application instance. - - Args: - agent_card_url: The URL path for the agent card endpoint. - rpc_url: The URL path for the A2A JSON-RPC endpoint (POST requests). - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - **kwargs: Additional keyword arguments to pass to the Starlette constructor. - - Returns: - A configured Starlette application instance. - """ - app = Starlette(**kwargs) - - self.add_routes_to_app( - app, agent_card_url, rpc_url, extended_agent_card_url - ) - - return app diff --git a/src/a2a/server/apps/rest/__init__.py b/src/a2a/server/apps/rest/__init__.py deleted file mode 100644 index bafe4cb60..000000000 --- a/src/a2a/server/apps/rest/__init__.py +++ /dev/null @@ -1,8 +0,0 @@ -"""A2A REST Applications.""" - -from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication - - -__all__ = [ - 'A2ARESTFastAPIApplication', -] diff --git a/src/a2a/server/apps/rest/fastapi_app.py b/src/a2a/server/apps/rest/fastapi_app.py deleted file mode 100644 index 12a03de84..000000000 --- a/src/a2a/server/apps/rest/fastapi_app.py +++ /dev/null @@ -1,121 +0,0 @@ -import logging - -from collections.abc import Awaitable, Callable -from typing import TYPE_CHECKING, Any - - -if TYPE_CHECKING: - from fastapi import APIRouter, FastAPI, Request, Response - from fastapi.responses import JSONResponse - - _package_fastapi_installed = True -else: - try: - from fastapi import APIRouter, FastAPI, Request, Response - from fastapi.responses import JSONResponse - - _package_fastapi_installed = True - except ImportError: - APIRouter = Any - FastAPI = Any - Request = Any - Response = Any - - _package_fastapi_installed = False - - -from a2a.server.apps.jsonrpc.jsonrpc_app import CallContextBuilder -from a2a.server.apps.rest.rest_adapter import RESTAdapter -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import AgentCard -from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH - - -logger = logging.getLogger(__name__) - - -class A2ARESTFastAPIApplication: - """A FastAPI application implementing the A2A protocol server REST endpoints. - - Handles incoming REST requests, routes them to the appropriate - handler methods, and manages response generation including Server-Sent Events - (SSE). - """ - - def __init__( # noqa: PLR0913 - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - ): - """Initializes the A2ARESTFastAPIApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - """ - if not _package_fastapi_installed: - raise ImportError( - 'The `fastapi` package is required to use the' - ' `A2ARESTFastAPIApplication`. It can be added as a part of' - ' `a2a-sdk` optional dependencies, `a2a-sdk[http-server]`.' - ) - self._adapter = RESTAdapter( - agent_card=agent_card, - http_handler=http_handler, - extended_agent_card=extended_agent_card, - context_builder=context_builder, - card_modifier=card_modifier, - extended_card_modifier=extended_card_modifier, - ) - - def build( - self, - agent_card_url: str = AGENT_CARD_WELL_KNOWN_PATH, - rpc_url: str = '', - **kwargs: Any, - ) -> FastAPI: - """Builds and returns the FastAPI application instance. - - Args: - agent_card_url: The URL for the agent card endpoint. - rpc_url: The URL for the A2A JSON-RPC endpoint. - extended_agent_card_url: The URL for the authenticated extended agent card endpoint. - **kwargs: Additional keyword arguments to pass to the FastAPI constructor. - - Returns: - A configured FastAPI application instance. - """ - app = FastAPI(**kwargs) - router = APIRouter() - for route, callback in self._adapter.routes().items(): - router.add_api_route( - f'{rpc_url}{route[0]}', callback, methods=[route[1]] - ) - - @router.get(f'{rpc_url}{agent_card_url}') - async def get_agent_card(request: Request) -> Response: - card = await self._adapter.handle_get_agent_card(request) - return JSONResponse(card) - - app.include_router(router) - return app diff --git a/src/a2a/server/apps/rest/rest_adapter.py b/src/a2a/server/apps/rest/rest_adapter.py deleted file mode 100644 index 719085604..000000000 --- a/src/a2a/server/apps/rest/rest_adapter.py +++ /dev/null @@ -1,252 +0,0 @@ -import functools -import logging - -from collections.abc import AsyncIterable, AsyncIterator, Awaitable, Callable -from typing import TYPE_CHECKING, Any - -from a2a.utils.helpers import maybe_await - - -if TYPE_CHECKING: - from sse_starlette.sse import EventSourceResponse - from starlette.requests import Request - from starlette.responses import JSONResponse, Response - - _package_starlette_installed = True - -else: - try: - from sse_starlette.sse import EventSourceResponse - from starlette.requests import Request - from starlette.responses import JSONResponse, Response - - _package_starlette_installed = True - except ImportError: - EventSourceResponse = Any - Request = Any - JSONResponse = Any - Response = Any - - _package_starlette_installed = False - -from a2a.server.apps.jsonrpc import ( - CallContextBuilder, - DefaultCallContextBuilder, -) -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.server.request_handlers.rest_handler import RESTHandler -from a2a.types import AgentCard, AuthenticatedExtendedCardNotConfiguredError -from a2a.utils.error_handlers import ( - rest_error_handler, - rest_stream_error_handler, -) -from a2a.utils.errors import InvalidRequestError, ServerError - - -logger = logging.getLogger(__name__) - - -class RESTAdapter: - """Adapter to make RequestHandler work with RESTful API. - - Defines REST requests processors and the routes to attach them too, as well as - manages response generation including Server-Sent Events (SSE). - """ - - def __init__( # noqa: PLR0913 - self, - agent_card: AgentCard, - http_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - ): - """Initializes the RESTApplication. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - http_handler: The handler instance responsible for processing A2A - requests via http. - extended_agent_card: An optional, distinct AgentCard to be served - at the authenticated extended card endpoint. - context_builder: The CallContextBuilder used to construct the - ServerCallContext passed to the http_handler. If None, no - ServerCallContext is passed. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - """ - if not _package_starlette_installed: - raise ImportError( - 'Packages `starlette` and `sse-starlette` are required to use' - ' the `RESTAdapter`. They can be added as a part of `a2a-sdk`' - ' optional dependencies, `a2a-sdk[http-server]`.' - ) - self.agent_card = agent_card - self.extended_agent_card = extended_agent_card - self.card_modifier = card_modifier - self.extended_card_modifier = extended_card_modifier - self.handler = RESTHandler( - agent_card=agent_card, request_handler=http_handler - ) - self._context_builder = context_builder or DefaultCallContextBuilder() - - @rest_error_handler - async def _handle_request( - self, - method: Callable[[Request, ServerCallContext], Awaitable[Any]], - request: Request, - ) -> Response: - call_context = self._context_builder.build(request) - response = await method(request, call_context) - return JSONResponse(content=response) - - @rest_stream_error_handler - async def _handle_streaming_request( - self, - method: Callable[[Request, ServerCallContext], AsyncIterable[Any]], - request: Request, - ) -> EventSourceResponse: - # Pre-consume and cache the request body to prevent deadlock in streaming context - # This is required because Starlette's request.body() can only be consumed once, - # and attempting to consume it after EventSourceResponse starts causes deadlock - try: - await request.body() - except (ValueError, RuntimeError, OSError) as e: - raise ServerError( - error=InvalidRequestError( - message=f'Failed to pre-consume request body: {e}' - ) - ) from e - - call_context = self._context_builder.build(request) - - async def event_generator( - stream: AsyncIterable[Any], - ) -> AsyncIterator[dict[str, dict[str, Any]]]: - async for item in stream: - yield {'data': item} - - return EventSourceResponse( - event_generator(method(request, call_context)) - ) - - async def handle_get_agent_card( - self, request: Request, call_context: ServerCallContext | None = None - ) -> dict[str, Any]: - """Handles GET requests for the agent card endpoint. - - Args: - request: The incoming Starlette Request object. - call_context: ServerCallContext - - Returns: - A JSONResponse containing the agent card data. - """ - card_to_serve = self.agent_card - if self.card_modifier: - card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) - - return card_to_serve.model_dump(mode='json', exclude_none=True) - - async def handle_authenticated_agent_card( - self, request: Request, call_context: ServerCallContext | None = None - ) -> dict[str, Any]: - """Hook for per credential agent card response. - - If a dynamic card is needed based on the credentials provided in the request - override this method and return the customized content. - - Args: - request: The incoming Starlette Request object. - call_context: ServerCallContext - - Returns: - A JSONResponse containing the authenticated card. - """ - if not self.agent_card.supports_authenticated_extended_card: - raise ServerError( - error=AuthenticatedExtendedCardNotConfiguredError( - message='Authenticated card not supported' - ) - ) - card_to_serve = self.extended_agent_card - - if not card_to_serve: - card_to_serve = self.agent_card - - if self.extended_card_modifier: - context = self._context_builder.build(request) - card_to_serve = await maybe_await( - self.extended_card_modifier(card_to_serve, context) - ) - elif self.card_modifier: - card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) - - return card_to_serve.model_dump(mode='json', exclude_none=True) - - def routes(self) -> dict[tuple[str, str], Callable[[Request], Any]]: - """Constructs a dictionary of API routes and their corresponding handlers. - - This method maps URL paths and HTTP methods to the appropriate handler - functions from the RESTHandler. It can be used by a web framework - (like Starlette or FastAPI) to set up the application's endpoints. - - Returns: - A dictionary where each key is a tuple of (path, http_method) and - the value is the callable handler for that route. - """ - routes: dict[tuple[str, str], Callable[[Request], Any]] = { - ('/v1/message:send', 'POST'): functools.partial( - self._handle_request, self.handler.on_message_send - ), - ('/v1/message:stream', 'POST'): functools.partial( - self._handle_streaming_request, - self.handler.on_message_send_stream, - ), - ('/v1/tasks/{id}:cancel', 'POST'): functools.partial( - self._handle_request, self.handler.on_cancel_task - ), - ('/v1/tasks/{id}:subscribe', 'GET'): functools.partial( - self._handle_streaming_request, - self.handler.on_resubscribe_to_task, - ), - ('/v1/tasks/{id}', 'GET'): functools.partial( - self._handle_request, self.handler.on_get_task - ), - ( - '/v1/tasks/{id}/pushNotificationConfigs/{push_id}', - 'GET', - ): functools.partial( - self._handle_request, self.handler.get_push_notification - ), - ( - '/v1/tasks/{id}/pushNotificationConfigs', - 'POST', - ): functools.partial( - self._handle_request, self.handler.set_push_notification - ), - ( - '/v1/tasks/{id}/pushNotificationConfigs', - 'GET', - ): functools.partial( - self._handle_request, self.handler.list_push_notifications - ), - ('/v1/tasks', 'GET'): functools.partial( - self._handle_request, self.handler.list_tasks - ), - } - if self.agent_card.supports_authenticated_extended_card: - routes[('/v1/card', 'GET')] = functools.partial( - self._handle_request, self.handle_authenticated_agent_card - ) - - return routes diff --git a/src/a2a/server/context.py b/src/a2a/server/context.py index 2fac162ba..833ca44c4 100644 --- a/src/a2a/server/context.py +++ b/src/a2a/server/context.py @@ -21,5 +21,5 @@ class ServerCallContext(BaseModel): state: State = Field(default_factory=dict) user: User = Field(default_factory=UnauthenticatedUser) + tenant: str = Field(default='') requested_extensions: set[str] = Field(default_factory=set) - activated_extensions: set[str] = Field(default_factory=set) diff --git a/src/a2a/server/events/__init__.py b/src/a2a/server/events/__init__.py index 64f6da217..8af917ef7 100644 --- a/src/a2a/server/events/__init__.py +++ b/src/a2a/server/events/__init__.py @@ -1,7 +1,7 @@ """Event handling components for the A2A server.""" from a2a.server.events.event_consumer import EventConsumer -from a2a.server.events.event_queue import Event, EventQueue +from a2a.server.events.event_queue import Event, EventQueue, EventQueueLegacy from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager from a2a.server.events.queue_manager import ( NoTaskQueue, @@ -14,6 +14,7 @@ 'Event', 'EventConsumer', 'EventQueue', + 'EventQueueLegacy', 'InMemoryQueueManager', 'NoTaskQueue', 'QueueManager', diff --git a/src/a2a/server/events/event_consumer.py b/src/a2a/server/events/event_consumer.py index de0f6bd9d..8414e2d17 100644 --- a/src/a2a/server/events/event_consumer.py +++ b/src/a2a/server/events/event_consumer.py @@ -1,30 +1,20 @@ import asyncio import logging -import sys from collections.abc import AsyncGenerator from pydantic import ValidationError -from a2a.server.events.event_queue import Event, EventQueue -from a2a.types import ( - InternalError, +from a2a.server.events.event_queue import Event, EventQueueLegacy, QueueShutDown +from a2a.types.a2a_pb2 import ( Message, Task, TaskState, TaskStatusUpdateEvent, ) -from a2a.utils.errors import ServerError from a2a.utils.telemetry import SpanKind, trace_class -# This is an alias to the exception for closed queue -QueueClosed: type[Exception] = asyncio.QueueEmpty - -# When using python 3.13 or higher, the closed queue signal is QueueShutdown -if sys.version_info >= (3, 13): - QueueClosed = asyncio.QueueShutDown - logger = logging.getLogger(__name__) @@ -32,7 +22,7 @@ class EventConsumer: """Consumer to read events from the agent event queue.""" - def __init__(self, queue: EventQueue): + def __init__(self, queue: EventQueueLegacy): """Initializes the EventConsumer. Args: @@ -43,31 +33,6 @@ def __init__(self, queue: EventQueue): self._exception: BaseException | None = None logger.debug('EventConsumer initialized') - async def consume_one(self) -> Event: - """Consume one event from the agent event queue non-blocking. - - Returns: - The next event from the queue. - - Raises: - ServerError: If the queue is empty when attempting to dequeue - immediately. - """ - logger.debug('Attempting to consume one event.') - try: - event = await self.queue.dequeue_event(no_wait=True) - except asyncio.QueueEmpty as e: - logger.warning('Event queue was empty in consume_one.') - raise ServerError( - InternalError(message='Agent did not return any response') - ) from e - - logger.debug('Dequeued event of type: %s in consume_one.', type(event)) - - self.queue.task_done() - - return event - async def consume_all(self) -> AsyncGenerator[Event]: """Consume all the generated streaming events from the agent. @@ -102,20 +67,16 @@ async def consume_all(self) -> AsyncGenerator[Event]: 'Marked task as done in event queue in consume_all' ) - is_final_event = ( - (isinstance(event, TaskStatusUpdateEvent) and event.final) - or isinstance(event, Message) - or ( - isinstance(event, Task) - and event.status.state - in ( - TaskState.completed, - TaskState.canceled, - TaskState.failed, - TaskState.rejected, - TaskState.unknown, - TaskState.input_required, - ) + is_final_event = isinstance(event, Message) or ( + isinstance(event, Task | TaskStatusUpdateEvent) + and event.status.state + in ( + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, + TaskState.TASK_STATE_UNSPECIFIED, + TaskState.TASK_STATE_INPUT_REQUIRED, ) ) @@ -135,7 +96,7 @@ async def consume_all(self) -> AsyncGenerator[Event]: except asyncio.TimeoutError: # pyright: ignore [reportUnusedExcept] # This class was made an alias of built-in TimeoutError after 3.11 continue - except (QueueClosed, asyncio.QueueEmpty): + except (QueueShutDown, asyncio.QueueEmpty): # Confirm that the queue is closed, e.g. we aren't on # python 3.12 and get a queue empty error on an open queue if self.queue.is_closed(): diff --git a/src/a2a/server/events/event_queue.py b/src/a2a/server/events/event_queue.py index 357fcb02e..bb4d7b9b4 100644 --- a/src/a2a/server/events/event_queue.py +++ b/src/a2a/server/events/event_queue.py @@ -2,7 +2,34 @@ import logging import sys -from a2a.types import ( +from abc import ABC, abstractmethod +from types import TracebackType +from typing import Any, cast + +from typing_extensions import Self + + +if sys.version_info >= (3, 13): + from asyncio import Queue as AsyncQueue + from asyncio import QueueShutDown + + def _create_async_queue(maxsize: int = 0) -> AsyncQueue[Any]: + """Create a backwards-compatible queue object.""" + return AsyncQueue(maxsize=maxsize) +else: + import culsans + + from culsans import AsyncQueue # type: ignore[no-redef] + from culsans import ( + AsyncQueueShutDown as QueueShutDown, # type: ignore[no-redef] + ) + + def _create_async_queue(maxsize: int = 0) -> AsyncQueue[Any]: + """Create a backwards-compatible queue object.""" + return culsans.Queue(maxsize=maxsize).async_q # type: ignore[no-any-return] + + +from a2a.types.a2a_pb2 import ( Message, Task, TaskArtifactUpdateEvent, @@ -20,8 +47,54 @@ DEFAULT_MAX_QUEUE_SIZE = 1024 +class EventQueue(ABC): + """Base class and factory for EventQueueSource. + + EventQueue provides an abstraction for a queue of events that can be tapped + by multiple consumers. + EventQueue maintain main queue and source and maintain child queues in sync. + GUARANTEE: All sinks (including the default one) will receive events in the exact same order. + + WARNING (Concurrency): All events from all sinks (both the default queue and any + tapped child queues) must be regularly consumed and marked as done. If any single + consumer stops processing and its queue reaches capacity, it can block the event + dispatcher and stall the entire system, causing a widespread deadlock. + + WARNING (Memory Leak): Event queues spawn background tasks. To prevent memory + and task leaks, all queue objects (both source and sinks) MUST be explicitly + closed via `await queue.close()` or by using the async context manager (`async with queue:`). + Child queues are automatically closed when parent queue is closed, but you + should still close them explicitly to prevent queues from reaching capacity by + unconsumed events. + + Typical usage: + queue = EventQueue() + child_queue1 = await queue.tap() + child_queue2 = await queue.tap() + + async for event in child_queue1: + do_some_work(event) + child_queue1.task_done() + """ + + def __new__(cls, *args: Any, **kwargs: Any) -> Self: + """Redirects instantiation to EventQueueLegacy for backwards compatibility.""" + if cls is EventQueue: + instance = EventQueueLegacy.__new__(EventQueueLegacy) + EventQueueLegacy.__init__(instance, *args, **kwargs) + return cast('Self', instance) + return super().__new__(cls) + + @abstractmethod + async def enqueue_event(self, event: Event) -> None: + """Pushes an event into the queue. + + Only main queue can enqueue events. Child queues can only dequeue events. + """ + + @trace_class(kind=SpanKind.SERVER) -class EventQueue: +class EventQueueLegacy(EventQueue): """Event queue for A2A responses from agent. Acts as a buffer between the agent's asynchronous execution and the @@ -37,12 +110,32 @@ def __init__(self, max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE) -> None: if max_queue_size <= 0: raise ValueError('max_queue_size must be greater than 0') - self.queue: asyncio.Queue[Event] = asyncio.Queue(maxsize=max_queue_size) - self._children: list[EventQueue] = [] + self._queue: AsyncQueue[Event] = _create_async_queue( + maxsize=max_queue_size + ) + self._children: list[EventQueueLegacy] = [] self._is_closed = False self._lock = asyncio.Lock() logger.debug('EventQueue initialized.') + @property + def queue(self) -> AsyncQueue[Event]: + """[DEPRECATED] Returns the underlying asyncio.Queue.""" + return self._queue + + async def __aenter__(self) -> Self: + """Enters the async context manager, returning the queue itself.""" + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exits the async context manager, ensuring close() is called.""" + await self.close() + async def enqueue_event(self, event: Event) -> None: """Enqueues an event to this queue and all its children. @@ -56,12 +149,16 @@ async def enqueue_event(self, event: Event) -> None: logger.debug('Enqueuing event of type: %s', type(event)) - # Make sure to use put instead of put_nowait to avoid blocking the event loop. - await self.queue.put(event) + try: + await self.queue.put(event) + except QueueShutDown: + logger.warning('Queue was closed during enqueuing. Event dropped.') + return + for child in self._children: await child.enqueue_event(event) - async def dequeue_event(self, no_wait: bool = False) -> Event: + async def dequeue_event(self) -> Event: """Dequeues an event from the queue. This implementation expects that dequeue to raise an exception when @@ -70,42 +167,22 @@ async def dequeue_event(self, no_wait: bool = False) -> Event: the user is awaiting the queue.get method. Python<=3.12 this needs to manage this lifecycle itself. The current implementation can lead to blocking if the dequeue_event is called before the EventQueue has been - closed but when there are no events on the queue. Two ways to avoid this - are to call this with no_wait = True which won't block, but is the - callers responsibility to retry as appropriate. Alternatively, one can - use an async Task management solution to cancel the get task if the queue + closed but when there are no events on the queue. One way to avoid this + is to use an async Task management solution to cancel the get task if the queue has closed or some other condition is met. The implementation of the EventConsumer uses an async.wait with a timeout to abort the dequeue_event call and retry, when it will return with a closed error. - Args: - no_wait: If True, retrieve an event immediately or raise `asyncio.QueueEmpty`. - If False (default), wait until an event is available. - Returns: The next event from the queue. Raises: - asyncio.QueueEmpty: If `no_wait` is True and the queue is empty. asyncio.QueueShutDown: If the queue has been closed and is empty. """ async with self._lock: - if ( - sys.version_info < (3, 13) - and self._is_closed - and self.queue.empty() - ): - # On 3.13+, skip early raise; await self.queue.get() will raise QueueShutDown after shutdown() + if self._is_closed and self.queue.empty(): logger.warning('Queue is closed. Event will not be dequeued.') - raise asyncio.QueueEmpty('Queue is closed.') - - if no_wait: - logger.debug('Attempting to dequeue event (no_wait=True).') - event = self.queue.get_nowait() - logger.debug( - 'Dequeued event (no_wait=True) of type: %s', type(event) - ) - return event + raise QueueShutDown('Queue is closed.') logger.debug('Attempting to dequeue event (waiting).') event = await self.queue.get() @@ -120,125 +197,44 @@ def task_done(self) -> None: logger.debug('Marking task as done in EventQueue.') self.queue.task_done() - def tap(self) -> 'EventQueue': - """Taps the event queue to create a new child queue that receives all future events. + async def tap( + self, max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE + ) -> 'EventQueueLegacy': + """Taps the event queue to create a new child queue that receives future events. Returns: A new `EventQueue` instance that will receive all events enqueued to this parent queue from this point forward. """ logger.debug('Tapping EventQueue to create a child queue.') - queue = EventQueue() + queue = EventQueueLegacy(max_queue_size=max_queue_size) self._children.append(queue) return queue async def close(self, immediate: bool = False) -> None: """Closes the queue for future push events and also closes all child queues. - Once closed, no new events can be enqueued. Behavior is consistent across - Python versions: - - Python >= 3.13: Uses `asyncio.Queue.shutdown` to stop the queue. With - `immediate=True` the queue is shut down and pending events are cleared; with - `immediate=False` the queue is shut down and we wait for it to drain via - `queue.join()`. - - Python < 3.13: Emulates the same semantics by clearing on `immediate=True` - or awaiting `queue.join()` on `immediate=False`. - - Consumers attempting to dequeue after close on an empty queue will observe - `asyncio.QueueShutDown` on Python >= 3.13 and `asyncio.QueueEmpty` on - Python < 3.13. - Args: - immediate (bool): - - True: Immediately closes the queue and clears all unprocessed events without waiting for them to be consumed. This is suitable for scenarios where you need to forcefully interrupt and quickly release resources. - - False (default): Gracefully closes the queue, waiting for all queued events to be processed (i.e., the queue is drained) before closing. This is suitable when you want to ensure all events are handled. - + immediate: If True, immediately flushes the queue, discarding all pending + events, and causes any currently blocked `dequeue_event` calls to raise + `QueueShutDown`. If False (default), the queue is marked as closed to new + events, but existing events can still be dequeued and processed until the + queue is fully drained. """ logger.debug('Closing EventQueue.') async with self._lock: - # If already closed, just return. if self._is_closed and not immediate: return - if not self._is_closed: - self._is_closed = True - # If using python 3.13 or higher, use shutdown but match <3.13 semantics - if sys.version_info >= (3, 13): - if immediate: - # Immediate: stop queue and clear any pending events, then close children - self.queue.shutdown(True) - await self.clear_events(True) - for child in self._children: - await child.close(True) - return - # Graceful: prevent further gets/puts via shutdown, then wait for drain and children - self.queue.shutdown(False) - await asyncio.gather( - self.queue.join(), *(child.close() for child in self._children) - ) - # Otherwise, join the queue - else: - if immediate: - await self.clear_events(True) - for child in self._children: - await child.close(immediate) - return - await asyncio.gather( - self.queue.join(), *(child.close() for child in self._children) - ) + self._is_closed = True + + self.queue.shutdown(immediate) + + await asyncio.gather( + *(child.close(immediate) for child in self._children) + ) + if not immediate: + await self.queue.join() def is_closed(self) -> bool: """Checks if the queue is closed.""" return self._is_closed - - async def clear_events(self, clear_child_queues: bool = True) -> None: - """Clears all events from the current queue and optionally all child queues. - - This method removes all pending events from the queue without processing them. - Child queues can be optionally cleared based on the clear_child_queues parameter. - - Args: - clear_child_queues: If True (default), clear all child queues as well. - If False, only clear the current queue, leaving child queues untouched. - """ - logger.debug('Clearing all events from EventQueue and child queues.') - - # Clear all events from the queue, even if closed - cleared_count = 0 - async with self._lock: - try: - while True: - event = self.queue.get_nowait() - logger.debug( - 'Discarding unprocessed event of type: %s, content: %s', - type(event), - event, - ) - self.queue.task_done() - cleared_count += 1 - except asyncio.QueueEmpty: - pass - except Exception as e: - # Handle Python 3.13+ QueueShutDown - if ( - sys.version_info >= (3, 13) - and type(e).__name__ == 'QueueShutDown' - ): - pass - else: - raise - - if cleared_count > 0: - logger.debug( - 'Cleared %d unprocessed events from EventQueue.', - cleared_count, - ) - - # Clear all child queues (lock released before awaiting child tasks) - if clear_child_queues and self._children: - child_tasks = [ - asyncio.create_task(child.clear_events()) - for child in self._children - ] - - if child_tasks: - await asyncio.gather(*child_tasks, return_exceptions=True) diff --git a/src/a2a/server/events/event_queue_v2.py b/src/a2a/server/events/event_queue_v2.py new file mode 100644 index 000000000..224cb8e56 --- /dev/null +++ b/src/a2a/server/events/event_queue_v2.py @@ -0,0 +1,389 @@ +import asyncio +import contextlib +import logging + +from types import TracebackType + +from typing_extensions import Self + +from a2a.server.events.event_queue import ( + DEFAULT_MAX_QUEUE_SIZE, + AsyncQueue, + Event, + EventQueue, + QueueShutDown, + _create_async_queue, +) +from a2a.utils.telemetry import SpanKind, trace_class + + +logger = logging.getLogger(__name__) + + +@trace_class(kind=SpanKind.SERVER) +class EventQueueSource(EventQueue): + """The Parent EventQueue. + + Acts as the single entry point for producers. Events pushed here are buffered + in `_incoming_queue` and distributed to all child Sinks by a background dispatcher task. + """ + + def __init__( + self, + max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE, + create_default_sink: bool = True, + ) -> None: + """Initializes the EventQueueSource.""" + if max_queue_size <= 0: + raise ValueError('max_queue_size must be greater than 0') + + self._incoming_queue: AsyncQueue[Event] = _create_async_queue( + maxsize=max_queue_size + ) + self._lock = asyncio.Lock() + self._sinks: set[EventQueueSink] = set() + self._is_closed = False + + # Internal sink for backward compatibility + self._default_sink: EventQueueSink | None + if create_default_sink: + self._default_sink = EventQueueSink( + parent=self, max_queue_size=max_queue_size + ) + self._sinks.add(self._default_sink) + else: + self._default_sink = None + + self._dispatcher_task = asyncio.create_task(self._dispatch_loop()) + + self._dispatcher_task_expected_to_cancel = False + + logger.debug('EventQueueSource initialized.') + + @property + def queue(self) -> AsyncQueue[Event]: + """Returns the underlying asyncio.Queue of the default sink.""" + if self._default_sink is None: + raise ValueError('No default sink available.') + return self._default_sink.queue + + async def _dispatch_loop(self) -> None: + try: + while True: + event = await self._incoming_queue.get() + + async with self._lock: + active_sinks = list(self._sinks) + + if active_sinks: + results = await asyncio.gather( + *( + sink._put_internal(event) # noqa: SLF001 + for sink in active_sinks + ), + return_exceptions=True, + ) + for result in results: + if isinstance(result, Exception): + logger.error( + 'Error dispatching event to sink', + exc_info=result, + ) + + self._incoming_queue.task_done() + except asyncio.CancelledError: + logger.debug( + 'EventQueueSource._dispatch_loop() for %s was cancelled', + self, + ) + if not self._dispatcher_task_expected_to_cancel: + # This should only happen on forced shutdown (e.g. tests, server forced stop, etc). + logger.info( + 'EventQueueSource._dispatch_loop() for %s was cancelled without ' + 'calling EventQueue.close() first.', + self, + ) + async with self._lock: + self._is_closed = True + sinks_to_close = list(self._sinks) + + self._incoming_queue.shutdown(immediate=True) + await asyncio.gather( + *(sink.close(immediate=True) for sink in sinks_to_close) + ) + raise + except QueueShutDown: + logger.debug('EventQueueSource._dispatch_loop() shutdown %s', self) + except Exception: + logger.exception( + 'EventQueueSource._dispatch_loop() failed %s', self + ) + raise + finally: + logger.debug('EventQueueSource._dispatch_loop() Completed %s', self) + + async def _join_incoming_queue(self) -> None: + """Helper to wait for join() while monitoring the dispatcher task.""" + if self._dispatcher_task.done(): + logger.warning( + 'Dispatcher task is not running. Cannot wait for event dispatch.' + ) + return + + join_task = asyncio.create_task(self._incoming_queue.join()) + try: + done, _pending = await asyncio.wait( + [join_task, self._dispatcher_task], + return_when=asyncio.FIRST_COMPLETED, + ) + except asyncio.CancelledError: + join_task.cancel() + raise + + if join_task in done: + return + + # Dispatcher task finished before join() + join_task.cancel() + with contextlib.suppress(asyncio.CancelledError): + await join_task + + try: + if self._dispatcher_task.exception(): + logger.error( + 'Dispatcher task failed. Events may be lost.', + exc_info=self._dispatcher_task.exception(), + ) + else: + logger.warning( + 'Dispatcher task finished unexpectedly. Events may be lost.' + ) + except (asyncio.CancelledError, asyncio.InvalidStateError): + logger.warning( + 'Dispatcher task was cancelled or finished. Events may be lost.' + ) + + async def tap( + self, max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE + ) -> 'EventQueueSink': + """Taps the event queue to create a new child queue that receives future events. + + Note: The tapped queue may receive some old events if the incoming event + queue is lagging behind and hasn't dispatched them yet. + """ + async with self._lock: + if self._is_closed: + raise QueueShutDown('Cannot tap a closed EventQueueSource.') + sink = EventQueueSink(parent=self, max_queue_size=max_queue_size) + self._sinks.add(sink) + return sink + + async def remove_sink(self, sink: 'EventQueueSink') -> None: + """Removes a sink from the source's internal list.""" + async with self._lock: + self._sinks.remove(sink) + + async def enqueue_event(self, event: Event) -> None: + """Enqueues an event to this queue and all its children.""" + logger.debug('Enqueuing event of type: %s', type(event)) + try: + await self._incoming_queue.put(event) + except QueueShutDown: + logger.warning('Queue was closed during enqueuing. Event dropped.') + return + + async def dequeue_event(self) -> Event: + """Pulls an event from the default internal sink queue.""" + if self._default_sink is None: + raise ValueError('No default sink available.') + return await self._default_sink.dequeue_event() + + def task_done(self) -> None: + """Signals that a work on dequeued event is complete via the default internal sink queue.""" + if self._default_sink is None: + raise ValueError('No default sink available.') + self._default_sink.task_done() + + async def close(self, immediate: bool = False) -> None: + """Closes the queue and all its child sinks. + + It is safe to call it multiple times. + If immediate is True, the queue will be closed without waiting for all events to be processed. + If immediate is False, the queue will be closed after all events are processed (and confirmed with task_done() calls). + + WARNING: Closing the parent queue with immediate=False is a deadlock risk if there are unconsumed events + in any of the child sinks and the consumer has crashed without draining its queue. + It is highly recommended to wrap graceful shutdowns with a timeout, e.g., + `asyncio.wait_for(queue.close(immediate=False), timeout=...)`. + """ + logger.debug('Closing EventQueueSource: immediate=%s', immediate) + async with self._lock: + # No more tap() allowed. + self._is_closed = True + # No more new events can be enqueued. + self._incoming_queue.shutdown(immediate=immediate) + sinks_to_close = list(self._sinks) + + if immediate: + self._dispatcher_task_expected_to_cancel = True + self._dispatcher_task.cancel() + await asyncio.gather( + *(sink.close(immediate=True) for sink in sinks_to_close) + ) + else: + # Wait for all already-enqueued events to be dispatched + await self._join_incoming_queue() + self._dispatcher_task_expected_to_cancel = True + self._dispatcher_task.cancel() + await asyncio.gather( + *(sink.close(immediate=False) for sink in sinks_to_close) + ) + + def is_closed(self) -> bool: + """[DEPRECATED] Checks if the queue is closed. + + NOTE: Relying on this for enqueue logic introduces race conditions. + It is maintained primarily for backwards compatibility, workarounds for + Python 3.10/3.12 async queues in consumers, and for the test suite. + """ + return self._is_closed + + async def test_only_join_incoming_queue(self) -> None: + """Wait for incoming queue to be fully processed.""" + await self._join_incoming_queue() + + async def __aenter__(self) -> Self: + """Enters the async context manager, returning the queue itself. + + WARNING: See `__aexit__` for important deadlock risks associated with + exiting this context manager if unconsumed events remain. + """ + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exits the async context manager, ensuring close() is called. + + WARNING: The context manager calls `close(immediate=False)` by default. + If a consumer exits the `async with` block early (e.g., due to an exception + or an explicit `break`) while unconsumed events remain in the queue, + `__aexit__` will deadlock waiting for `task_done()` to be called on those events. + """ + await self.close() + + +class EventQueueSink(EventQueue): + """The Child EventQueue. + + Acts as a read-only consumer endpoint. Events are pushed here exclusively + by the parent EventQueueSource's dispatcher task. + """ + + def __init__( + self, + parent: EventQueueSource, + max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE, + ) -> None: + """Initializes the EventQueueSink.""" + if max_queue_size <= 0: + raise ValueError('max_queue_size must be greater than 0') + + self._parent = parent + self._queue: AsyncQueue[Event] = _create_async_queue( + maxsize=max_queue_size + ) + self._is_closed = False + self._lock = asyncio.Lock() + + logger.debug('EventQueueSink initialized.') + + @property + def queue(self) -> AsyncQueue[Event]: + """Returns the underlying asyncio.Queue of this sink.""" + return self._queue + + async def _put_internal(self, event: Event) -> None: + with contextlib.suppress(QueueShutDown): + await self._queue.put(event) + + async def enqueue_event(self, event: Event) -> None: + """Sinks are read-only and cannot have events directly enqueued to them.""" + raise RuntimeError('Cannot enqueue to a sink-only queue') + + async def dequeue_event(self) -> Event: + """Pulls an event from the sink queue.""" + logger.debug('Attempting to dequeue event (waiting).') + event = await self._queue.get() + logger.debug('Dequeued event: %s', event) + return event + + def task_done(self) -> None: + """Signals that a work on dequeued event is complete in this sink queue.""" + logger.debug('Marking task as done in EventQueueSink.') + self._queue.task_done() + + async def tap( + self, max_queue_size: int = DEFAULT_MAX_QUEUE_SIZE + ) -> 'EventQueueSink': + """Creates a child queue that receives future events. + + Note: The tapped queue may receive some old events if the incoming event + queue is lagging behind and hasn't dispatched them yet. + """ + # Delegate tap to the parent source so all sinks are flat under the source + return await self._parent.tap(max_queue_size=max_queue_size) + + async def close(self, immediate: bool = False) -> None: + """Closes the child sink queue. + + It is safe to call it multiple times. + If immediate is True, the queue will be closed without waiting for all events to be processed. + If immediate is False, the queue will be closed after all events are processed (and confirmed with task_done() calls). + """ + logger.debug('Closing EventQueueSink.') + async with self._lock: + self._is_closed = True + self._queue.shutdown(immediate=immediate) + + # Ignore KeyError (close have to be idempotent). + with contextlib.suppress(KeyError): + await self._parent.remove_sink(self) + + if not immediate: + await self._queue.join() + + def is_closed(self) -> bool: + """[DEPRECATED] Checks if the queue is closed. + + NOTE: Relying on this for enqueue logic introduces race conditions. + It is maintained primarily for backwards compatibility, workarounds for + Python 3.10/3.12 async queues in consumers, and for the test suite. + """ + return self._is_closed + + async def __aenter__(self) -> Self: + """Enters the async context manager, returning the queue itself. + + WARNING: See `__aexit__` for important deadlock risks associated with + exiting this context manager if unconsumed events remain. + """ + return self + + async def __aexit__( + self, + exc_type: type[BaseException] | None, + exc_val: BaseException | None, + exc_tb: TracebackType | None, + ) -> None: + """Exits the async context manager, ensuring close() is called. + + WARNING: The context manager calls `close(immediate=False)` by default. + If a consumer exits the `async with` block early (e.g., due to an exception + or an explicit `break`) while unconsumed events remain in the queue, + `__aexit__` will deadlock waiting for `task_done()` to be called on those events. + """ + await self.close() diff --git a/src/a2a/server/events/in_memory_queue_manager.py b/src/a2a/server/events/in_memory_queue_manager.py index 53a3b7dd2..0beb354f9 100644 --- a/src/a2a/server/events/in_memory_queue_manager.py +++ b/src/a2a/server/events/in_memory_queue_manager.py @@ -1,6 +1,6 @@ import asyncio -from a2a.server.events.event_queue import EventQueue +from a2a.server.events.event_queue import EventQueueLegacy from a2a.server.events.queue_manager import ( NoTaskQueue, QueueManager, @@ -23,10 +23,10 @@ class InMemoryQueueManager(QueueManager): def __init__(self) -> None: """Initializes the InMemoryQueueManager.""" - self._task_queue: dict[str, EventQueue] = {} + self._task_queue: dict[str, EventQueueLegacy] = {} self._lock = asyncio.Lock() - async def add(self, task_id: str, queue: EventQueue) -> None: + async def add(self, task_id: str, queue: EventQueueLegacy) -> None: """Adds a new event queue for a task ID. Raises: @@ -37,27 +37,27 @@ async def add(self, task_id: str, queue: EventQueue) -> None: raise TaskQueueExists self._task_queue[task_id] = queue - async def get(self, task_id: str) -> EventQueue | None: + async def get(self, task_id: str) -> EventQueueLegacy | None: """Retrieves the event queue for a task ID. Returns: - The `EventQueue` instance for the `task_id`, or `None` if not found. + The `EventQueueLegacy` instance for the `task_id`, or `None` if not found. """ async with self._lock: if task_id not in self._task_queue: return None return self._task_queue[task_id] - async def tap(self, task_id: str) -> EventQueue | None: + async def tap(self, task_id: str) -> EventQueueLegacy | None: """Taps the event queue for a task ID to create a child queue. Returns: - A new child `EventQueue` instance, or `None` if the task ID is not found. + A new child `EventQueueLegacy` instance, or `None` if the task ID is not found. """ async with self._lock: if task_id not in self._task_queue: return None - return self._task_queue[task_id].tap() + return await self._task_queue[task_id].tap() async def close(self, task_id: str) -> None: """Closes and removes the event queue for a task ID. @@ -71,15 +71,15 @@ async def close(self, task_id: str) -> None: queue = self._task_queue.pop(task_id) await queue.close() - async def create_or_tap(self, task_id: str) -> EventQueue: + async def create_or_tap(self, task_id: str) -> EventQueueLegacy: """Creates a new event queue for a task ID if one doesn't exist, otherwise taps the existing one. Returns: - A new or child `EventQueue` instance for the `task_id`. + A new or child `EventQueueLegacy` instance for the `task_id`. """ async with self._lock: if task_id not in self._task_queue: - queue = EventQueue() + queue = EventQueueLegacy() self._task_queue[task_id] = queue return queue - return self._task_queue[task_id].tap() + return await self._task_queue[task_id].tap() diff --git a/src/a2a/server/events/queue_manager.py b/src/a2a/server/events/queue_manager.py index ed69aae68..b3ec204a5 100644 --- a/src/a2a/server/events/queue_manager.py +++ b/src/a2a/server/events/queue_manager.py @@ -1,21 +1,21 @@ from abc import ABC, abstractmethod -from a2a.server.events.event_queue import EventQueue +from a2a.server.events.event_queue import EventQueueLegacy class QueueManager(ABC): """Interface for managing the event queue lifecycles per task.""" @abstractmethod - async def add(self, task_id: str, queue: EventQueue) -> None: + async def add(self, task_id: str, queue: EventQueueLegacy) -> None: """Adds a new event queue associated with a task ID.""" @abstractmethod - async def get(self, task_id: str) -> EventQueue | None: + async def get(self, task_id: str) -> EventQueueLegacy | None: """Retrieves the event queue for a task ID.""" @abstractmethod - async def tap(self, task_id: str) -> EventQueue | None: + async def tap(self, task_id: str) -> EventQueueLegacy | None: """Creates a child event queue (tap) for an existing task ID.""" @abstractmethod @@ -23,7 +23,7 @@ async def close(self, task_id: str) -> None: """Closes and removes the event queue for a task ID.""" @abstractmethod - async def create_or_tap(self, task_id: str) -> EventQueue: + async def create_or_tap(self, task_id: str) -> EventQueueLegacy: """Creates a queue if one doesn't exist, otherwise taps the existing one.""" diff --git a/src/a2a/server/jsonrpc_models.py b/src/a2a/server/jsonrpc_models.py new file mode 100644 index 000000000..f5a056282 --- /dev/null +++ b/src/a2a/server/jsonrpc_models.py @@ -0,0 +1,56 @@ +from typing import Any, Literal + +from pydantic import BaseModel + + +class JSONRPCBaseModel(BaseModel): + """Base model for JSON-RPC objects.""" + + model_config = { + 'extra': 'allow', + 'populate_by_name': True, + 'arbitrary_types_allowed': True, + } + + +class JSONRPCError(JSONRPCBaseModel): + """Base model for JSON-RPC error objects.""" + + code: int + message: str + data: Any | None = None + + +class JSONParseError(JSONRPCError): + """Error raised when invalid JSON was received by the server.""" + + code: Literal[-32700] = -32700 # pyright: ignore [reportIncompatibleVariableOverride] + message: str = 'Parse error' + + +class InvalidRequestError(JSONRPCError): + """Error raised when the JSON sent is not a valid Request object.""" + + code: Literal[-32600] = -32600 # pyright: ignore [reportIncompatibleVariableOverride] + message: str = 'Invalid Request' + + +class MethodNotFoundError(JSONRPCError): + """Error raised when the method does not exist / is not available.""" + + code: Literal[-32601] = -32601 # pyright: ignore [reportIncompatibleVariableOverride] + message: str = 'Method not found' + + +class InvalidParamsError(JSONRPCError): + """Error raised when invalid method parameter(s).""" + + code: Literal[-32602] = -32602 # pyright: ignore [reportIncompatibleVariableOverride] + message: str = 'Invalid params' + + +class InternalError(JSONRPCError): + """Error raised when internal JSON-RPC error.""" + + code: Literal[-32603] = -32603 # pyright: ignore [reportIncompatibleVariableOverride] + message: str = 'Internal error' diff --git a/src/a2a/server/models.py b/src/a2a/server/models.py index 4b0f7504c..b3ae1a389 100644 --- a/src/a2a/server/models.py +++ b/src/a2a/server/models.py @@ -1,4 +1,5 @@ -from typing import TYPE_CHECKING, Any, Generic, TypeVar +from datetime import datetime +from typing import TYPE_CHECKING, Any if TYPE_CHECKING: @@ -10,20 +11,17 @@ def override(func): # noqa: ANN001, ANN201 return func -from pydantic import BaseModel - -from a2a.types import Artifact, Message, TaskStatus +from a2a.types.a2a_pb2 import Artifact, Message, TaskStatus try: - from sqlalchemy import JSON, Dialect, LargeBinary, String + from sqlalchemy import JSON, DateTime, Index, LargeBinary, String from sqlalchemy.orm import ( DeclarativeBase, Mapped, declared_attr, mapped_column, ) - from sqlalchemy.types import TypeDecorator except ImportError as e: raise ImportError( 'Database models require SQLAlchemy. ' @@ -35,84 +33,6 @@ def override(func): # noqa: ANN001, ANN201 ) from e -T = TypeVar('T', bound=BaseModel) - - -class PydanticType(TypeDecorator[T], Generic[T]): - """SQLAlchemy type that handles Pydantic model serialization.""" - - impl = JSON - cache_ok = True - - def __init__(self, pydantic_type: type[T], **kwargs: dict[str, Any]): - """Initialize the PydanticType. - - Args: - pydantic_type: The Pydantic model type to handle. - **kwargs: Additional arguments for TypeDecorator. - """ - self.pydantic_type = pydantic_type - super().__init__(**kwargs) - - def process_bind_param( - self, value: T | None, dialect: Dialect - ) -> dict[str, Any] | None: - """Convert Pydantic model to a JSON-serializable dictionary for the database.""" - if value is None: - return None - return ( - value.model_dump(mode='json') - if isinstance(value, BaseModel) - else value - ) - - def process_result_value( - self, value: dict[str, Any] | None, dialect: Dialect - ) -> T | None: - """Convert a JSON-like dictionary from the database back to a Pydantic model.""" - if value is None: - return None - return self.pydantic_type.model_validate(value) - - -class PydanticListType(TypeDecorator, Generic[T]): - """SQLAlchemy type that handles lists of Pydantic models.""" - - impl = JSON - cache_ok = True - - def __init__(self, pydantic_type: type[T], **kwargs: dict[str, Any]): - """Initialize the PydanticListType. - - Args: - pydantic_type: The Pydantic model type for items in the list. - **kwargs: Additional arguments for TypeDecorator. - """ - self.pydantic_type = pydantic_type - super().__init__(**kwargs) - - def process_bind_param( - self, value: list[T] | None, dialect: Dialect - ) -> list[dict[str, Any]] | None: - """Convert a list of Pydantic models to a JSON-serializable list for the DB.""" - if value is None: - return None - return [ - item.model_dump(mode='json') - if isinstance(item, BaseModel) - else item - for item in value - ] - - def process_result_value( - self, value: list[dict[str, Any]] | None, dialect: Dialect - ) -> list[T] | None: - """Convert a JSON-like list from the DB back to a list of Pydantic models.""" - if value is None: - return None - return [self.pydantic_type.model_validate(item) for item in value] - - # Base class for all database models class Base(DeclarativeBase): """Base class for declarative models in A2A SDK.""" @@ -127,14 +47,17 @@ class TaskMixin: kind: Mapped[str] = mapped_column( String(16), nullable=False, default='task' ) - - # Properly typed Pydantic fields with automatic serialization - status: Mapped[TaskStatus] = mapped_column(PydanticType(TaskStatus)) + owner: Mapped[str] = mapped_column(String(255), nullable=True) + last_updated: Mapped[datetime | None] = mapped_column( + DateTime, nullable=True + ) + status: Mapped[TaskStatus] = mapped_column(JSON, nullable=False) artifacts: Mapped[list[Artifact] | None] = mapped_column( - PydanticListType(Artifact), nullable=True + JSON, nullable=True ) - history: Mapped[list[Message] | None] = mapped_column( - PydanticListType(Message), nullable=True + history: Mapped[list[Message] | None] = mapped_column(JSON, nullable=True) + protocol_version: Mapped[str | None] = mapped_column( + String(16), nullable=True ) # Using declared_attr to avoid conflict with Pydantic's metadata @@ -152,6 +75,17 @@ def __repr__(self) -> str: f'context_id="{self.context_id}", status="{self.status}")>' ) + @declared_attr.directive + @classmethod + def __table_args__(cls) -> tuple[Any, ...]: + """Define a composite index (owner, last_updated) for each table that uses the mixin.""" + tablename = getattr(cls, '__tablename__', 'tasks') + return ( + Index( + f'idx_{tablename}_owner_last_updated', 'owner', 'last_updated' + ), + ) + def create_task_model( table_name: str = 'tasks', base: type[DeclarativeBase] = Base @@ -212,6 +146,10 @@ class PushNotificationConfigMixin: task_id: Mapped[str] = mapped_column(String(36), primary_key=True) config_id: Mapped[str] = mapped_column(String(255), primary_key=True) config_data: Mapped[bytes] = mapped_column(LargeBinary, nullable=False) + owner: Mapped[str] = mapped_column(String(255), nullable=True, index=True) + protocol_version: Mapped[str | None] = mapped_column( + String(16), nullable=True + ) @override def __repr__(self) -> str: diff --git a/src/a2a/server/owner_resolver.py b/src/a2a/server/owner_resolver.py new file mode 100644 index 000000000..4fca42d24 --- /dev/null +++ b/src/a2a/server/owner_resolver.py @@ -0,0 +1,13 @@ +from collections.abc import Callable + +from a2a.server.context import ServerCallContext + + +# Definition +OwnerResolver = Callable[[ServerCallContext], str] + + +# Example Default Implementation +def resolve_user_scope(context: ServerCallContext) -> str: + """Resolves the owner scope based on the user in the context.""" + return context.user.user_name diff --git a/src/a2a/server/request_handlers/__init__.py b/src/a2a/server/request_handlers/__init__.py index 43ebc8e25..34654cb58 100644 --- a/src/a2a/server/request_handlers/__init__.py +++ b/src/a2a/server/request_handlers/__init__.py @@ -3,22 +3,28 @@ import logging from a2a.server.request_handlers.default_request_handler import ( - DefaultRequestHandler, + LegacyRequestHandler, +) +from a2a.server.request_handlers.default_request_handler_v2 import ( + DefaultRequestHandlerV2, +) +from a2a.server.request_handlers.request_handler import ( + RequestHandler, + validate_request_params, ) -from a2a.server.request_handlers.jsonrpc_handler import JSONRPCHandler -from a2a.server.request_handlers.request_handler import RequestHandler from a2a.server.request_handlers.response_helpers import ( build_error_response, prepare_response_object, ) -from a2a.server.request_handlers.rest_handler import RESTHandler logger = logging.getLogger(__name__) try: from a2a.server.request_handlers.grpc_handler import ( + DefaultGrpcServerCallContextBuilder, GrpcHandler, # type: ignore + GrpcServerCallContextBuilder, ) except ImportError as e: _original_error = e @@ -37,12 +43,17 @@ def __init__(self, *args, **kwargs): ) from _original_error +DefaultRequestHandler = DefaultRequestHandlerV2 + __all__ = [ + 'DefaultGrpcServerCallContextBuilder', 'DefaultRequestHandler', + 'DefaultRequestHandlerV2', 'GrpcHandler', - 'JSONRPCHandler', - 'RESTHandler', + 'GrpcServerCallContextBuilder', + 'LegacyRequestHandler', 'RequestHandler', 'build_error_response', 'prepare_response_object', + 'validate_request_params', ] diff --git a/src/a2a/server/request_handlers/default_request_handler.py b/src/a2a/server/request_handlers/default_request_handler.py index 3bd6a0dc2..e803b567f 100644 --- a/src/a2a/server/request_handlers/default_request_handler.py +++ b/src/a2a/server/request_handlers/default_request_handler.py @@ -1,7 +1,7 @@ import asyncio import logging -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Awaitable, Callable from typing import cast from a2a.server.agent_execution import ( @@ -14,52 +14,70 @@ from a2a.server.events import ( Event, EventConsumer, - EventQueue, + EventQueueLegacy, InMemoryQueueManager, QueueManager, ) -from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.request_handlers.request_handler import ( + RequestHandler, + validate, + validate_request_params, +) from a2a.server.tasks import ( PushNotificationConfigStore, + PushNotificationEvent, PushNotificationSender, ResultAggregator, TaskManager, TaskStore, ) -from a2a.types import ( - DeleteTaskPushNotificationConfigParams, - GetTaskPushNotificationConfigParams, - InternalError, - InvalidParamsError, - ListTaskPushNotificationConfigParams, +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, Message, - MessageSendParams, + SendMessageRequest, + SubscribeToTaskRequest, Task, - TaskIdParams, - TaskNotCancelableError, - TaskNotFoundError, TaskPushNotificationConfig, - TaskQueryParams, TaskState, +) +from a2a.utils.errors import ( + ExtendedAgentCardNotConfiguredError, + InternalError, + InvalidParamsError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, UnsupportedOperationError, ) -from a2a.utils.errors import ServerError -from a2a.utils.task import apply_history_length +from a2a.utils.task import ( + apply_history_length, + validate_history_length, + validate_page_size, +) from a2a.utils.telemetry import SpanKind, trace_class logger = logging.getLogger(__name__) TERMINAL_TASK_STATES = { - TaskState.completed, - TaskState.canceled, - TaskState.failed, - TaskState.rejected, + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, } @trace_class(kind=SpanKind.SERVER) -class DefaultRequestHandler(RequestHandler): +class LegacyRequestHandler(RequestHandler): """Default request handler for all incoming requests. This handler provides default implementations for all A2A JSON-RPC methods, @@ -74,27 +92,39 @@ def __init__( # noqa: PLR0913 self, agent_executor: AgentExecutor, task_store: TaskStore, + agent_card: AgentCard, queue_manager: QueueManager | None = None, push_config_store: PushNotificationConfigStore | None = None, push_sender: PushNotificationSender | None = None, request_context_builder: RequestContextBuilder | None = None, + extended_agent_card: AgentCard | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] + ] + | None = None, ) -> None: """Initializes the DefaultRequestHandler. Args: agent_executor: The `AgentExecutor` instance to run agent logic. task_store: The `TaskStore` instance to manage task persistence. + agent_card: The `AgentCard` describing the agent's capabilities. queue_manager: The `QueueManager` instance to manage event queues. Defaults to `InMemoryQueueManager`. push_config_store: The `PushNotificationConfigStore` instance for managing push notification configurations. Defaults to None. push_sender: The `PushNotificationSender` instance for sending push notifications. Defaults to None. request_context_builder: The `RequestContextBuilder` instance used to build request contexts. Defaults to `SimpleRequestContextBuilder`. + extended_agent_card: An optional, distinct `AgentCard` to be served at the extended card endpoint. + extended_card_modifier: An optional callback to dynamically modify the extended `AgentCard` before it is served. """ self.agent_executor = agent_executor self.task_store = task_store + self._agent_card = agent_card self._queue_manager = queue_manager or InMemoryQueueManager() self._push_config_store = push_config_store self._push_sender = push_sender + self.extended_agent_card = extended_agent_card + self.extended_card_modifier = extended_card_modifier self._request_context_builder = ( request_context_builder or SimpleRequestContextBuilder( @@ -108,36 +138,63 @@ def __init__( # noqa: PLR0913 # asyncio tasks and to surface unexpected exceptions. self._background_tasks = set() + @validate_request_params async def on_get_task( self, - params: TaskQueryParams, - context: ServerCallContext | None = None, + params: GetTaskRequest, + context: ServerCallContext, ) -> Task | None: """Default handler for 'tasks/get'.""" - task: Task | None = await self.task_store.get(params.id, context) + validate_history_length(params) + + task_id = params.id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError - # Apply historyLength parameter if specified - return apply_history_length(task, params.history_length) + return apply_history_length(task, params) + @validate_request_params + async def on_list_tasks( + self, + params: ListTasksRequest, + context: ServerCallContext, + ) -> ListTasksResponse: + """Default handler for 'tasks/list'.""" + validate_history_length(params) + if params.HasField('page_size'): + validate_page_size(params.page_size) + + page = await self.task_store.list(params, context) + for task in page.tasks: + if not params.include_artifacts: + task.ClearField('artifacts') + + updated_task = apply_history_length(task, params) + if updated_task is not task: + task.CopyFrom(updated_task) + + return page + + @validate_request_params async def on_cancel_task( - self, params: TaskIdParams, context: ServerCallContext | None = None + self, + params: CancelTaskRequest, + context: ServerCallContext, ) -> Task | None: """Default handler for 'tasks/cancel'. Attempts to cancel the task managed by the `AgentExecutor`. """ - task: Task | None = await self.task_store.get(params.id, context) + task_id = params.id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError # Check if task is in a non-cancelable state (completed, canceled, failed, rejected) if task.status.state in TERMINAL_TASK_STATES: - raise ServerError( - error=TaskNotCancelableError( - message=f'Task cannot be canceled - current state: {task.status.state}' - ) + raise TaskNotCancelableError( + message=f'Task cannot be canceled - current state: {task.status.state}' ) task_manager = TaskManager( @@ -151,11 +208,12 @@ async def on_cancel_task( queue = await self._queue_manager.tap(task.id) if not queue: - queue = EventQueue() + queue = EventQueueLegacy() await self.agent_executor.cancel( RequestContext( - None, + call_context=context, + request=None, task_id=task.id, context_id=task.context_id, task=task, @@ -169,23 +227,19 @@ async def on_cancel_task( consumer = EventConsumer(queue) result = await result_aggregator.consume_all(consumer) if not isinstance(result, Task): - raise ServerError( - error=InternalError( - message='Agent did not return valid response for cancel' - ) + raise InternalError( + message='Agent did not return valid response for cancel' ) - if result.status.state != TaskState.canceled: - raise ServerError( - error=TaskNotCancelableError( - message=f'Task cannot be canceled - current state: {result.status.state}' - ) + if result.status.state != TaskState.TASK_STATE_CANCELED: + raise TaskNotCancelableError( + message=f'Task cannot be canceled - current state: {result.status.state}' ) return result async def _run_event_stream( - self, request: RequestContext, queue: EventQueue + self, request: RequestContext, queue: EventQueueLegacy ) -> None: """Runs the agent's `execute` method and closes the queue afterwards. @@ -198,18 +252,23 @@ async def _run_event_stream( async def _setup_message_execution( self, - params: MessageSendParams, - context: ServerCallContext | None = None, - ) -> tuple[TaskManager, str, EventQueue, ResultAggregator, asyncio.Task]: + params: SendMessageRequest, + context: ServerCallContext, + ) -> tuple[ + TaskManager, str, EventQueueLegacy, ResultAggregator, asyncio.Task + ]: """Common setup logic for both streaming and non-streaming message handling. Returns: A tuple of (task_manager, task_id, queue, result_aggregator, producer_task) """ # Create task manager and validate existing task + # Proto empty strings should be treated as None + task_id = params.message.task_id or None + context_id = params.message.context_id or None task_manager = TaskManager( - task_id=params.message.task_id, - context_id=params.message.context_id, + task_id=task_id, + context_id=context_id, task_store=self.task_store, initial_message=params.message, context=context, @@ -218,18 +277,14 @@ async def _setup_message_execution( if task: if task.status.state in TERMINAL_TASK_STATES: - raise ServerError( - error=InvalidParamsError( - message=f'Task {task.id} is in terminal state: {task.status.state.value}' - ) + raise InvalidParamsError( + message=f'Task {task.id} is in terminal state: {task.status.state}' ) task = task_manager.update_with_message(params.message, task) elif params.message.task_id: - raise ServerError( - error=TaskNotFoundError( - message=f'Task {params.message.task_id} was specified but does not exist' - ) + raise TaskNotFoundError( + message=f'Task {params.message.task_id} was specified but does not exist' ) # Build request context @@ -249,10 +304,12 @@ async def _setup_message_execution( if ( self._push_config_store and params.configuration - and params.configuration.push_notification_config + and params.configuration.task_push_notification_config ): await self._push_config_store.set_info( - task_id, params.configuration.push_notification_config + task_id, + params.configuration.task_push_notification_config, + context, ) queue = await self._queue_manager.create_or_tap(task_id) @@ -273,29 +330,32 @@ def _validate_task_id_match(self, task_id: str, event_task_id: str) -> None: event_task_id, task_id, ) - raise ServerError( - InternalError(message='Task ID mismatch in agent response') - ) + raise InternalError(message='Task ID mismatch in agent response') async def _send_push_notification_if_needed( - self, task_id: str, result_aggregator: ResultAggregator + self, task_id: str, event: Event ) -> None: - """Sends push notification if configured and task is available.""" - if self._push_sender and task_id: - latest_task = await result_aggregator.current_result - if isinstance(latest_task, Task): - await self._push_sender.send_notification(latest_task) + """Sends push notification if configured.""" + if ( + self._push_sender + and task_id + and isinstance(event, PushNotificationEvent) + ): + await self._push_sender.send_notification(task_id, event) + @validate_request_params async def on_message_send( self, - params: MessageSendParams, - context: ServerCallContext | None = None, + params: SendMessageRequest, + context: ServerCallContext, ) -> Message | Task: """Default handler for 'message/send' interface (non-streaming). Starts the agent execution for the message and waits for the final result (Task or Message). """ + validate_history_length(params.configuration) + ( _task_manager, task_id, @@ -307,17 +367,13 @@ async def on_message_send( consumer = EventConsumer(queue) producer_task.add_done_callback(consumer.agent_task_callback) - blocking = True # Default to blocking behavior - if params.configuration and params.configuration.blocking is False: - blocking = False + blocking = not params.configuration.return_immediately interrupted_or_non_blocking = False try: # Create async callback for push notifications - async def push_notification_callback() -> None: - await self._send_push_notification_if_needed( - task_id, result_aggregator - ) + async def push_notification_callback(event: Event) -> None: + await self._send_push_notification_if_needed(task_id, event) ( result, @@ -348,23 +404,24 @@ async def push_notification_callback() -> None: await self._cleanup_producer(producer_task, task_id) if not result: - raise ServerError(error=InternalError()) + raise InternalError if isinstance(result, Task): self._validate_task_id_match(task_id, result.id) if params.configuration: - result = apply_history_length( - result, params.configuration.history_length - ) - - await self._send_push_notification_if_needed(task_id, result_aggregator) + result = apply_history_length(result, params.configuration) return result + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) async def on_message_send_stream( self, - params: MessageSendParams, - context: ServerCallContext | None = None, + params: SendMessageRequest, + context: ServerCallContext, ) -> AsyncGenerator[Event]: """Default handler for 'message/stream' (streaming). @@ -386,9 +443,7 @@ async def on_message_send_stream( if isinstance(event, Task): self._validate_task_id_match(task_id, event.id) - await self._send_push_notification_if_needed( - task_id, result_aggregator - ) + await self._send_push_notification_if_needed(task_id, event) yield event except (asyncio.CancelledError, GeneratorExit): # Client disconnected: continue consuming and persisting events in the background @@ -451,81 +506,100 @@ async def _cleanup_producer( async with self._running_agents_lock: self._running_agents.pop(task_id, None) - async def on_set_task_push_notification_config( + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_create_task_push_notification_config( self, params: TaskPushNotificationConfig, - context: ServerCallContext | None = None, + context: ServerCallContext, ) -> TaskPushNotificationConfig: - """Default handler for 'tasks/pushNotificationConfig/set'. + """Default handler for 'tasks/pushNotificationConfig/create'. Requires a `PushNotifier` to be configured. """ if not self._push_config_store: - raise ServerError(error=UnsupportedOperationError()) + raise PushNotificationNotSupportedError - task: Task | None = await self.task_store.get(params.task_id, context) + task_id = params.task_id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError await self._push_config_store.set_info( - params.task_id, - params.push_notification_config, + task_id, + params, + context, ) return params + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) async def on_get_task_push_notification_config( self, - params: TaskIdParams | GetTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, + params: GetTaskPushNotificationConfigRequest, + context: ServerCallContext, ) -> TaskPushNotificationConfig: """Default handler for 'tasks/pushNotificationConfig/get'. Requires a `PushConfigStore` to be configured. """ if not self._push_config_store: - raise ServerError(error=UnsupportedOperationError()) + raise PushNotificationNotSupportedError - task: Task | None = await self.task_store.get(params.id, context) + task_id = params.task_id + config_id = params.id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError - push_notification_config = await self._push_config_store.get_info( - params.id + push_notification_configs: list[TaskPushNotificationConfig] = ( + await self._push_config_store.get_info(task_id, context) or [] ) - if not push_notification_config or not push_notification_config[0]: - raise ServerError( - error=InternalError( - message='Push notification config not found' - ) - ) - return TaskPushNotificationConfig( - task_id=params.id, - push_notification_config=push_notification_config[0], - ) + for config in push_notification_configs: + if config.id == config_id: + return config - async def on_resubscribe_to_task( + raise TaskNotFoundError + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_subscribe_to_task( self, - params: TaskIdParams, - context: ServerCallContext | None = None, - ) -> AsyncGenerator[Event]: - """Default handler for 'tasks/resubscribe'. + params: SubscribeToTaskRequest, + context: ServerCallContext, + ) -> AsyncGenerator[Event, None]: + """Default handler for 'SubscribeToTask'. Allows a client to re-attach to a running streaming task's event stream. Requires the task and its queue to still be active. """ - task: Task | None = await self.task_store.get(params.id, context) + task_id = params.id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError if task.status.state in TERMINAL_TASK_STATES: - raise ServerError( - error=InvalidParamsError( - message=f'Task {task.id} is in terminal state: {task.status.state.value}' - ) + raise UnsupportedOperationError( + message=f'Task {task.id} is in terminal state: {task.status.state}' ) + # The operation MUST return a Task object as the first event in the stream + # https://a2a-protocol.org/latest/specification/#316-subscribe-to-task + yield task + task_manager = TaskManager( task_id=task.id, context_id=task.context_id, @@ -538,55 +612,90 @@ async def on_resubscribe_to_task( queue = await self._queue_manager.tap(task.id) if not queue: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError consumer = EventConsumer(queue) async for event in result_aggregator.consume_and_emit(consumer): yield event - async def on_list_task_push_notification_config( + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_list_task_push_notification_configs( self, - params: ListTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, - ) -> list[TaskPushNotificationConfig]: - """Default handler for 'tasks/pushNotificationConfig/list'. + params: ListTaskPushNotificationConfigsRequest, + context: ServerCallContext, + ) -> ListTaskPushNotificationConfigsResponse: + """Default handler for 'ListTaskPushNotificationConfigs'. Requires a `PushConfigStore` to be configured. """ if not self._push_config_store: - raise ServerError(error=UnsupportedOperationError()) + raise PushNotificationNotSupportedError - task: Task | None = await self.task_store.get(params.id, context) + task_id = params.task_id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError push_notification_config_list = await self._push_config_store.get_info( - params.id + task_id, context ) - return [ - TaskPushNotificationConfig( - task_id=params.id, push_notification_config=config - ) - for config in push_notification_config_list - ] + return ListTaskPushNotificationConfigsResponse( + configs=push_notification_config_list + ) + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) async def on_delete_task_push_notification_config( self, - params: DeleteTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, + params: DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext, ) -> None: """Default handler for 'tasks/pushNotificationConfig/delete'. Requires a `PushConfigStore` to be configured. """ if not self._push_config_store: - raise ServerError(error=UnsupportedOperationError()) + raise PushNotificationNotSupportedError - task: Task | None = await self.task_store.get(params.id, context) + task_id = params.task_id + config_id = params.id + task: Task | None = await self.task_store.get(task_id, context) if not task: - raise ServerError(error=TaskNotFoundError()) + raise TaskNotFoundError - await self._push_config_store.delete_info( - params.id, params.push_notification_config_id - ) + await self._push_config_store.delete_info(task_id, context, config_id) + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.extended_agent_card, + error_message='The agent does not support authenticated extended cards', + ) + async def on_get_extended_agent_card( + self, + params: GetExtendedAgentCardRequest, + context: ServerCallContext, + ) -> AgentCard: + """Default handler for 'GetExtendedAgentCard'. + + Requires `capabilities.extended_agent_card` to be true. + """ + extended_card = self.extended_agent_card + if not extended_card: + raise ExtendedAgentCardNotConfiguredError + + if self.extended_card_modifier: + extended_card = await self.extended_card_modifier( + extended_card, context + ) + + return extended_card diff --git a/src/a2a/server/request_handlers/default_request_handler_v2.py b/src/a2a/server/request_handlers/default_request_handler_v2.py new file mode 100644 index 000000000..ecdc0cfef --- /dev/null +++ b/src/a2a/server/request_handlers/default_request_handler_v2.py @@ -0,0 +1,482 @@ +from __future__ import annotations + +import asyncio # noqa: TC003 +import logging + +from typing import TYPE_CHECKING, Any, cast + +from a2a.server.agent_execution import ( + AgentExecutor, + RequestContext, + RequestContextBuilder, + SimpleRequestContextBuilder, +) +from a2a.server.agent_execution.active_task import ( + INTERRUPTED_TASK_STATES, + TERMINAL_TASK_STATES, +) +from a2a.server.agent_execution.active_task_registry import ActiveTaskRegistry +from a2a.server.request_handlers.request_handler import ( + RequestHandler, + validate, + validate_request_params, +) +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + Message, + SendMessageRequest, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, + TaskStatusUpdateEvent, +) +from a2a.utils.errors import ( + ExtendedAgentCardNotConfiguredError, + InternalError, + InvalidParamsError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, +) +from a2a.utils.task import ( + apply_history_length, + validate_history_length, + validate_page_size, +) +from a2a.utils.telemetry import SpanKind, trace_class + + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator, Awaitable, Callable + + from a2a.server.agent_execution.active_task import ActiveTask + from a2a.server.context import ServerCallContext + from a2a.server.events import Event + from a2a.server.tasks import ( + PushNotificationConfigStore, + PushNotificationSender, + TaskStore, + ) + + +logger = logging.getLogger(__name__) + + +# TODO: cleanup context_id management + + +@trace_class(kind=SpanKind.SERVER) +class DefaultRequestHandlerV2(RequestHandler): + """Default request handler for all incoming requests.""" + + _background_tasks: set[asyncio.Task] + + def __init__( # noqa: PLR0913 + self, + agent_executor: AgentExecutor, + task_store: TaskStore, + agent_card: AgentCard, + queue_manager: Any + | None = None, # Kept for backward compat in signature + push_config_store: PushNotificationConfigStore | None = None, + push_sender: PushNotificationSender | None = None, + request_context_builder: RequestContextBuilder | None = None, + extended_agent_card: AgentCard | None = None, + extended_card_modifier: Callable[ + [AgentCard, ServerCallContext], Awaitable[AgentCard] + ] + | None = None, + ) -> None: + self.agent_executor = agent_executor + self.task_store = task_store + self._agent_card = agent_card + self._push_config_store = push_config_store + self._push_sender = push_sender + self.extended_agent_card = extended_agent_card + self.extended_card_modifier = extended_card_modifier + self._request_context_builder = ( + request_context_builder + or SimpleRequestContextBuilder( + should_populate_referred_tasks=False, task_store=self.task_store + ) + ) + self._active_task_registry = ActiveTaskRegistry( + agent_executor=self.agent_executor, + task_store=self.task_store, + push_sender=self._push_sender, + ) + self._background_tasks = set() + + @validate_request_params + async def on_get_task( # noqa: D102 + self, + params: GetTaskRequest, + context: ServerCallContext, + ) -> Task | None: + validate_history_length(params) + + task_id = params.id + task: Task | None = await self.task_store.get(task_id, context) + if not task: + raise TaskNotFoundError + + return apply_history_length(task, params) + + @validate_request_params + async def on_list_tasks( # noqa: D102 + self, + params: ListTasksRequest, + context: ServerCallContext, + ) -> ListTasksResponse: + validate_history_length(params) + if params.HasField('page_size'): + validate_page_size(params.page_size) + + page = await self.task_store.list(params, context) + for task in page.tasks: + if not params.include_artifacts: + task.ClearField('artifacts') + + updated_task = apply_history_length(task, params) + if updated_task is not task: + task.CopyFrom(updated_task) + + return page + + @validate_request_params + async def on_cancel_task( # noqa: D102 + self, + params: CancelTaskRequest, + context: ServerCallContext, + ) -> Task | None: + task_id = params.id + + try: + active_task = await self._active_task_registry.get_or_create( + task_id, call_context=context, create_task_if_missing=False + ) + result = await active_task.cancel(context) + except InvalidParamsError as e: + raise TaskNotCancelableError from e + + if isinstance(result, Message): + raise InternalError( + message='Cancellation returned a message instead of a task.' + ) + + return result + + def _validate_task_id_match(self, task_id: str, event_task_id: str) -> None: + if task_id != event_task_id: + logger.error( + 'Agent generated task_id=%s does not match the RequestContext task_id=%s.', + event_task_id, + task_id, + ) + raise InternalError(message='Task ID mismatch in agent response') + + async def _setup_active_task( + self, + params: SendMessageRequest, + call_context: ServerCallContext, + ) -> tuple[ActiveTask, RequestContext]: + validate_history_length(params.configuration) + + original_task_id = params.message.task_id or None + original_context_id = params.message.context_id or None + + if original_task_id: + task = await self.task_store.get(original_task_id, call_context) + if not task: + raise TaskNotFoundError(f'Task {original_task_id} not found') + + # Build context to resolve or generate missing IDs + request_context = await self._request_context_builder.build( + params=params, + task_id=original_task_id, + context_id=original_context_id, + # We will get the task when we have to process the request to avoid concurrent read/write issues. + task=None, + context=call_context, + ) + + task_id = cast('str', request_context.task_id) + context_id = cast('str', request_context.context_id) + + if ( + self._push_config_store + and params.configuration + and params.configuration.task_push_notification_config + ): + await self._push_config_store.set_info( + task_id, + params.configuration.task_push_notification_config, + call_context, + ) + + active_task = await self._active_task_registry.get_or_create( + task_id, + context_id=context_id, + call_context=call_context, + create_task_if_missing=True, + ) + + return active_task, request_context + + @validate_request_params + async def on_message_send( # noqa: D102 + self, + params: SendMessageRequest, + context: ServerCallContext, + ) -> Message | Task: + active_task, request_context = await self._setup_active_task( + params, context + ) + task_id = cast('str', request_context.task_id) + + result: Message | Task | None = None + + async for raw_event in active_task.subscribe( + request=request_context, + include_initial_task=False, + replace_status_update_with_task=True, + ): + event = raw_event + logger.debug( + 'Processing[%s] event [%s] %s', + params.message.task_id, + type(event).__name__, + event, + ) + if isinstance(event, TaskStatusUpdateEvent): + self._validate_task_id_match(task_id, event.task_id) + event = await active_task.get_task() + logger.debug( + 'Replaced TaskStatusUpdateEvent with Task: %s', event + ) + + if isinstance(event, Task) and ( + params.configuration.return_immediately + or event.status.state + in (TERMINAL_TASK_STATES | INTERRUPTED_TASK_STATES) + ): + self._validate_task_id_match(task_id, event.id) + result = event + # DO break here as it's "return_immediately". + # AgentExecutor will continue to run in the background. + break + + if isinstance(event, Message): + result = event + # Do NOT break here as Message is supposed to be the only + # event in "Message-only" interaction. + # ActiveTask consumer (see active_task.py) validates the event + # stream and raises InvalidAgentResponseError if more events are + # pushed after a Message. + + if result is None: + logger.debug('Missing result for task %s', request_context.task_id) + result = await active_task.get_task() + + if isinstance(result, Task): + result = apply_history_length(result, params.configuration) + + logger.debug( + 'Returning result for task %s: %s', + request_context.task_id, + result, + ) + return result + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_message_send_stream( # noqa: D102 + self, + params: SendMessageRequest, + context: ServerCallContext, + ) -> AsyncGenerator[Event, None]: + active_task, request_context = await self._setup_active_task( + params, context + ) + + task_id = cast('str', request_context.task_id) + + async for event in active_task.subscribe( + request=request_context, + include_initial_task=False, + ): + # Do NOT break here as we rely on AgentExecutor to yield control. + # ActiveTask consumer (see active_task.py) validates the event + # stream and raises InvalidAgentResponseError on misbehaving agents: + # - an event after a Message + # - Message after entering task mode + # - an event after a terminal state + if isinstance(event, Task): + self._validate_task_id_match(task_id, event.id) + yield apply_history_length(event, params.configuration) + else: + yield event + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_create_task_push_notification_config( # noqa: D102 + self, + params: TaskPushNotificationConfig, + context: ServerCallContext, + ) -> TaskPushNotificationConfig: + if not self._push_config_store: + raise PushNotificationNotSupportedError + + task_id = params.task_id + task: Task | None = await self.task_store.get(task_id, context) + if not task: + raise TaskNotFoundError + + await self._push_config_store.set_info( + task_id, + params, + context, + ) + + return params + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_get_task_push_notification_config( # noqa: D102 + self, + params: GetTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> TaskPushNotificationConfig: + if not self._push_config_store: + raise PushNotificationNotSupportedError + + task_id = params.task_id + config_id = params.id + task: Task | None = await self.task_store.get(task_id, context) + if not task: + raise TaskNotFoundError + + push_notification_configs: list[TaskPushNotificationConfig] = ( + await self._push_config_store.get_info(task_id, context) or [] + ) + + for config in push_notification_configs: + if config.id == config_id: + return config + + raise TaskNotFoundError + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.streaming, + 'Streaming is not supported by the agent', + ) + async def on_subscribe_to_task( # noqa: D102 + self, + params: SubscribeToTaskRequest, + context: ServerCallContext, + ) -> AsyncGenerator[Event, None]: + task_id = params.id + + active_task = await self._active_task_registry.get_or_create( + task_id, + call_context=context, + create_task_if_missing=False, + ) + + async for event in active_task.subscribe(include_initial_task=True): + yield event + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_list_task_push_notification_configs( # noqa: D102 + self, + params: ListTaskPushNotificationConfigsRequest, + context: ServerCallContext, + ) -> ListTaskPushNotificationConfigsResponse: + if not self._push_config_store: + raise PushNotificationNotSupportedError + + task_id = params.task_id + task: Task | None = await self.task_store.get(task_id, context) + if not task: + raise TaskNotFoundError + + push_notification_config_list = await self._push_config_store.get_info( + task_id, context + ) + + return ListTaskPushNotificationConfigsResponse( + configs=push_notification_config_list + ) + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.push_notifications, + error_message='Push notifications are not supported by the agent', + error_type=PushNotificationNotSupportedError, + ) + async def on_delete_task_push_notification_config( # noqa: D102 + self, + params: DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> None: + if not self._push_config_store: + raise PushNotificationNotSupportedError + + task_id = params.task_id + config_id = params.id + task: Task | None = await self.task_store.get(task_id, context) + if not task: + raise TaskNotFoundError + + await self._push_config_store.delete_info(task_id, context, config_id) + + @validate_request_params + @validate( + lambda self: self._agent_card.capabilities.extended_agent_card, + error_message='The agent does not support authenticated extended cards', + ) + async def on_get_extended_agent_card( + self, + params: GetExtendedAgentCardRequest, + context: ServerCallContext, + ) -> AgentCard: + """Default handler for 'GetExtendedAgentCard'. + + Requires `capabilities.extended_agent_card` to be true. + """ + extended_card = self.extended_agent_card + if not extended_card: + raise ExtendedAgentCardNotConfiguredError + + if self.extended_card_modifier: + extended_card = await self.extended_card_modifier( + extended_card, context + ) + + return extended_card diff --git a/src/a2a/server/request_handlers/grpc_handler.py b/src/a2a/server/request_handlers/grpc_handler.py index 1280b92aa..8cd421e93 100644 --- a/src/a2a/server/request_handlers/grpc_handler.py +++ b/src/a2a/server/request_handlers/grpc_handler.py @@ -1,51 +1,70 @@ # ruff: noqa: N802 -import contextlib import logging from abc import ABC, abstractmethod -from collections.abc import AsyncIterable, Awaitable +from collections.abc import AsyncIterable, Awaitable, Callable +from typing import TypeVar try: - import grpc - import grpc.aio + import grpc # type: ignore[reportMissingModuleSource] + import grpc.aio # type: ignore[reportMissingModuleSource] + + from grpc_status import rpc_status except ImportError as e: raise ImportError( - 'GrpcHandler requires grpcio and grpcio-tools to be installed. ' + 'GrpcHandler requires grpcio, grpcio-tools, and grpcio-status to be installed. ' 'Install with: ' "'pip install a2a-sdk[grpc]'" ) from e -from collections.abc import Callable +from google.protobuf import any_pb2, empty_pb2, message +from google.rpc import error_details_pb2, status_pb2 -import a2a.grpc.a2a_pb2_grpc as a2a_grpc +import a2a.types.a2a_pb2_grpc as a2a_grpc from a2a import types -from a2a.auth.user import UnauthenticatedUser +from a2a.auth.user import UnauthenticatedUser, User from a2a.extensions.common import ( HTTP_EXTENSION_HEADER, get_requested_extensions, ) -from a2a.grpc import a2a_pb2 from a2a.server.context import ServerCallContext from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import AgentCard, TaskNotFoundError +from a2a.types import a2a_pb2 from a2a.utils import proto_utils -from a2a.utils.errors import ServerError -from a2a.utils.helpers import maybe_await, validate, validate_async_generator +from a2a.utils.errors import A2A_ERROR_REASONS, A2AError, TaskNotFoundError +from a2a.utils.proto_utils import validation_errors_to_bad_request logger = logging.getLogger(__name__) -# For now we use a trivial wrapper on the grpc context object - -class CallContextBuilder(ABC): - """A class for building ServerCallContexts using the Starlette Request.""" +class GrpcServerCallContextBuilder(ABC): + """Interface for building ServerCallContext from gRPC context.""" @abstractmethod def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: - """Builds a ServerCallContext from a gRPC Request.""" + """Builds a ServerCallContext from a gRPC ServicerContext.""" + + +class DefaultGrpcServerCallContextBuilder(GrpcServerCallContextBuilder): + """Default implementation of GrpcServerCallContextBuilder.""" + + def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: + """Builds a ServerCallContext from a gRPC ServicerContext.""" + state = {'grpc_context': context} + return ServerCallContext( + user=self.build_user(context), + state=state, + requested_extensions=get_requested_extensions( + _get_metadata_value(context, HTTP_EXTENSION_HEADER) + ), + ) + + def build_user(self, context: grpc.aio.ServicerContext) -> User: + """Builds a User from a gRPC ServicerContext.""" + return UnauthenticatedUser() def _get_metadata_value( @@ -63,22 +82,24 @@ def _get_metadata_value( ] -class DefaultCallContextBuilder(CallContextBuilder): - """A default implementation of CallContextBuilder.""" +_ERROR_CODE_MAP = { + types.InvalidRequestError: grpc.StatusCode.INVALID_ARGUMENT, + types.MethodNotFoundError: grpc.StatusCode.NOT_FOUND, + types.InvalidParamsError: grpc.StatusCode.INVALID_ARGUMENT, + types.InternalError: grpc.StatusCode.INTERNAL, + types.TaskNotFoundError: grpc.StatusCode.NOT_FOUND, + types.TaskNotCancelableError: grpc.StatusCode.FAILED_PRECONDITION, + types.PushNotificationNotSupportedError: grpc.StatusCode.UNIMPLEMENTED, + types.UnsupportedOperationError: grpc.StatusCode.UNIMPLEMENTED, + types.ContentTypeNotSupportedError: grpc.StatusCode.INVALID_ARGUMENT, + types.InvalidAgentResponseError: grpc.StatusCode.INTERNAL, + types.ExtendedAgentCardNotConfiguredError: grpc.StatusCode.FAILED_PRECONDITION, + types.ExtensionSupportRequiredError: grpc.StatusCode.FAILED_PRECONDITION, + types.VersionNotSupportedError: grpc.StatusCode.UNIMPLEMENTED, +} - def build(self, context: grpc.aio.ServicerContext) -> ServerCallContext: - """Builds the ServerCallContext.""" - user = UnauthenticatedUser() - state = {} - with contextlib.suppress(Exception): - state['grpc_context'] = context - return ServerCallContext( - user=user, - state=state, - requested_extensions=get_requested_extensions( - _get_metadata_value(context, HTTP_EXTENSION_HEADER) - ), - ) + +TResponse = TypeVar('TResponse') class GrpcHandler(a2a_grpc.A2AServiceServicer): @@ -86,338 +107,324 @@ class GrpcHandler(a2a_grpc.A2AServiceServicer): def __init__( self, - agent_card: AgentCard, request_handler: RequestHandler, - context_builder: CallContextBuilder | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, + context_builder: GrpcServerCallContextBuilder | None = None, ): """Initializes the GrpcHandler. Args: - agent_card: The AgentCard describing the agent's capabilities. request_handler: The underlying `RequestHandler` instance to delegate requests to. - context_builder: The CallContextBuilder object. If none the - DefaultCallContextBuilder is used. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. + context_builder: The GrpcContextBuilder used to construct the + ServerCallContext passed to the request_handler. If None the + DefaultGrpcContextBuilder is used. """ - self.agent_card = agent_card self.request_handler = request_handler - self.context_builder = context_builder or DefaultCallContextBuilder() - self.card_modifier = card_modifier + self._context_builder = ( + context_builder or DefaultGrpcServerCallContextBuilder() + ) + + async def _handle_unary( + self, + request: message.Message, + context: grpc.aio.ServicerContext, + handler_func: Callable[[ServerCallContext], Awaitable[TResponse]], + default_response: TResponse, + ) -> TResponse: + """Centralized error handling and context management for unary calls.""" + try: + server_context = self._build_call_context(context, request) + result = await handler_func(server_context) + except A2AError as e: + await self.abort_context(e, context) + else: + return result + return default_response + + async def _handle_stream( + self, + request: message.Message, + context: grpc.aio.ServicerContext, + handler_func: Callable[[ServerCallContext], AsyncIterable[TResponse]], + ) -> AsyncIterable[TResponse]: + """Centralized error handling and context management for streaming calls.""" + try: + server_context = self._build_call_context(context, request) + async for item in handler_func(server_context): + yield item + except A2AError as e: + await self.abort_context(e, context) async def SendMessage( self, request: a2a_pb2.SendMessageRequest, context: grpc.aio.ServicerContext, ) -> a2a_pb2.SendMessageResponse: - """Handles the 'SendMessage' gRPC method. - - Args: - request: The incoming `SendMessageRequest` object. - context: Context provided by the server. + """Handles the 'SendMessage' gRPC method.""" - Returns: - A `SendMessageResponse` object containing the result (Task or - Message) or throws an error response if a `ServerError` is raised - by the handler. - """ - try: - # Construct the server context object - server_context = self.context_builder.build(context) - # Transform the proto object to the python internal objects - a2a_request = proto_utils.FromProto.message_send_params( - request, - ) + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.SendMessageResponse: task_or_message = await self.request_handler.on_message_send( - a2a_request, server_context + request, server_context ) - self._set_extension_metadata(context, server_context) - return proto_utils.ToProto.task_or_message(task_or_message) - except ServerError as e: - await self.abort_context(e, context) - return a2a_pb2.SendMessageResponse() + if isinstance(task_or_message, a2a_pb2.Task): + return a2a_pb2.SendMessageResponse(task=task_or_message) + return a2a_pb2.SendMessageResponse(message=task_or_message) + + return await self._handle_unary( + request, context, _handler, a2a_pb2.SendMessageResponse() + ) - @validate_async_generator( - lambda self: self.agent_card.capabilities.streaming, - 'Streaming is not supported by the agent', - ) async def SendStreamingMessage( self, request: a2a_pb2.SendMessageRequest, context: grpc.aio.ServicerContext, ) -> AsyncIterable[a2a_pb2.StreamResponse]: - """Handles the 'StreamMessage' gRPC method. - - Yields response objects as they are produced by the underlying handler's - stream. - - Args: - request: The incoming `SendMessageRequest` object. - context: Context provided by the server. + """Handles the 'StreamMessage' gRPC method.""" - Yields: - `StreamResponse` objects containing streaming events - (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) - or gRPC error responses if a `ServerError` is raised. - """ - server_context = self.context_builder.build(context) - # Transform the proto object to the python internal objects - a2a_request = proto_utils.FromProto.message_send_params( - request, - ) - try: + async def _handler( + server_context: ServerCallContext, + ) -> AsyncIterable[a2a_pb2.StreamResponse]: async for event in self.request_handler.on_message_send_stream( - a2a_request, server_context + request, server_context ): - yield proto_utils.ToProto.stream_response(event) - self._set_extension_metadata(context, server_context) - except ServerError as e: - await self.abort_context(e, context) - return + yield proto_utils.to_stream_response(event) + + async for item in self._handle_stream(request, context, _handler): + yield item async def CancelTask( self, request: a2a_pb2.CancelTaskRequest, context: grpc.aio.ServicerContext, ) -> a2a_pb2.Task: - """Handles the 'CancelTask' gRPC method. - - Args: - request: The incoming `CancelTaskRequest` object. - context: Context provided by the server. + """Handles the 'CancelTask' gRPC method.""" - Returns: - A `Task` object containing the updated Task or a gRPC error. - """ - try: - server_context = self.context_builder.build(context) - task_id_params = proto_utils.FromProto.task_id_params(request) + async def _handler(server_context: ServerCallContext) -> a2a_pb2.Task: task = await self.request_handler.on_cancel_task( - task_id_params, server_context + request, server_context ) if task: - return proto_utils.ToProto.task(task) - await self.abort_context( - ServerError(error=TaskNotFoundError()), context - ) - except ServerError as e: - await self.abort_context(e, context) - return a2a_pb2.Task() + return task + raise TaskNotFoundError - @validate_async_generator( - lambda self: self.agent_card.capabilities.streaming, - 'Streaming is not supported by the agent', - ) - async def TaskSubscription( + return await self._handle_unary( + request, context, _handler, a2a_pb2.Task() + ) + + async def SubscribeToTask( self, - request: a2a_pb2.TaskSubscriptionRequest, + request: a2a_pb2.SubscribeToTaskRequest, context: grpc.aio.ServicerContext, ) -> AsyncIterable[a2a_pb2.StreamResponse]: - """Handles the 'TaskSubscription' gRPC method. + """Handles the 'SubscribeToTask' gRPC method.""" - Yields response objects as they are produced by the underlying handler's - stream. - - Args: - request: The incoming `TaskSubscriptionRequest` object. - context: Context provided by the server. - - Yields: - `StreamResponse` objects containing streaming events - """ - try: - server_context = self.context_builder.build(context) - async for event in self.request_handler.on_resubscribe_to_task( - proto_utils.FromProto.task_id_params(request), - server_context, + async def _handler( + server_context: ServerCallContext, + ) -> AsyncIterable[a2a_pb2.StreamResponse]: + async for event in self.request_handler.on_subscribe_to_task( + request, server_context ): - yield proto_utils.ToProto.stream_response(event) - except ServerError as e: - await self.abort_context(e, context) + yield proto_utils.to_stream_response(event) + + async for item in self._handle_stream(request, context, _handler): + yield item async def GetTaskPushNotificationConfig( self, request: a2a_pb2.GetTaskPushNotificationConfigRequest, context: grpc.aio.ServicerContext, ) -> a2a_pb2.TaskPushNotificationConfig: - """Handles the 'GetTaskPushNotificationConfig' gRPC method. - - Args: - request: The incoming `GetTaskPushNotificationConfigRequest` object. - context: Context provided by the server. + """Handles the 'GetTaskPushNotificationConfig' gRPC method.""" - Returns: - A `TaskPushNotificationConfig` object containing the config. - """ - try: - server_context = self.context_builder.build(context) - config = ( + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + return ( await self.request_handler.on_get_task_push_notification_config( - proto_utils.FromProto.task_id_params(request), - server_context, + request, server_context ) ) - return proto_utils.ToProto.task_push_notification_config(config) - except ServerError as e: - await self.abort_context(e, context) - return a2a_pb2.TaskPushNotificationConfig() - @validate( - lambda self: self.agent_card.capabilities.push_notifications, - 'Push notifications are not supported by the agent', - ) + return await self._handle_unary( + request, context, _handler, a2a_pb2.TaskPushNotificationConfig() + ) + async def CreateTaskPushNotificationConfig( self, - request: a2a_pb2.CreateTaskPushNotificationConfigRequest, + request: a2a_pb2.TaskPushNotificationConfig, context: grpc.aio.ServicerContext, ) -> a2a_pb2.TaskPushNotificationConfig: - """Handles the 'CreateTaskPushNotificationConfig' gRPC method. + """Handles the 'CreateTaskPushNotificationConfig' gRPC method.""" - Requires the agent to support push notifications. + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + return await self.request_handler.on_create_task_push_notification_config( + request, server_context + ) - Args: - request: The incoming `CreateTaskPushNotificationConfigRequest` object. - context: Context provided by the server. + return await self._handle_unary( + request, context, _handler, a2a_pb2.TaskPushNotificationConfig() + ) - Returns: - A `TaskPushNotificationConfig` object + async def ListTaskPushNotificationConfigs( + self, + request: a2a_pb2.ListTaskPushNotificationConfigsRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.ListTaskPushNotificationConfigsResponse: + """Handles the 'ListTaskPushNotificationConfig' gRPC method.""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.ListTaskPushNotificationConfigsResponse: + return await self.request_handler.on_list_task_push_notification_configs( + request, server_context + ) - Raises: - ServerError: If push notifications are not supported by the agent - (due to the `@validate` decorator). - """ - try: - server_context = self.context_builder.build(context) - config = ( - await self.request_handler.on_set_task_push_notification_config( - proto_utils.FromProto.task_push_notification_config_request( - request, - ), - server_context, - ) + return await self._handle_unary( + request, + context, + _handler, + a2a_pb2.ListTaskPushNotificationConfigsResponse(), + ) + + async def DeleteTaskPushNotificationConfig( + self, + request: a2a_pb2.DeleteTaskPushNotificationConfigRequest, + context: grpc.aio.ServicerContext, + ) -> empty_pb2.Empty: + """Handles the 'DeleteTaskPushNotificationConfig' gRPC method.""" + + async def _handler( + server_context: ServerCallContext, + ) -> empty_pb2.Empty: + await self.request_handler.on_delete_task_push_notification_config( + request, server_context ) - return proto_utils.ToProto.task_push_notification_config(config) - except ServerError as e: - await self.abort_context(e, context) - return a2a_pb2.TaskPushNotificationConfig() + return empty_pb2.Empty() + + return await self._handle_unary( + request, context, _handler, empty_pb2.Empty() + ) async def GetTask( self, request: a2a_pb2.GetTaskRequest, context: grpc.aio.ServicerContext, ) -> a2a_pb2.Task: - """Handles the 'GetTask' gRPC method. - - Args: - request: The incoming `GetTaskRequest` object. - context: Context provided by the server. + """Handles the 'GetTask' gRPC method.""" - Returns: - A `Task` object. - """ - try: - server_context = self.context_builder.build(context) + async def _handler(server_context: ServerCallContext) -> a2a_pb2.Task: task = await self.request_handler.on_get_task( - proto_utils.FromProto.task_query_params(request), server_context + request, server_context ) if task: - return proto_utils.ToProto.task(task) - await self.abort_context( - ServerError(error=TaskNotFoundError()), context + return task + raise TaskNotFoundError + + return await self._handle_unary( + request, context, _handler, a2a_pb2.Task() + ) + + async def ListTasks( + self, + request: a2a_pb2.ListTasksRequest, + context: grpc.aio.ServicerContext, + ) -> a2a_pb2.ListTasksResponse: + """Handles the 'ListTasks' gRPC method.""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.ListTasksResponse: + return await self.request_handler.on_list_tasks( + request, server_context ) - except ServerError as e: - await self.abort_context(e, context) - return a2a_pb2.Task() - async def GetAgentCard( + return await self._handle_unary( + request, context, _handler, a2a_pb2.ListTasksResponse() + ) + + async def GetExtendedAgentCard( self, - request: a2a_pb2.GetAgentCardRequest, + request: a2a_pb2.GetExtendedAgentCardRequest, context: grpc.aio.ServicerContext, ) -> a2a_pb2.AgentCard: - """Get the agent card for the agent served.""" - card_to_serve = self.agent_card - if self.card_modifier: - card_to_serve = await maybe_await(self.card_modifier(card_to_serve)) - return proto_utils.ToProto.agent_card(card_to_serve) + """Get the extended agent card for the agent served.""" + + async def _handler( + server_context: ServerCallContext, + ) -> a2a_pb2.AgentCard: + return await self.request_handler.on_get_extended_agent_card( + request, server_context + ) + + return await self._handle_unary( + request, context, _handler, a2a_pb2.AgentCard() + ) async def abort_context( - self, error: ServerError, context: grpc.aio.ServicerContext + self, error: A2AError, context: grpc.aio.ServicerContext ) -> None: """Sets the grpc errors appropriately in the context.""" - match error.error: - case types.JSONParseError(): - await context.abort( - grpc.StatusCode.INTERNAL, - f'JSONParseError: {error.error.message}', - ) - case types.InvalidRequestError(): - await context.abort( - grpc.StatusCode.INVALID_ARGUMENT, - f'InvalidRequestError: {error.error.message}', - ) - case types.MethodNotFoundError(): - await context.abort( - grpc.StatusCode.NOT_FOUND, - f'MethodNotFoundError: {error.error.message}', - ) - case types.InvalidParamsError(): - await context.abort( - grpc.StatusCode.INVALID_ARGUMENT, - f'InvalidParamsError: {error.error.message}', - ) - case types.InternalError(): - await context.abort( - grpc.StatusCode.INTERNAL, - f'InternalError: {error.error.message}', - ) - case types.TaskNotFoundError(): - await context.abort( - grpc.StatusCode.NOT_FOUND, - f'TaskNotFoundError: {error.error.message}', - ) - case types.TaskNotCancelableError(): - await context.abort( - grpc.StatusCode.UNIMPLEMENTED, - f'TaskNotCancelableError: {error.error.message}', - ) - case types.PushNotificationNotSupportedError(): - await context.abort( - grpc.StatusCode.UNIMPLEMENTED, - f'PushNotificationNotSupportedError: {error.error.message}', - ) - case types.UnsupportedOperationError(): - await context.abort( - grpc.StatusCode.UNIMPLEMENTED, - f'UnsupportedOperationError: {error.error.message}', - ) - case types.ContentTypeNotSupportedError(): - await context.abort( - grpc.StatusCode.UNIMPLEMENTED, - f'ContentTypeNotSupportedError: {error.error.message}', - ) - case types.InvalidAgentResponseError(): - await context.abort( - grpc.StatusCode.INTERNAL, - f'InvalidAgentResponseError: {error.error.message}', - ) - case _: - await context.abort( - grpc.StatusCode.UNKNOWN, - f'Unknown error type: {error.error}', + code = _ERROR_CODE_MAP.get(type(error)) + + if code: + reason = A2A_ERROR_REASONS.get(type(error), 'UNKNOWN_ERROR') + error_info = error_details_pb2.ErrorInfo( + reason=reason, + domain='a2a-protocol.org', + ) + + status_code = code.value[0] + error_msg = ( + error.message if hasattr(error, 'message') else str(error) + ) + + # Create standard Status with ErrorInfo for all A2A errors + status = status_pb2.Status(code=status_code, message=error_msg) + error_info_detail = any_pb2.Any() + error_info_detail.Pack(error_info) + status.details.append(error_info_detail) + + # Append structured field violations for validation errors + if ( + isinstance(error, types.InvalidParamsError) + and error.data + and error.data.get('errors') + ): + bad_request_detail = any_pb2.Any() + bad_request_detail.Pack( + validation_errors_to_bad_request(error.data['errors']) ) + status.details.append(bad_request_detail) + + # Use grpc_status to safely generate standard trailing metadata + rich_status = rpc_status.to_status(status) + + new_metadata: list[tuple[str, str | bytes]] = [] + trailing = context.trailing_metadata() + if trailing: + for k, v in trailing: + new_metadata.append((str(k), v)) + + for k, v in rich_status.trailing_metadata: + new_metadata.append((str(k), v)) + + context.set_trailing_metadata(tuple(new_metadata)) + await context.abort(rich_status.code, rich_status.details) + else: + await context.abort( + grpc.StatusCode.UNKNOWN, + f'Unknown error type: {error}', + ) - def _set_extension_metadata( + def _build_call_context( self, context: grpc.aio.ServicerContext, - server_context: ServerCallContext, - ) -> None: - if server_context.activated_extensions: - context.set_trailing_metadata( - [ - (HTTP_EXTENSION_HEADER.lower(), e) - for e in sorted(server_context.activated_extensions) - ] - ) + request: message.Message, + ) -> ServerCallContext: + server_context = self._context_builder.build(context) + server_context.tenant = getattr(request, 'tenant', '') + return server_context diff --git a/src/a2a/server/request_handlers/jsonrpc_handler.py b/src/a2a/server/request_handlers/jsonrpc_handler.py deleted file mode 100644 index 6df872fca..000000000 --- a/src/a2a/server/request_handlers/jsonrpc_handler.py +++ /dev/null @@ -1,464 +0,0 @@ -import logging - -from collections.abc import AsyncIterable, Awaitable, Callable - -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.server.request_handlers.response_helpers import prepare_response_object -from a2a.types import ( - AgentCard, - AuthenticatedExtendedCardNotConfiguredError, - CancelTaskRequest, - CancelTaskResponse, - CancelTaskSuccessResponse, - DeleteTaskPushNotificationConfigRequest, - DeleteTaskPushNotificationConfigResponse, - DeleteTaskPushNotificationConfigSuccessResponse, - GetAuthenticatedExtendedCardRequest, - GetAuthenticatedExtendedCardResponse, - GetAuthenticatedExtendedCardSuccessResponse, - GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigSuccessResponse, - GetTaskRequest, - GetTaskResponse, - GetTaskSuccessResponse, - InternalError, - JSONRPCErrorResponse, - ListTaskPushNotificationConfigRequest, - ListTaskPushNotificationConfigResponse, - ListTaskPushNotificationConfigSuccessResponse, - Message, - SendMessageRequest, - SendMessageResponse, - SendMessageSuccessResponse, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SendStreamingMessageSuccessResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse, - SetTaskPushNotificationConfigSuccessResponse, - Task, - TaskArtifactUpdateEvent, - TaskNotFoundError, - TaskPushNotificationConfig, - TaskResubscriptionRequest, - TaskStatusUpdateEvent, -) -from a2a.utils.errors import ServerError -from a2a.utils.helpers import maybe_await, validate -from a2a.utils.telemetry import SpanKind, trace_class - - -logger = logging.getLogger(__name__) - - -@trace_class(kind=SpanKind.SERVER) -class JSONRPCHandler: - """Maps incoming JSON-RPC requests to the appropriate request handler method and formats responses.""" - - def __init__( - self, - agent_card: AgentCard, - request_handler: RequestHandler, - extended_agent_card: AgentCard | None = None, - extended_card_modifier: Callable[ - [AgentCard, ServerCallContext], Awaitable[AgentCard] | AgentCard - ] - | None = None, - card_modifier: Callable[[AgentCard], Awaitable[AgentCard] | AgentCard] - | None = None, - ): - """Initializes the JSONRPCHandler. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - request_handler: The underlying `RequestHandler` instance to delegate requests to. - extended_agent_card: An optional, distinct Extended AgentCard to be served - extended_card_modifier: An optional callback to dynamically modify - the extended agent card before it is served. It receives the - call context. - card_modifier: An optional callback to dynamically modify the public - agent card before it is served. - """ - self.agent_card = agent_card - self.request_handler = request_handler - self.extended_agent_card = extended_agent_card - self.extended_card_modifier = extended_card_modifier - self.card_modifier = card_modifier - - async def on_message_send( - self, - request: SendMessageRequest, - context: ServerCallContext | None = None, - ) -> SendMessageResponse: - """Handles the 'message/send' JSON-RPC method. - - Args: - request: The incoming `SendMessageRequest` object. - context: Context provided by the server. - - Returns: - A `SendMessageResponse` object containing the result (Task or Message) - or a JSON-RPC error response if a `ServerError` is raised by the handler. - """ - # TODO: Wrap in error handler to return error states - try: - task_or_message = await self.request_handler.on_message_send( - request.params, context - ) - return prepare_response_object( - request.id, - task_or_message, - (Task, Message), - SendMessageSuccessResponse, - SendMessageResponse, - ) - except ServerError as e: - return SendMessageResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - @validate( - lambda self: self.agent_card.capabilities.streaming, - 'Streaming is not supported by the agent', - ) - async def on_message_send_stream( - self, - request: SendStreamingMessageRequest, - context: ServerCallContext | None = None, - ) -> AsyncIterable[SendStreamingMessageResponse]: - """Handles the 'message/stream' JSON-RPC method. - - Yields response objects as they are produced by the underlying handler's stream. - - Args: - request: The incoming `SendStreamingMessageRequest` object. - context: Context provided by the server. - - Yields: - `SendStreamingMessageResponse` objects containing streaming events - (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) - or JSON-RPC error responses if a `ServerError` is raised. - """ - try: - async for event in self.request_handler.on_message_send_stream( - request.params, context - ): - yield prepare_response_object( - request.id, - event, - ( - Task, - Message, - TaskArtifactUpdateEvent, - TaskStatusUpdateEvent, - ), - SendStreamingMessageSuccessResponse, - SendStreamingMessageResponse, - ) - except ServerError as e: - yield SendStreamingMessageResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - async def on_cancel_task( - self, - request: CancelTaskRequest, - context: ServerCallContext | None = None, - ) -> CancelTaskResponse: - """Handles the 'tasks/cancel' JSON-RPC method. - - Args: - request: The incoming `CancelTaskRequest` object. - context: Context provided by the server. - - Returns: - A `CancelTaskResponse` object containing the updated Task or a JSON-RPC error. - """ - try: - task = await self.request_handler.on_cancel_task( - request.params, context - ) - except ServerError as e: - return CancelTaskResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - if task: - return prepare_response_object( - request.id, - task, - (Task,), - CancelTaskSuccessResponse, - CancelTaskResponse, - ) - - return CancelTaskResponse( - root=JSONRPCErrorResponse(id=request.id, error=TaskNotFoundError()) - ) - - async def on_resubscribe_to_task( - self, - request: TaskResubscriptionRequest, - context: ServerCallContext | None = None, - ) -> AsyncIterable[SendStreamingMessageResponse]: - """Handles the 'tasks/resubscribe' JSON-RPC method. - - Yields response objects as they are produced by the underlying handler's stream. - - Args: - request: The incoming `TaskResubscriptionRequest` object. - context: Context provided by the server. - - Yields: - `SendStreamingMessageResponse` objects containing streaming events - or JSON-RPC error responses if a `ServerError` is raised. - """ - try: - async for event in self.request_handler.on_resubscribe_to_task( - request.params, context - ): - yield prepare_response_object( - request.id, - event, - ( - Task, - Message, - TaskArtifactUpdateEvent, - TaskStatusUpdateEvent, - ), - SendStreamingMessageSuccessResponse, - SendStreamingMessageResponse, - ) - except ServerError as e: - yield SendStreamingMessageResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - async def get_push_notification_config( - self, - request: GetTaskPushNotificationConfigRequest, - context: ServerCallContext | None = None, - ) -> GetTaskPushNotificationConfigResponse: - """Handles the 'tasks/pushNotificationConfig/get' JSON-RPC method. - - Args: - request: The incoming `GetTaskPushNotificationConfigRequest` object. - context: Context provided by the server. - - Returns: - A `GetTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. - """ - try: - config = ( - await self.request_handler.on_get_task_push_notification_config( - request.params, context - ) - ) - return prepare_response_object( - request.id, - config, - (TaskPushNotificationConfig,), - GetTaskPushNotificationConfigSuccessResponse, - GetTaskPushNotificationConfigResponse, - ) - except ServerError as e: - return GetTaskPushNotificationConfigResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - @validate( - lambda self: self.agent_card.capabilities.push_notifications, - 'Push notifications are not supported by the agent', - ) - async def set_push_notification_config( - self, - request: SetTaskPushNotificationConfigRequest, - context: ServerCallContext | None = None, - ) -> SetTaskPushNotificationConfigResponse: - """Handles the 'tasks/pushNotificationConfig/set' JSON-RPC method. - - Requires the agent to support push notifications. - - Args: - request: The incoming `SetTaskPushNotificationConfigRequest` object. - context: Context provided by the server. - - Returns: - A `SetTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. - - Raises: - ServerError: If push notifications are not supported by the agent - (due to the `@validate` decorator). - """ - try: - config = ( - await self.request_handler.on_set_task_push_notification_config( - request.params, context - ) - ) - return prepare_response_object( - request.id, - config, - (TaskPushNotificationConfig,), - SetTaskPushNotificationConfigSuccessResponse, - SetTaskPushNotificationConfigResponse, - ) - except ServerError as e: - return SetTaskPushNotificationConfigResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - async def on_get_task( - self, - request: GetTaskRequest, - context: ServerCallContext | None = None, - ) -> GetTaskResponse: - """Handles the 'tasks/get' JSON-RPC method. - - Args: - request: The incoming `GetTaskRequest` object. - context: Context provided by the server. - - Returns: - A `GetTaskResponse` object containing the Task or a JSON-RPC error. - """ - try: - task = await self.request_handler.on_get_task( - request.params, context - ) - except ServerError as e: - return GetTaskResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - if task: - return prepare_response_object( - request.id, - task, - (Task,), - GetTaskSuccessResponse, - GetTaskResponse, - ) - - return GetTaskResponse( - root=JSONRPCErrorResponse(id=request.id, error=TaskNotFoundError()) - ) - - async def list_push_notification_config( - self, - request: ListTaskPushNotificationConfigRequest, - context: ServerCallContext | None = None, - ) -> ListTaskPushNotificationConfigResponse: - """Handles the 'tasks/pushNotificationConfig/list' JSON-RPC method. - - Args: - request: The incoming `ListTaskPushNotificationConfigRequest` object. - context: Context provided by the server. - - Returns: - A `ListTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. - """ - try: - config = await self.request_handler.on_list_task_push_notification_config( - request.params, context - ) - return prepare_response_object( - request.id, - config, - (list,), - ListTaskPushNotificationConfigSuccessResponse, - ListTaskPushNotificationConfigResponse, - ) - except ServerError as e: - return ListTaskPushNotificationConfigResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - async def delete_push_notification_config( - self, - request: DeleteTaskPushNotificationConfigRequest, - context: ServerCallContext | None = None, - ) -> DeleteTaskPushNotificationConfigResponse: - """Handles the 'tasks/pushNotificationConfig/list' JSON-RPC method. - - Args: - request: The incoming `DeleteTaskPushNotificationConfigRequest` object. - context: Context provided by the server. - - Returns: - A `DeleteTaskPushNotificationConfigResponse` object containing the config or a JSON-RPC error. - """ - try: - ( - await self.request_handler.on_delete_task_push_notification_config( - request.params, context - ) - ) - return DeleteTaskPushNotificationConfigResponse( - root=DeleteTaskPushNotificationConfigSuccessResponse( - id=request.id, result=None - ) - ) - except ServerError as e: - return DeleteTaskPushNotificationConfigResponse( - root=JSONRPCErrorResponse( - id=request.id, error=e.error if e.error else InternalError() - ) - ) - - async def get_authenticated_extended_card( - self, - request: GetAuthenticatedExtendedCardRequest, - context: ServerCallContext | None = None, - ) -> GetAuthenticatedExtendedCardResponse: - """Handles the 'agent/authenticatedExtendedCard' JSON-RPC method. - - Args: - request: The incoming `GetAuthenticatedExtendedCardRequest` object. - context: Context provided by the server. - - Returns: - A `GetAuthenticatedExtendedCardResponse` object containing the config or a JSON-RPC error. - """ - if not self.agent_card.supports_authenticated_extended_card: - raise ServerError( - error=AuthenticatedExtendedCardNotConfiguredError( - message='Authenticated card not supported' - ) - ) - - base_card = self.extended_agent_card - if base_card is None: - base_card = self.agent_card - - card_to_serve = base_card - if self.extended_card_modifier and context: - card_to_serve = await maybe_await( - self.extended_card_modifier(base_card, context) - ) - elif self.card_modifier: - card_to_serve = await maybe_await(self.card_modifier(base_card)) - - return GetAuthenticatedExtendedCardResponse( - root=GetAuthenticatedExtendedCardSuccessResponse( - id=request.id, result=card_to_serve - ) - ) diff --git a/src/a2a/server/request_handlers/request_handler.py b/src/a2a/server/request_handlers/request_handler.py index 7ce76cc90..6fb42098f 100644 --- a/src/a2a/server/request_handlers/request_handler.py +++ b/src/a2a/server/request_handlers/request_handler.py @@ -1,35 +1,48 @@ +import functools +import inspect +import logging + from abc import ABC, abstractmethod -from collections.abc import AsyncGenerator +from collections.abc import AsyncGenerator, Callable +from typing import Any + +from google.protobuf.message import Message as ProtoMessage from a2a.server.context import ServerCallContext from a2a.server.events.event_queue import Event -from a2a.types import ( - DeleteTaskPushNotificationConfigParams, - GetTaskPushNotificationConfigParams, - ListTaskPushNotificationConfigParams, +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, Message, - MessageSendParams, + SendMessageRequest, + SubscribeToTaskRequest, Task, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, - UnsupportedOperationError, ) -from a2a.utils.errors import ServerError +from a2a.utils.errors import UnsupportedOperationError +from a2a.utils.proto_utils import validate_proto_required_fields class RequestHandler(ABC): """A2A request handler interface. This interface defines the methods that an A2A server implementation must - provide to handle incoming JSON-RPC requests. + provide to handle incoming A2A requests from any transport (gRPC, REST, JSON-RPC). """ @abstractmethod async def on_get_task( self, - params: TaskQueryParams, - context: ServerCallContext | None = None, + params: GetTaskRequest, + context: ServerCallContext, ) -> Task | None: """Handles the 'tasks/get' method. @@ -43,11 +56,28 @@ async def on_get_task( The `Task` object if found, otherwise `None`. """ + @abstractmethod + async def on_list_tasks( + self, params: ListTasksRequest, context: ServerCallContext + ) -> ListTasksResponse: + """Handles the tasks/list method. + + Retrieves all tasks for an agent. Supports filtering, pagination, + ordering, limiting the history length, excluding artifacts, etc. + + Args: + params: Parameters with filtering criteria. + context: Context provided by the server. + + Returns: + The `ListTasksResponse` containing the tasks. + """ + @abstractmethod async def on_cancel_task( self, - params: TaskIdParams, - context: ServerCallContext | None = None, + params: CancelTaskRequest, + context: ServerCallContext, ) -> Task | None: """Handles the 'tasks/cancel' method. @@ -64,8 +94,8 @@ async def on_cancel_task( @abstractmethod async def on_message_send( self, - params: MessageSendParams, - context: ServerCallContext | None = None, + params: SendMessageRequest, + context: ServerCallContext, ) -> Task | Message: """Handles the 'message/send' method (non-streaming). @@ -83,8 +113,8 @@ async def on_message_send( @abstractmethod async def on_message_send_stream( self, - params: MessageSendParams, - context: ServerCallContext | None = None, + params: SendMessageRequest, + context: ServerCallContext, ) -> AsyncGenerator[Event]: """Handles the 'message/stream' method (streaming). @@ -97,20 +127,18 @@ async def on_message_send_stream( Yields: `Event` objects from the agent's execution. - - Raises: - ServerError(UnsupportedOperationError): By default, if not implemented. """ - raise ServerError(error=UnsupportedOperationError()) + # This is needed for typechecker to recognise this method as an async generator. + raise UnsupportedOperationError yield @abstractmethod - async def on_set_task_push_notification_config( + async def on_create_task_push_notification_config( self, params: TaskPushNotificationConfig, - context: ServerCallContext | None = None, + context: ServerCallContext, ) -> TaskPushNotificationConfig: - """Handles the 'tasks/pushNotificationConfig/set' method. + """Handles the 'tasks/pushNotificationConfig/create' method. Sets or updates the push notification configuration for a task. @@ -125,8 +153,8 @@ async def on_set_task_push_notification_config( @abstractmethod async def on_get_task_push_notification_config( self, - params: TaskIdParams | GetTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, + params: GetTaskPushNotificationConfigRequest, + context: ServerCallContext, ) -> TaskPushNotificationConfig: """Handles the 'tasks/pushNotificationConfig/get' method. @@ -141,14 +169,14 @@ async def on_get_task_push_notification_config( """ @abstractmethod - async def on_resubscribe_to_task( + async def on_subscribe_to_task( self, - params: TaskIdParams, - context: ServerCallContext | None = None, + params: SubscribeToTaskRequest, + context: ServerCallContext, ) -> AsyncGenerator[Event]: - """Handles the 'tasks/resubscribe' method. + """Handles the 'SubscribeToTask' method. - Allows a client to re-subscribe to a running streaming task's event stream. + Allows a client to subscribe to a running streaming task's event stream. Args: params: Parameters including the task ID. @@ -156,20 +184,17 @@ async def on_resubscribe_to_task( Yields: `Event` objects from the agent's ongoing execution for the specified task. - - Raises: - ServerError(UnsupportedOperationError): By default, if not implemented. """ - raise ServerError(error=UnsupportedOperationError()) + raise UnsupportedOperationError yield @abstractmethod - async def on_list_task_push_notification_config( + async def on_list_task_push_notification_configs( self, - params: ListTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, - ) -> list[TaskPushNotificationConfig]: - """Handles the 'tasks/pushNotificationConfig/list' method. + params: ListTaskPushNotificationConfigsRequest, + context: ServerCallContext, + ) -> ListTaskPushNotificationConfigsResponse: + """Handles the 'ListTaskPushNotificationConfigs' method. Retrieves the current push notification configurations for a task. @@ -184,8 +209,8 @@ async def on_list_task_push_notification_config( @abstractmethod async def on_delete_task_push_notification_config( self, - params: DeleteTaskPushNotificationConfigParams, - context: ServerCallContext | None = None, + params: DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext, ) -> None: """Handles the 'tasks/pushNotificationConfig/delete' method. @@ -198,3 +223,190 @@ async def on_delete_task_push_notification_config( Returns: None """ + + @abstractmethod + async def on_get_extended_agent_card( + self, + params: GetExtendedAgentCardRequest, + context: ServerCallContext, + ) -> AgentCard: + """Handles the 'GetExtendedAgentCard' method. + + Retrieves the extended agent card for the agent. + + Args: + params: Parameters for the request. + context: Context provided by the server. + + Returns: + The `AgentCard` object representing the extended properties of the agent. + + """ + + +def validate_request_params(method: Callable) -> Callable: + """Decorator for RequestHandler methods to validate required fields on incoming requests.""" + if inspect.isasyncgenfunction(method): + + @functools.wraps(method) + async def async_gen_wrapper( + self: RequestHandler, + params: ProtoMessage, + context: ServerCallContext, + *args: Any, + **kwargs: Any, + ) -> Any: + if params is not None: + validate_proto_required_fields(params) + # Ensure the inner async generator is closed explicitly; + # bare async-for does not call aclose() on GeneratorExit, + # which on Python 3.12+ prevents the except/finally blocks + # in on_message_send_stream from running on client disconnect + # (background_consume and cleanup_producer tasks are never created). + inner = method(self, params, context, *args, **kwargs) + try: + async for item in inner: + yield item + finally: + await inner.aclose() + + return async_gen_wrapper + + @functools.wraps(method) + async def async_wrapper( + self: RequestHandler, + params: ProtoMessage, + context: ServerCallContext, + *args: Any, + **kwargs: Any, + ) -> Any: + if params is not None: + validate_proto_required_fields(params) + return await method(self, params, context, *args, **kwargs) + + return async_wrapper + + +def validate( + expression: Callable[[Any], bool], + error_message: str | None = None, + error_type: type[Exception] = UnsupportedOperationError, +) -> Callable: + """Decorator that validates if a given expression evaluates to True. + + Typically used on class methods to check capabilities or configuration + before executing the method's logic. If the expression is False, + the specified `error_type` (defaults to `UnsupportedOperationError`) is raised. + + Args: + expression: A callable that takes the instance (`self`) as its argument + and returns a boolean. + error_message: An optional custom error message for the error raised. + If None, the string representation of the expression will be used. + error_type: The exception class to raise on validation failure. + Must take a `message` keyword argument (inherited from A2AError). + + Examples: + Demonstrating with an async method: + >>> import asyncio + >>> from a2a.utils.errors import UnsupportedOperationError + >>> + >>> class MyAgent: + ... def __init__(self, streaming_enabled: bool): + ... self.streaming_enabled = streaming_enabled + ... + ... @validate( + ... lambda self: self.streaming_enabled, + ... 'Streaming is not enabled for this agent', + ... ) + ... async def stream_response(self, message: str): + ... return f'Streaming: {message}' + >>> + >>> async def run_async_test(): + ... # Successful call + ... agent_ok = MyAgent(streaming_enabled=True) + ... result = await agent_ok.stream_response('hello') + ... print(result) + ... + ... # Call that fails validation + ... agent_fail = MyAgent(streaming_enabled=False) + ... try: + ... await agent_fail.stream_response('world') + ... except UnsupportedOperationError as e: + ... print(e.message) + >>> + >>> asyncio.run(run_async_test()) + Streaming: hello + Streaming is not enabled for this agent + + Demonstrating with a sync method: + >>> class SecureAgent: + ... def __init__(self): + ... self.auth_enabled = False + ... + ... @validate( + ... lambda self: self.auth_enabled, + ... 'Authentication must be enabled for this operation', + ... ) + ... def secure_operation(self, data: str): + ... return f'Processing secure data: {data}' + >>> + >>> # Error case example + >>> agent = SecureAgent() + >>> try: + ... agent.secure_operation('secret') + ... except UnsupportedOperationError as e: + ... print(e.message) + Authentication must be enabled for this operation + + Note: + This decorator works with both sync and async methods automatically. + """ + + def decorator(function: Callable) -> Callable: + if inspect.isasyncgenfunction(function): + + @functools.wraps(function) + async def async_gen_wrapper(self: Any, *args, **kwargs) -> Any: + if not expression(self): + final_message = error_message or str(expression) + logging.getLogger(__name__).error( + 'Validation failure: %s', final_message + ) + raise error_type(final_message) + inner = function(self, *args, **kwargs) + try: + async for item in inner: + yield item + finally: + await inner.aclose() + + return async_gen_wrapper + + if inspect.iscoroutinefunction(function): + + @functools.wraps(function) + async def async_wrapper(self: Any, *args, **kwargs) -> Any: + if not expression(self): + final_message = error_message or str(expression) + logging.getLogger(__name__).error( + 'Validation failure: %s', final_message + ) + raise error_type(final_message) + return await function(self, *args, **kwargs) + + return async_wrapper + + @functools.wraps(function) + def sync_wrapper(self: Any, *args, **kwargs) -> Any: + if not expression(self): + final_message = error_message or str(expression) + logging.getLogger(__name__).error( + 'Validation failure: %s', final_message + ) + raise error_type(final_message) + return function(self, *args, **kwargs) + + return sync_wrapper + + return decorator diff --git a/src/a2a/server/request_handlers/response_helpers.py b/src/a2a/server/request_handlers/response_helpers.py index 4c55c4197..15a0c5263 100644 --- a/src/a2a/server/request_handlers/response_helpers.py +++ b/src/a2a/server/request_handlers/response_helpers.py @@ -1,142 +1,180 @@ """Helper functions for building A2A JSON-RPC responses.""" -# response types -from typing import TypeVar +from typing import Any -from a2a.types import ( - A2AError, - CancelTaskResponse, - CancelTaskSuccessResponse, - DeleteTaskPushNotificationConfigResponse, - DeleteTaskPushNotificationConfigSuccessResponse, - GetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigSuccessResponse, - GetTaskResponse, - GetTaskSuccessResponse, - InvalidAgentResponseError, +from google.protobuf.json_format import MessageToDict +from google.protobuf.message import Message as ProtoMessage +from jsonrpc.jsonrpc2 import JSONRPC20Response + +from a2a.compat.v0_3.conversions import to_compat_agent_card +from a2a.server.jsonrpc_models import ( + InternalError as JSONRPCInternalError, +) +from a2a.server.jsonrpc_models import ( JSONRPCError, - JSONRPCErrorResponse, - ListTaskPushNotificationConfigResponse, - ListTaskPushNotificationConfigSuccessResponse, +) +from a2a.types.a2a_pb2 import ( + AgentCard, + ListTasksResponse, Message, - SendMessageResponse, - SendMessageSuccessResponse, - SendStreamingMessageResponse, - SendStreamingMessageSuccessResponse, - SetTaskPushNotificationConfigResponse, - SetTaskPushNotificationConfigSuccessResponse, + StreamResponse, Task, TaskArtifactUpdateEvent, TaskPushNotificationConfig, TaskStatusUpdateEvent, ) - - -RT = TypeVar( - 'RT', - GetTaskResponse, - CancelTaskResponse, - SendMessageResponse, - SetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigResponse, - SendStreamingMessageResponse, - ListTaskPushNotificationConfigResponse, - DeleteTaskPushNotificationConfigResponse, +from a2a.types.a2a_pb2 import ( + SendMessageResponse as SendMessageResponseProto, ) -"""Type variable for RootModel response types.""" - -# success types -SPT = TypeVar( - 'SPT', - GetTaskSuccessResponse, - CancelTaskSuccessResponse, - SendMessageSuccessResponse, - SetTaskPushNotificationConfigSuccessResponse, - GetTaskPushNotificationConfigSuccessResponse, - SendStreamingMessageSuccessResponse, - ListTaskPushNotificationConfigSuccessResponse, - DeleteTaskPushNotificationConfigSuccessResponse, +from a2a.utils.errors import ( + JSON_RPC_ERROR_CODE_MAP, + A2AError, + ContentTypeNotSupportedError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + InternalError, + InvalidAgentResponseError, + InvalidParamsError, + InvalidRequestError, + MethodNotFoundError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, + UnsupportedOperationError, + VersionNotSupportedError, ) -"""Type variable for SuccessResponse types.""" -# result types + +EXCEPTION_MAP: dict[type[A2AError], type[JSONRPCError]] = { + TaskNotFoundError: JSONRPCError, + TaskNotCancelableError: JSONRPCError, + PushNotificationNotSupportedError: JSONRPCError, + UnsupportedOperationError: JSONRPCError, + ContentTypeNotSupportedError: JSONRPCError, + InvalidAgentResponseError: JSONRPCError, + ExtendedAgentCardNotConfiguredError: JSONRPCError, + InvalidParamsError: JSONRPCError, + InvalidRequestError: JSONRPCError, + MethodNotFoundError: JSONRPCError, + InternalError: JSONRPCInternalError, + ExtensionSupportRequiredError: JSONRPCError, + VersionNotSupportedError: JSONRPCError, +} + + +# Tuple of all A2AError types for isinstance checks +_A2A_ERROR_TYPES: tuple[type, ...] = (A2AError,) + + +# Result types for handler responses EventTypes = ( Task | Message | TaskArtifactUpdateEvent | TaskStatusUpdateEvent | TaskPushNotificationConfig + | StreamResponse + | SendMessageResponseProto | A2AError | JSONRPCError | list[TaskPushNotificationConfig] + | ListTasksResponse ) """Type alias for possible event types produced by handlers.""" +def agent_card_to_dict(card: AgentCard) -> dict[str, Any]: + """Convert AgentCard to dict and inject backward compatibility fields.""" + result = MessageToDict(card) + + try: + compat_card = to_compat_agent_card(card) + compat_dict = compat_card.model_dump(exclude_none=True) + except VersionNotSupportedError: + compat_dict = {} + + # Do not include supportsAuthenticatedExtendedCard if false + if not compat_dict.get('supportsAuthenticatedExtendedCard'): + compat_dict.pop('supportsAuthenticatedExtendedCard', None) + + def merge(dict1: dict[str, Any], dict2: dict[str, Any]) -> dict[str, Any]: + for k, v in dict2.items(): + if k not in dict1: + dict1[k] = v + elif isinstance(v, dict) and isinstance(dict1[k], dict): + merge(dict1[k], v) + elif isinstance(v, list) and isinstance(dict1[k], list): + for i in range(min(len(dict1[k]), len(v))): + if isinstance(dict1[k][i], dict) and isinstance(v[i], dict): + merge(dict1[k][i], v[i]) + return dict1 + + return merge(result, compat_dict) + + def build_error_response( request_id: str | int | None, error: A2AError | JSONRPCError, - response_wrapper_type: type[RT], -) -> RT: - """Helper method to build a JSONRPCErrorResponse wrapped in the appropriate response type. +) -> dict[str, Any]: + """Build a JSON-RPC error response dict. Args: request_id: The ID of the request that caused the error. error: The A2AError or JSONRPCError object. - response_wrapper_type: The Pydantic RootModel type that wraps the response - for the specific RPC method (e.g., `SendMessageResponse`). Returns: - A Pydantic model representing the JSON-RPC error response, - wrapped in the specified response type. + A dict representing the JSON-RPC error response. """ - return response_wrapper_type( - JSONRPCErrorResponse( - id=request_id, - error=error.root if isinstance(error, A2AError) else error, + jsonrpc_error: JSONRPCError + if isinstance(error, JSONRPCError): + jsonrpc_error = error + elif isinstance(error, A2AError): + error_type = type(error) + model_class = EXCEPTION_MAP.get(error_type, JSONRPCInternalError) + code = JSON_RPC_ERROR_CODE_MAP.get(error_type, -32603) + jsonrpc_error = model_class( + code=code, + message=str(error), + data=error.data, ) - ) + else: + jsonrpc_error = JSONRPCInternalError(message=str(error)) + + error_dict = jsonrpc_error.model_dump(exclude_none=True) + return JSONRPC20Response(error=error_dict, _id=request_id).data def prepare_response_object( request_id: str | int | None, response: EventTypes, success_response_types: tuple[type, ...], - success_payload_type: type[SPT], - response_type: type[RT], -) -> RT: - """Helper method to build appropriate JSONRPCResponse object for RPC methods. +) -> dict[str, Any]: + """Build a JSON-RPC response dict from handler output. Based on the type of the `response` object received from the handler, - it constructs either a success response wrapped in the appropriate payload type - or an error response. + it constructs either a success response or an error response. Args: request_id: The ID of the request. response: The object received from the request handler. - success_response_types: A tuple of expected Pydantic model types for a successful result. - success_payload_type: The Pydantic model type for the success payload - (e.g., `SendMessageSuccessResponse`). - response_type: The Pydantic RootModel type that wraps the final response - (e.g., `SendMessageResponse`). + success_response_types: A tuple of expected types for a successful result. Returns: - A Pydantic model representing the final JSON-RPC response (success or error). + A dict representing the JSON-RPC response (success or error). """ if isinstance(response, success_response_types): - return response_type( - root=success_payload_type(id=request_id, result=response) # type:ignore - ) + # Convert proto message to dict for JSON serialization + result: Any = response + if isinstance(response, ProtoMessage): + result = MessageToDict(response, preserving_proto_field_name=False) + return JSONRPC20Response(result=result, _id=request_id).data if isinstance(response, A2AError | JSONRPCError): - return build_error_response(request_id, response, response_type) + return build_error_response(request_id, response) - # If consumer_data is not an expected success type and not an error, - # it's an invalid type of response from the agent for this specific method. - response = A2AError( - root=InvalidAgentResponseError( - message='Agent returned invalid type response for this method' - ) + # If response is not an expected success type and not an error, + # it's an invalid type of response from the agent for this method. + error = InvalidAgentResponseError( + message='Agent returned invalid type response for this method' ) - - return build_error_response(request_id, response, response_type) + return build_error_response(request_id, error) diff --git a/src/a2a/server/request_handlers/rest_handler.py b/src/a2a/server/request_handlers/rest_handler.py deleted file mode 100644 index 59057487c..000000000 --- a/src/a2a/server/request_handlers/rest_handler.py +++ /dev/null @@ -1,307 +0,0 @@ -import logging - -from collections.abc import AsyncIterable, AsyncIterator -from typing import TYPE_CHECKING, Any - -from google.protobuf.json_format import MessageToDict, MessageToJson, Parse - - -if TYPE_CHECKING: - from starlette.requests import Request -else: - try: - from starlette.requests import Request - except ImportError: - Request = Any - - -from a2a.grpc import a2a_pb2 -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import ( - AgentCard, - GetTaskPushNotificationConfigParams, - TaskIdParams, - TaskNotFoundError, - TaskQueryParams, -) -from a2a.utils import proto_utils -from a2a.utils.errors import ServerError -from a2a.utils.helpers import validate -from a2a.utils.telemetry import SpanKind, trace_class - - -logger = logging.getLogger(__name__) - - -@trace_class(kind=SpanKind.SERVER) -class RESTHandler: - """Maps incoming REST-like (JSON+HTTP) requests to the appropriate request handler method and formats responses. - - This uses the protobuf definitions of the gRPC service as the source of truth. By - doing this, it ensures that this implementation and the gRPC transcoding - (via Envoy) are equivalent. This handler should be used if using the gRPC handler - with Envoy is not feasible for a given deployment solution. Use this handler - and a related application if you desire to ONLY server the RESTful API. - """ - - def __init__( - self, - agent_card: AgentCard, - request_handler: RequestHandler, - ): - """Initializes the RESTHandler. - - Args: - agent_card: The AgentCard describing the agent's capabilities. - request_handler: The underlying `RequestHandler` instance to delegate requests to. - """ - self.agent_card = agent_card - self.request_handler = request_handler - - async def on_message_send( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'message/send' REST method. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A `dict` containing the result (Task or Message) - """ - body = await request.body() - params = a2a_pb2.SendMessageRequest() - Parse(body, params) - # Transform the proto object to the python internal objects - a2a_request = proto_utils.FromProto.message_send_params( - params, - ) - task_or_message = await self.request_handler.on_message_send( - a2a_request, context - ) - return MessageToDict( - proto_utils.ToProto.task_or_message(task_or_message) - ) - - @validate( - lambda self: self.agent_card.capabilities.streaming, - 'Streaming is not supported by the agent', - ) - async def on_message_send_stream( - self, - request: Request, - context: ServerCallContext, - ) -> AsyncIterator[str]: - """Handles the 'message/stream' REST method. - - Yields response objects as they are produced by the underlying handler's stream. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Yields: - JSON serialized objects containing streaming events - (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) as JSON - """ - body = await request.body() - params = a2a_pb2.SendMessageRequest() - Parse(body, params) - # Transform the proto object to the python internal objects - a2a_request = proto_utils.FromProto.message_send_params( - params, - ) - async for event in self.request_handler.on_message_send_stream( - a2a_request, context - ): - response = proto_utils.ToProto.stream_response(event) - yield MessageToJson(response) - - async def on_cancel_task( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'tasks/cancel' REST method. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A `dict` containing the updated Task - """ - task_id = request.path_params['id'] - task = await self.request_handler.on_cancel_task( - TaskIdParams(id=task_id), context - ) - if task: - return MessageToDict(proto_utils.ToProto.task(task)) - raise ServerError(error=TaskNotFoundError()) - - @validate( - lambda self: self.agent_card.capabilities.streaming, - 'Streaming is not supported by the agent', - ) - async def on_resubscribe_to_task( - self, - request: Request, - context: ServerCallContext, - ) -> AsyncIterable[str]: - """Handles the 'tasks/resubscribe' REST method. - - Yields response objects as they are produced by the underlying handler's stream. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Yields: - JSON serialized objects containing streaming events - """ - task_id = request.path_params['id'] - async for event in self.request_handler.on_resubscribe_to_task( - TaskIdParams(id=task_id), context - ): - yield MessageToJson(proto_utils.ToProto.stream_response(event)) - - async def get_push_notification( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'tasks/pushNotificationConfig/get' REST method. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A `dict` containing the config - """ - task_id = request.path_params['id'] - push_id = request.path_params['push_id'] - params = GetTaskPushNotificationConfigParams( - id=task_id, push_notification_config_id=push_id - ) - config = ( - await self.request_handler.on_get_task_push_notification_config( - params, context - ) - ) - return MessageToDict( - proto_utils.ToProto.task_push_notification_config(config) - ) - - @validate( - lambda self: self.agent_card.capabilities.push_notifications, - 'Push notifications are not supported by the agent', - ) - async def set_push_notification( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'tasks/pushNotificationConfig/set' REST method. - - Requires the agent to support push notifications. - - Args: - request: The incoming `TaskPushNotificationConfig` object. - context: Context provided by the server. - - Returns: - A `dict` containing the config object. - - Raises: - ServerError: If push notifications are not supported by the agent - (due to the `@validate` decorator), A2AError if processing error is - found. - """ - task_id = request.path_params['id'] - body = await request.body() - params = a2a_pb2.CreateTaskPushNotificationConfigRequest() - Parse(body, params) - a2a_request = ( - proto_utils.FromProto.task_push_notification_config_request( - params, - ) - ) - a2a_request.task_id = task_id - config = ( - await self.request_handler.on_set_task_push_notification_config( - a2a_request, context - ) - ) - return MessageToDict( - proto_utils.ToProto.task_push_notification_config(config) - ) - - async def on_get_task( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'v1/tasks/{id}' REST method. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A `Task` object containing the Task. - """ - task_id = request.path_params['id'] - history_length_str = request.query_params.get('historyLength') - history_length = int(history_length_str) if history_length_str else None - params = TaskQueryParams(id=task_id, history_length=history_length) - task = await self.request_handler.on_get_task(params, context) - if task: - return MessageToDict(proto_utils.ToProto.task(task)) - raise ServerError(error=TaskNotFoundError()) - - async def list_push_notifications( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'tasks/pushNotificationConfig/list' REST method. - - This method is currently not implemented. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A list of `dict` representing the `TaskPushNotificationConfig` objects. - - Raises: - NotImplementedError: This method is not yet implemented. - """ - raise NotImplementedError('list notifications not implemented') - - async def list_tasks( - self, - request: Request, - context: ServerCallContext, - ) -> dict[str, Any]: - """Handles the 'tasks/list' REST method. - - This method is currently not implemented. - - Args: - request: The incoming `Request` object. - context: Context provided by the server. - - Returns: - A list of dict representing the`Task` objects. - - Raises: - NotImplementedError: This method is not yet implemented. - """ - raise NotImplementedError('list tasks not implemented') diff --git a/src/a2a/server/routes/__init__.py b/src/a2a/server/routes/__init__.py new file mode 100644 index 000000000..007e2722f --- /dev/null +++ b/src/a2a/server/routes/__init__.py @@ -0,0 +1,18 @@ +"""A2A Routes.""" + +from a2a.server.routes.agent_card_routes import create_agent_card_routes +from a2a.server.routes.common import ( + DefaultServerCallContextBuilder, + ServerCallContextBuilder, +) +from a2a.server.routes.jsonrpc_routes import create_jsonrpc_routes +from a2a.server.routes.rest_routes import create_rest_routes + + +__all__ = [ + 'DefaultServerCallContextBuilder', + 'ServerCallContextBuilder', + 'create_agent_card_routes', + 'create_jsonrpc_routes', + 'create_rest_routes', +] diff --git a/src/a2a/server/routes/agent_card_routes.py b/src/a2a/server/routes/agent_card_routes.py new file mode 100644 index 000000000..924a3d9dc --- /dev/null +++ b/src/a2a/server/routes/agent_card_routes.py @@ -0,0 +1,55 @@ +from collections.abc import Awaitable, Callable +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + from starlette.routing import Route + + _package_starlette_installed = True +else: + try: + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + from starlette.routing import Route + + _package_starlette_installed = True + except ImportError: + Route = Any + Request = Any + Response = Any + JSONResponse = Any + + _package_starlette_installed = False + +from a2a.server.request_handlers.response_helpers import agent_card_to_dict +from a2a.types.a2a_pb2 import AgentCard +from a2a.utils.constants import AGENT_CARD_WELL_KNOWN_PATH + + +def create_agent_card_routes( + agent_card: AgentCard, + card_modifier: Callable[[AgentCard], Awaitable[AgentCard]] | None = None, + card_url: str = AGENT_CARD_WELL_KNOWN_PATH, +) -> list['Route']: + """Creates the Starlette Route for the A2A protocol agent card endpoint.""" + if not _package_starlette_installed: + raise ImportError( + 'The `starlette` package is required to use `create_agent_card_routes`. ' + 'It can be installed as part of `a2a-sdk` optional dependencies, `a2a-sdk[http-server]`.' + ) + + async def _get_agent_card(request: Request) -> Response: + card_to_serve = agent_card + if card_modifier: + card_to_serve = await card_modifier(card_to_serve) + return JSONResponse(agent_card_to_dict(card_to_serve)) + + return [ + Route( + path=card_url, + endpoint=_get_agent_card, + methods=['GET'], + ) + ] diff --git a/src/a2a/server/routes/common.py b/src/a2a/server/routes/common.py new file mode 100644 index 000000000..18b6865c5 --- /dev/null +++ b/src/a2a/server/routes/common.py @@ -0,0 +1,85 @@ +from abc import ABC, abstractmethod +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from starlette.authentication import BaseUser + from starlette.requests import Request +else: + try: + from starlette.authentication import BaseUser + from starlette.requests import Request + except ImportError: + Request = Any + BaseUser = Any + +from a2a.auth.user import UnauthenticatedUser, User +from a2a.extensions.common import ( + HTTP_EXTENSION_HEADER, + get_requested_extensions, +) +from a2a.server.context import ServerCallContext + + +class StarletteUser(User): + """Adapts a Starlette BaseUser to the A2A User interface.""" + + def __init__(self, user: BaseUser): + self._user = user + + @property + def is_authenticated(self) -> bool: + """Returns whether the current user is authenticated.""" + return self._user.is_authenticated + + @property + def user_name(self) -> str: + """Returns the user name of the current user.""" + return self._user.display_name + + +class ServerCallContextBuilder(ABC): + """A class for building ServerCallContexts using the Starlette Request.""" + + @abstractmethod + def build(self, request: Request) -> ServerCallContext: + """Builds a ServerCallContext from a Starlette Request.""" + + +class DefaultServerCallContextBuilder(ServerCallContextBuilder): + """A default implementation of ServerCallContextBuilder.""" + + def build(self, request: Request) -> ServerCallContext: + """Builds a ServerCallContext from a Starlette Request. + + Args: + request: The incoming Starlette Request object. + + Returns: + A ServerCallContext instance populated with user and state + information from the request. + """ + state = {} + if 'auth' in request.scope: + state['auth'] = request.auth + state['headers'] = dict(request.headers) + return ServerCallContext( + user=self.build_user(request), + state=state, + requested_extensions=get_requested_extensions( + request.headers.getlist(HTTP_EXTENSION_HEADER) + ), + ) + + def build_user(self, request: Request) -> User: + """Builds a User from a Starlette Request. + + Args: + request: The incoming Starlette Request object. + + Returns: + A User instance populated with user information from the request. + """ + if 'user' in request.scope: + return StarletteUser(request.user) + return UnauthenticatedUser() diff --git a/src/a2a/server/routes/jsonrpc_dispatcher.py b/src/a2a/server/routes/jsonrpc_dispatcher.py new file mode 100644 index 000000000..cb4e93bf1 --- /dev/null +++ b/src/a2a/server/routes/jsonrpc_dispatcher.py @@ -0,0 +1,603 @@ +"""JSON-RPC application for A2A server.""" + +import json +import logging +import traceback + +from collections.abc import AsyncGenerator +from typing import TYPE_CHECKING, Any + +from google.protobuf.json_format import MessageToDict, ParseDict +from jsonrpc.jsonrpc2 import JSONRPC20Request, JSONRPC20Response + +from a2a.compat.v0_3.jsonrpc_adapter import JSONRPC03Adapter +from a2a.server.context import ServerCallContext +from a2a.server.events import Event +from a2a.server.jsonrpc_models import ( + InternalError, + InvalidParamsError, + InvalidRequestError, + JSONParseError, + JSONRPCError, + MethodNotFoundError, +) +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.request_handlers.response_helpers import ( + build_error_response, +) +from a2a.server.routes.common import ( + DefaultServerCallContextBuilder, + ServerCallContextBuilder, +) +from a2a.types.a2a_pb2 import ( + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + SendMessageRequest, + SendMessageResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, +) +from a2a.utils import constants, proto_utils +from a2a.utils.errors import ( + A2AError, + TaskNotFoundError, + UnsupportedOperationError, +) +from a2a.utils.telemetry import SpanKind, trace_class +from a2a.utils.version_validator import validate_version + + +INTERNAL_ERROR_CODE = -32603 + +logger = logging.getLogger(__name__) + +if TYPE_CHECKING: + from sse_starlette.sse import EventSourceResponse + from starlette.exceptions import HTTPException + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + try: + # Starlette v0.48.0 + from starlette.status import HTTP_413_CONTENT_TOO_LARGE + except ImportError: + from starlette.status import ( # type: ignore[no-redef] + HTTP_413_REQUEST_ENTITY_TOO_LARGE as HTTP_413_CONTENT_TOO_LARGE, + ) + + _package_starlette_installed = True +else: + try: + from sse_starlette.sse import EventSourceResponse + from starlette.exceptions import HTTPException + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + try: + # Starlette v0.48.0 + from starlette.status import HTTP_413_CONTENT_TOO_LARGE + except ImportError: + from starlette.status import ( + HTTP_413_REQUEST_ENTITY_TOO_LARGE as HTTP_413_CONTENT_TOO_LARGE, + ) + + _package_starlette_installed = True + except ImportError: + _package_starlette_installed = False + # Provide placeholder types for runtime type hinting when dependencies are not installed. + # These will not be used if the code path that needs them is guarded by _http_server_installed. + EventSourceResponse = Any + HTTPException = Any + Request = Any + JSONResponse = Any + Response = Any + HTTP_413_CONTENT_TOO_LARGE = Any + + +@trace_class(kind=SpanKind.SERVER) +class JsonRpcDispatcher: + """Base class for A2A JSONRPC applications. + + Handles incoming JSON-RPC requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). + """ + + # Method-to-model mapping for centralized routing + # Proto types don't have model_fields, so we define the mapping explicitly + # Method names match gRPC service method names + METHOD_TO_MODEL: dict[str, type] = { + 'SendMessage': SendMessageRequest, + 'SendStreamingMessage': SendMessageRequest, # Same proto type as SendMessage + 'GetTask': GetTaskRequest, + 'ListTasks': ListTasksRequest, + 'CancelTask': CancelTaskRequest, + 'CreateTaskPushNotificationConfig': TaskPushNotificationConfig, + 'GetTaskPushNotificationConfig': GetTaskPushNotificationConfigRequest, + 'ListTaskPushNotificationConfigs': ListTaskPushNotificationConfigsRequest, + 'DeleteTaskPushNotificationConfig': DeleteTaskPushNotificationConfigRequest, + 'SubscribeToTask': SubscribeToTaskRequest, + 'GetExtendedAgentCard': GetExtendedAgentCardRequest, + } + + def __init__( + self, + request_handler: RequestHandler, + context_builder: ServerCallContextBuilder | None = None, + enable_v0_3_compat: bool = False, + ) -> None: + """Initializes the JsonRpcDispatcher. + + Args: + request_handler: The handler instance responsible for processing A2A + requests via http. + context_builder: The ServerCallContextBuilder used to construct the + ServerCallContext passed to the request_handler. If None the + DefaultServerCallContextBuilder is used. + enable_v0_3_compat: Whether to enable v0.3 backward compatibility on the same endpoint. + """ + if not _package_starlette_installed: + raise ImportError( + 'Packages `starlette` and `sse-starlette` are required to use the' + ' `JsonRpcDispatcher`. They can be added as a part of `a2a-sdk`' + ' optional dependencies, `a2a-sdk[http-server]`.' + ) + + self.request_handler = request_handler + self._context_builder = ( + context_builder or DefaultServerCallContextBuilder() + ) + self.enable_v0_3_compat = enable_v0_3_compat + self._v03_adapter: JSONRPC03Adapter | None = None + + if self.enable_v0_3_compat: + self._v03_adapter = JSONRPC03Adapter( + http_handler=request_handler, + context_builder=self._context_builder, + ) + + def _generate_error_response( + self, + request_id: str | int | None, + error: Exception | JSONRPCError | A2AError, + ) -> JSONResponse: + """Creates a Starlette JSONResponse for a JSON-RPC error. + + Logs the error based on its type. + + Args: + request_id: The ID of the request that caused the error. + error: The error object (one of the JSONRPCError types). + + Returns: + A `JSONResponse` object formatted as a JSON-RPC error response. + """ + if not isinstance(error, A2AError | JSONRPCError): + error = InternalError(message=str(error)) + + response_data = build_error_response(request_id, error) + error_info = response_data.get('error', {}) + code = error_info.get('code') + message = error_info.get('message') + data = error_info.get('data') + + log_level = logging.WARNING + if code == INTERNAL_ERROR_CODE: + log_level = logging.ERROR + + logger.log( + log_level, + "Request Error (ID: %s): Code=%s, Message='%s'%s", + request_id, + code, + message, + f', Data={data}' if data else '', + ) + return JSONResponse( + response_data, + status_code=200, + ) + + async def handle_requests(self, request: Request) -> Response: # noqa: PLR0911, PLR0912 + """Handles incoming POST requests to the main A2A endpoint. + + Parses the request body as JSON, validates it against A2A request types, + dispatches it to the appropriate handler method, and returns the response. + Handles JSON parsing errors, validation errors, and other exceptions, + returning appropriate JSON-RPC error responses. + + Args: + request: The incoming Starlette Request object. + + Returns: + A Starlette Response object (JSONResponse or EventSourceResponse). + + Raises: + (Implicitly handled): Various exceptions are caught and converted + into JSON-RPC error responses by this method. + """ + request_id = None + body = None + + try: + body = await request.json() + if isinstance(body, dict): + request_id = body.get('id') + # Ensure request_id is valid for JSON-RPC response (str/int/None only) + if request_id is not None and not isinstance( + request_id, str | int + ): + request_id = None + logger.debug('Request body: %s', body) + # 1) Validate base JSON-RPC structure only (-32600 on failure) + try: + base_request = JSONRPC20Request.from_data(body) + if not isinstance(base_request, JSONRPC20Request): + # Batch requests are not supported + return self._generate_error_response( + request_id, + InvalidRequestError( + message='Batch requests are not supported' + ), + ) + if body.get('jsonrpc') != '2.0': + return self._generate_error_response( + request_id, + InvalidRequestError( + message="Invalid request: 'jsonrpc' must be exactly '2.0'" + ), + ) + except Exception as e: + logger.exception('Failed to validate base JSON-RPC request') + return self._generate_error_response( + request_id, + InvalidRequestError(data=str(e)), + ) + + # 2) Route by method name; unknown -> -32601, known -> validate params (-32602 on failure) + method: str | None = base_request.method + request_id = base_request._id # noqa: SLF001 + + if not method: + return self._generate_error_response( + request_id, + InvalidRequestError(message='Method is required'), + ) + + if ( + self.enable_v0_3_compat + and self._v03_adapter + and self._v03_adapter.supports_method(method) + ): + return await self._v03_adapter.handle_request( + request_id=request_id, + method=method, + body=body, + request=request, + ) + + model_class = self.METHOD_TO_MODEL.get(method) + if not model_class: + return self._generate_error_response( + request_id, MethodNotFoundError() + ) + try: + # Parse the params field into the proto message type + params = body.get('params', {}) + specific_request = ParseDict(params, model_class()) + except Exception as e: + logger.exception('Failed to parse request params') + return self._generate_error_response( + request_id, + InvalidParamsError(data=str(e)), + ) + + # 3) Build call context and wrap the request for downstream handling + call_context = self._context_builder.build(request) + call_context.tenant = getattr(specific_request, 'tenant', '') + call_context.state['method'] = method + call_context.state['request_id'] = request_id + + # Route streaming requests by method name + handler_result: ( + AsyncGenerator[dict[str, Any], None] | dict[str, Any] + ) + if method in ('SendStreamingMessage', 'SubscribeToTask'): + handler_result = await self._process_streaming_request( + request_id, specific_request, call_context + ) + else: + try: + raw_result = await self._process_non_streaming_request( + specific_request, call_context + ) + handler_result = JSONRPC20Response( + result=raw_result, _id=request_id + ).data + except A2AError as e: + handler_result = build_error_response(request_id, e) + return self._create_response(call_context, handler_result) + except json.decoder.JSONDecodeError as e: + traceback.print_exc() + return self._generate_error_response( + None, JSONParseError(message=str(e)) + ) + except HTTPException as e: + if e.status_code == HTTP_413_CONTENT_TOO_LARGE: + return self._generate_error_response( + request_id, + InvalidRequestError(message='Payload too large'), + ) + raise e + except A2AError as e: + return self._generate_error_response(request_id, e) + except Exception as e: + logger.exception('Unhandled exception') + return self._generate_error_response( + request_id, InternalError(message=str(e)) + ) + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _process_streaming_request( + self, + request_id: str | int | None, + request_obj: Any, + context: ServerCallContext, + ) -> AsyncGenerator[dict[str, Any], None]: + """Processes streaming requests (SendStreamingMessage or SubscribeToTask). + + Args: + request_id: The ID of the request. + request_obj: The proto request message. + context: The ServerCallContext for the request. + + Returns: + An `AsyncGenerator` object to stream results to the client. + """ + stream: AsyncGenerator | None = None + method = context.state.get('method') + if method == 'SendStreamingMessage': + stream = self.request_handler.on_message_send_stream( + request_obj, context + ) + elif method == 'SubscribeToTask': + stream = self.request_handler.on_subscribe_to_task( + request_obj, context + ) + + if stream is None: + raise UnsupportedOperationError(message='Stream not supported') + + # Eagerly fetch the first event to trigger validation/upfront errors + try: + first_event = await anext(stream) + except StopAsyncIteration: + first_event = None + + async def _wrap_stream( + st: AsyncGenerator, first_evt: Event | None + ) -> AsyncGenerator[dict[str, Any], None]: + def _map_event(evt: Event) -> dict[str, Any]: + stream_response = proto_utils.to_stream_response(evt) + result = MessageToDict( + stream_response, preserving_proto_field_name=False + ) + return JSONRPC20Response(result=result, _id=request_id).data + + try: + if first_evt is not None: + yield _map_event(first_evt) + + async for event in st: + yield _map_event(event) + except A2AError as e: + yield build_error_response(request_id, e) + + return _wrap_stream(stream, first_event) + + async def _handle_send_message( + self, request_obj: SendMessageRequest, context: ServerCallContext + ) -> dict[str, Any]: + task_or_message = await self.request_handler.on_message_send( + request_obj, context + ) + if isinstance(task_or_message, Task): + return MessageToDict(SendMessageResponse(task=task_or_message)) + return MessageToDict(SendMessageResponse(message=task_or_message)) + + async def _handle_cancel_task( + self, request_obj: CancelTaskRequest, context: ServerCallContext + ) -> dict[str, Any]: + task = await self.request_handler.on_cancel_task(request_obj, context) + if task: + return MessageToDict(task, preserving_proto_field_name=False) + raise TaskNotFoundError + + async def _handle_get_task( + self, request_obj: GetTaskRequest, context: ServerCallContext + ) -> dict[str, Any]: + task = await self.request_handler.on_get_task(request_obj, context) + if task: + return MessageToDict(task, preserving_proto_field_name=False) + raise TaskNotFoundError + + async def _handle_list_tasks( + self, request_obj: ListTasksRequest, context: ServerCallContext + ) -> dict[str, Any]: + tasks_response = await self.request_handler.on_list_tasks( + request_obj, context + ) + return MessageToDict( + tasks_response, + preserving_proto_field_name=False, + always_print_fields_with_no_presence=True, + ) + + async def _handle_create_task_push_notification_config( + self, + request_obj: TaskPushNotificationConfig, + context: ServerCallContext, + ) -> dict[str, Any]: + result_config = ( + await self.request_handler.on_create_task_push_notification_config( + request_obj, context + ) + ) + return MessageToDict(result_config, preserving_proto_field_name=False) + + async def _handle_get_task_push_notification_config( + self, + request_obj: GetTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> dict[str, Any]: + config = ( + await self.request_handler.on_get_task_push_notification_config( + request_obj, context + ) + ) + return MessageToDict(config, preserving_proto_field_name=False) + + async def _handle_list_task_push_notification_configs( + self, + request_obj: ListTaskPushNotificationConfigsRequest, + context: ServerCallContext, + ) -> dict[str, Any]: + configs_response = ( + await self.request_handler.on_list_task_push_notification_configs( + request_obj, context + ) + ) + return MessageToDict( + configs_response, preserving_proto_field_name=False + ) + + async def _handle_delete_task_push_notification_config( + self, + request_obj: DeleteTaskPushNotificationConfigRequest, + context: ServerCallContext, + ) -> None: + await self.request_handler.on_delete_task_push_notification_config( + request_obj, context + ) + + async def _handle_get_extended_agent_card( + self, + request_obj: GetExtendedAgentCardRequest, + context: ServerCallContext, + ) -> dict[str, Any]: + card = await self.request_handler.on_get_extended_agent_card( + request_obj, context + ) + return MessageToDict(card, preserving_proto_field_name=False) + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _process_non_streaming_request( # noqa: PLR0911 + self, + request_obj: Any, + context: ServerCallContext, + ) -> dict[str, Any] | None: + """Processes non-streaming requests. + + Args: + request_obj: The proto request message. + context: The ServerCallContext for the request. + + Returns: + A dict containing the result or error. + """ + method = context.state.get('method') + match method: + case 'SendMessage': + return await self._handle_send_message(request_obj, context) + case 'CancelTask': + return await self._handle_cancel_task(request_obj, context) + case 'GetTask': + return await self._handle_get_task(request_obj, context) + case 'ListTasks': + return await self._handle_list_tasks(request_obj, context) + case 'CreateTaskPushNotificationConfig': + return await self._handle_create_task_push_notification_config( + request_obj, context + ) + case 'GetTaskPushNotificationConfig': + return await self._handle_get_task_push_notification_config( + request_obj, context + ) + case 'ListTaskPushNotificationConfigs': + return await self._handle_list_task_push_notification_configs( + request_obj, context + ) + case 'DeleteTaskPushNotificationConfig': + await self._handle_delete_task_push_notification_config( + request_obj, context + ) + return None + case 'GetExtendedAgentCard': + return await self._handle_get_extended_agent_card( + request_obj, context + ) + case _: + logger.error('Unhandled method: %s', method) + raise UnsupportedOperationError( + message=f'Method {method} is not supported.' + ) + + def _create_response( + self, + context: ServerCallContext, + handler_result: AsyncGenerator[dict[str, Any]] | dict[str, Any], + ) -> Response: + """Creates a Starlette Response based on the result from the request handler. + + Handles: + - AsyncGenerator for Server-Sent Events (SSE). + - Dict responses from handlers. + + Args: + context: The ServerCallContext provided to the request handler. + handler_result: The result from a request handler method. Can be an + async generator for streaming or a dict for non-streaming. + + Returns: + A Starlette JSONResponse or EventSourceResponse. + """ + if isinstance(handler_result, AsyncGenerator): + # Result is a stream of dict objects + async def event_generator( + stream: AsyncGenerator[dict[str, Any]], + ) -> AsyncGenerator[dict[str, str]]: + try: + async for item in stream: + event: dict[str, str] = { + 'data': json.dumps(item), + } + if 'error' in item: + event['event'] = 'error' + yield event + except Exception as e: + logger.exception( + 'Unhandled error during JSON-RPC SSE stream' + ) + rpc_error: A2AError | JSONRPCError = ( + e + if isinstance(e, A2AError | JSONRPCError) + else InternalError(message=str(e)) + ) + error_response = build_error_response( + context.state.get('request_id'), rpc_error + ) + yield { + 'event': 'error', + 'data': json.dumps(error_response), + } + + return EventSourceResponse(event_generator(handler_result)) + + # handler_result is a dict (JSON-RPC response) + return JSONResponse(handler_result) diff --git a/src/a2a/server/routes/jsonrpc_routes.py b/src/a2a/server/routes/jsonrpc_routes.py new file mode 100644 index 000000000..a94d513ae --- /dev/null +++ b/src/a2a/server/routes/jsonrpc_routes.py @@ -0,0 +1,68 @@ +import logging + +from typing import TYPE_CHECKING, Any + + +if TYPE_CHECKING: + from starlette.routing import Route + + _package_starlette_installed = True +else: + try: + from starlette.routing import Route + + _package_starlette_installed = True + except ImportError: + Route = Any + + _package_starlette_installed = False + +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.routes.common import ServerCallContextBuilder +from a2a.server.routes.jsonrpc_dispatcher import JsonRpcDispatcher + + +logger = logging.getLogger(__name__) + + +def create_jsonrpc_routes( + request_handler: RequestHandler, + rpc_url: str, + context_builder: ServerCallContextBuilder | None = None, + enable_v0_3_compat: bool = False, +) -> list['Route']: + """Creates the Starlette Route for the A2A protocol JSON-RPC endpoint. + + Handles incoming JSON-RPC requests, routes them to the appropriate + handler methods, and manages response generation including Server-Sent Events + (SSE). + + Args: + request_handler: The handler instance responsible for processing A2A + requests via http. + rpc_url: The URL prefix for the RPC endpoints. Should start with a leading slash '/'. + context_builder: The ServerCallContextBuilder used to construct the + ServerCallContext passed to the request_handler. If None the + DefaultServerCallContextBuilder is used. + enable_v0_3_compat: Whether to enable v0.3 backward compatibility on the same endpoint. + """ + if not _package_starlette_installed: + raise ImportError( + 'The `starlette` package is required to use `create_jsonrpc_routes`.' + ' It can be added as a part of `a2a-sdk` optional dependencies,' + ' `a2a-sdk[http-server]`.' + ) + + dispatcher = JsonRpcDispatcher( + request_handler=request_handler, + context_builder=context_builder, + enable_v0_3_compat=enable_v0_3_compat, + ) + + return [ + Route( + path=rpc_url, + endpoint=dispatcher.handle_requests, + methods=['POST'], + ) + ] diff --git a/src/a2a/server/routes/rest_dispatcher.py b/src/a2a/server/routes/rest_dispatcher.py new file mode 100644 index 000000000..adbdba96e --- /dev/null +++ b/src/a2a/server/routes/rest_dispatcher.py @@ -0,0 +1,363 @@ +import json +import logging + +from collections.abc import AsyncIterator, Awaitable, Callable +from typing import TYPE_CHECKING, Any, TypeVar + +from google.protobuf.json_format import MessageToDict, Parse + +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.routes.common import ( + DefaultServerCallContextBuilder, + ServerCallContextBuilder, +) +from a2a.types import a2a_pb2 +from a2a.types.a2a_pb2 import ( + CancelTaskRequest, + GetTaskPushNotificationConfigRequest, + SubscribeToTaskRequest, +) +from a2a.utils import constants, proto_utils +from a2a.utils.error_handlers import ( + build_rest_error_payload, + rest_error_handler, + rest_stream_error_handler, +) +from a2a.utils.errors import ( + InvalidRequestError, + TaskNotFoundError, +) +from a2a.utils.telemetry import SpanKind, trace_class +from a2a.utils.version_validator import validate_version + + +if TYPE_CHECKING: + from sse_starlette.event import ServerSentEvent + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + _package_starlette_installed = True +else: + try: + from sse_starlette.event import ServerSentEvent + from sse_starlette.sse import EventSourceResponse + from starlette.requests import Request + from starlette.responses import JSONResponse, Response + + _package_starlette_installed = True + except ImportError: + EventSourceResponse = Any + ServerSentEvent = Any + Request = Any + JSONResponse = Any + Response = Any + + _package_starlette_installed = False + +logger = logging.getLogger(__name__) + +TResponse = TypeVar('TResponse') + + +@trace_class(kind=SpanKind.SERVER) +class RestDispatcher: + """Dispatches incoming REST requests to the appropriate handler methods. + + Handles context building, routing to RequestHandler directly, and response formatting (JSON/SSE). + """ + + def __init__( + self, + request_handler: RequestHandler, + context_builder: ServerCallContextBuilder | None = None, + ) -> None: + """Initializes the RestDispatcher. + + Args: + request_handler: The underlying `RequestHandler` instance to delegate requests to. + context_builder: The ServerCallContextBuilder used to construct the + ServerCallContext passed to the request_handler. If None the + DefaultServerCallContextBuilder is used. + """ + if not _package_starlette_installed: + raise ImportError( + 'Packages `starlette` and `sse-starlette` are required to use the' + ' `RestDispatcher`. They can be added as a part of `a2a-sdk` ' + 'optional dependencies, `a2a-sdk[http-server]`.' + ) + + self._context_builder = ( + context_builder or DefaultServerCallContextBuilder() + ) + self.request_handler = request_handler + + def _build_call_context(self, request: Request) -> ServerCallContext: + call_context = self._context_builder.build(request) + if 'tenant' in request.path_params: + call_context.tenant = request.path_params['tenant'] + return call_context + + async def _handle_non_streaming( + self, + request: Request, + handler_func: Callable[[ServerCallContext], Awaitable[TResponse]], + ) -> TResponse: + """Centralized error handling and context management for unary calls.""" + context = self._build_call_context(request) + return await handler_func(context) + + async def _handle_streaming( + self, + request: Request, + handler_func: Callable[[ServerCallContext], AsyncIterator[Any]], + ) -> EventSourceResponse: + """Centralized error handling and context management for streaming calls.""" + # Pre-consume and cache the request body to prevent deadlock in streaming context + # This is required because Starlette's request.body() can only be consumed once, + # and attempting to consume it after EventSourceResponse starts causes deadlock + try: + await request.body() + except (ValueError, RuntimeError, OSError) as e: + raise InvalidRequestError( + message=f'Failed to pre-consume request body: {e}' + ) from e + + context = self._build_call_context(request) + + # Eagerly fetch the first item from the stream so that errors raised + # before any event is yielded (e.g. validation, parsing, or handler + # failures) propagate here and are caught by + # @rest_stream_error_handler, which returns a JSONResponse with + # the correct HTTP status code instead of starting an SSE stream. + # Without this, the error would be raised after SSE headers are + # already sent, and the client would see a broken stream instead + stream = aiter(handler_func(context)) + try: + first_item = await anext(stream) + except StopAsyncIteration: + return EventSourceResponse(iter([])) + + async def event_generator() -> AsyncIterator[ServerSentEvent]: + yield ServerSentEvent(data=json.dumps(first_item)) + try: + async for item in stream: + yield ServerSentEvent(data=json.dumps(item)) + except Exception as e: + logger.exception('Error during REST SSE stream') + yield ServerSentEvent( + data=json.dumps(build_rest_error_payload(e)), + event='error', + ) + + return EventSourceResponse(event_generator()) + + @rest_error_handler + async def on_message_send(self, request: Request) -> Response: + """Handles the 'message/send' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.SendMessageResponse: + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + task_or_message = await self.request_handler.on_message_send( + params, context + ) + if isinstance(task_or_message, a2a_pb2.Task): + return a2a_pb2.SendMessageResponse(task=task_or_message) + return a2a_pb2.SendMessageResponse(message=task_or_message) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_stream_error_handler + async def on_message_send_stream( + self, request: Request + ) -> EventSourceResponse: + """Handles the 'message/stream' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> AsyncIterator[dict[str, Any]]: + body = await request.body() + params = a2a_pb2.SendMessageRequest() + Parse(body, params) + async for event in self.request_handler.on_message_send_stream( + params, context + ): + response = proto_utils.to_stream_response(event) + yield MessageToDict(response) + + return await self._handle_streaming(request, _handler) + + @rest_error_handler + async def on_cancel_task(self, request: Request) -> Response: + """Handles the 'tasks/cancel' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler(context: ServerCallContext) -> a2a_pb2.Task: + task_id = request.path_params['id'] + task = await self.request_handler.on_cancel_task( + CancelTaskRequest(id=task_id), context + ) + if task: + return task + raise TaskNotFoundError + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_stream_error_handler + async def on_subscribe_to_task( + self, request: Request + ) -> EventSourceResponse: + """Handles the 'SubscribeToTask' REST method.""" + task_id = request.path_params['id'] + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> AsyncIterator[dict[str, Any]]: + async for event in self.request_handler.on_subscribe_to_task( + SubscribeToTaskRequest(id=task_id), context + ): + response = proto_utils.to_stream_response(event) + yield MessageToDict(response) + + return await self._handle_streaming(request, _handler) + + @rest_error_handler + async def on_get_task(self, request: Request) -> Response: + """Handles the 'tasks/{id}' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler(context: ServerCallContext) -> a2a_pb2.Task: + params = a2a_pb2.GetTaskRequest() + proto_utils.parse_params(request.query_params, params) + params.id = request.path_params['id'] + task = await self.request_handler.on_get_task(params, context) + if task: + return task + raise TaskNotFoundError + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_error_handler + async def get_push_notification(self, request: Request) -> Response: + """Handles the 'tasks/pushNotificationConfig/get' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + task_id = request.path_params['id'] + push_id = request.path_params['push_id'] + params = GetTaskPushNotificationConfigRequest( + task_id=task_id, id=push_id + ) + return ( + await self.request_handler.on_get_task_push_notification_config( + params, context + ) + ) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_error_handler + async def delete_push_notification(self, request: Request) -> Response: + """Handles the 'tasks/pushNotificationConfig/delete' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler(context: ServerCallContext) -> None: + task_id = request.path_params['id'] + push_id = request.path_params['push_id'] + params = a2a_pb2.DeleteTaskPushNotificationConfigRequest( + task_id=task_id, id=push_id + ) + await self.request_handler.on_delete_task_push_notification_config( + params, context + ) + + await self._handle_non_streaming(request, _handler) + return JSONResponse(content={}) + + @rest_error_handler + async def set_push_notification(self, request: Request) -> Response: + """Handles the 'tasks/pushNotificationConfig/set' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.TaskPushNotificationConfig: + body = await request.body() + params = a2a_pb2.TaskPushNotificationConfig() + Parse(body, params) + params.task_id = request.path_params['id'] + return await self.request_handler.on_create_task_push_notification_config( + params, context + ) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_error_handler + async def list_push_notifications(self, request: Request) -> Response: + """Handles the 'tasks/pushNotificationConfig/list' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.ListTaskPushNotificationConfigsResponse: + params = a2a_pb2.ListTaskPushNotificationConfigsRequest() + proto_utils.parse_params(request.query_params, params) + params.task_id = request.path_params['id'] + return await self.request_handler.on_list_task_push_notification_configs( + params, context + ) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) + + @rest_error_handler + async def list_tasks(self, request: Request) -> Response: + """Handles the 'tasks/list' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.ListTasksResponse: + params = a2a_pb2.ListTasksRequest() + proto_utils.parse_params(request.query_params, params) + return await self.request_handler.on_list_tasks(params, context) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse( + content=MessageToDict( + response, always_print_fields_with_no_presence=True + ) + ) + + @rest_error_handler + async def handle_authenticated_agent_card( + self, request: Request + ) -> Response: + """Handles the 'agentCard' REST method.""" + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def _handler( + context: ServerCallContext, + ) -> a2a_pb2.AgentCard: + params = a2a_pb2.GetExtendedAgentCardRequest() + return await self.request_handler.on_get_extended_agent_card( + params, context + ) + + response = await self._handle_non_streaming(request, _handler) + return JSONResponse(content=MessageToDict(response)) diff --git a/src/a2a/server/routes/rest_routes.py b/src/a2a/server/routes/rest_routes.py new file mode 100644 index 000000000..2ba8cecfc --- /dev/null +++ b/src/a2a/server/routes/rest_routes.py @@ -0,0 +1,118 @@ +import logging + +from typing import TYPE_CHECKING, Any + +from a2a.compat.v0_3.rest_adapter import REST03Adapter +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.routes.common import ServerCallContextBuilder +from a2a.server.routes.rest_dispatcher import RestDispatcher + + +if TYPE_CHECKING: + from starlette.routing import BaseRoute, Mount, Route + + _package_starlette_installed = True +else: + try: + from starlette.routing import BaseRoute, Mount, Route + + _package_starlette_installed = True + except ImportError: + Route = Any + Mount = Any + BaseRoute = Any + + _package_starlette_installed = False + +logger = logging.getLogger(__name__) + + +def create_rest_routes( + request_handler: RequestHandler, + context_builder: ServerCallContextBuilder | None = None, + enable_v0_3_compat: bool = False, + path_prefix: str = '', +) -> list['BaseRoute']: + """Creates the Starlette Routes for the A2A protocol REST endpoint. + + Args: + request_handler: The handler instance responsible for processing A2A + requests via http. + context_builder: The ServerCallContextBuilder used to construct the + ServerCallContext passed to the request_handler. If None the + DefaultServerCallContextBuilder is used. + enable_v0_3_compat: If True, mounts backward-compatible v0.3 protocol + endpoints using REST03Adapter. + path_prefix: The URL prefix for the REST endpoints. + """ + if not _package_starlette_installed: + raise ImportError( + 'Packages `starlette` and `sse-starlette` are required to use' + ' the `create_rest_routes`. They can be added as a part of `a2a-sdk` ' + 'optional dependencies, `a2a-sdk[http-server]`.' + ) + + dispatcher = RestDispatcher( + request_handler=request_handler, + context_builder=context_builder, + ) + + routes: list[BaseRoute] = [] + if enable_v0_3_compat: + v03_adapter = REST03Adapter( + http_handler=request_handler, + context_builder=context_builder, + ) + v03_routes = v03_adapter.routes() + for (path, method), endpoint in v03_routes.items(): + routes.append( + Route( + path=f'{path_prefix}{path}', + endpoint=endpoint, + methods=[method], + ) + ) + + base_routes = { + ('/message:send', 'POST'): dispatcher.on_message_send, + ('/message:stream', 'POST'): dispatcher.on_message_send_stream, + ('/tasks/{id}:cancel', 'POST'): dispatcher.on_cancel_task, + ('/tasks/{id}:subscribe', 'GET'): dispatcher.on_subscribe_to_task, + ('/tasks/{id}:subscribe', 'POST'): dispatcher.on_subscribe_to_task, + ('/tasks/{id}', 'GET'): dispatcher.on_get_task, + ( + '/tasks/{id}/pushNotificationConfigs/{push_id}', + 'GET', + ): dispatcher.get_push_notification, + ( + '/tasks/{id}/pushNotificationConfigs/{push_id}', + 'DELETE', + ): dispatcher.delete_push_notification, + ( + '/tasks/{id}/pushNotificationConfigs', + 'POST', + ): dispatcher.set_push_notification, + ( + '/tasks/{id}/pushNotificationConfigs', + 'GET', + ): dispatcher.list_push_notifications, + ('/tasks', 'GET'): dispatcher.list_tasks, + ( + '/extendedAgentCard', + 'GET', + ): dispatcher.handle_authenticated_agent_card, + } + + base_route_objects = [] + for (path, method), endpoint in base_routes.items(): + base_route_objects.append( + Route( + path=f'{path_prefix}{path}', + endpoint=endpoint, + methods=[method], + ) + ) + routes.extend(base_route_objects) + routes.append(Mount(path='/{tenant}', routes=base_route_objects)) + + return routes diff --git a/src/a2a/server/tasks/__init__.py b/src/a2a/server/tasks/__init__.py index 641195ead..ea7745cc3 100644 --- a/src/a2a/server/tasks/__init__.py +++ b/src/a2a/server/tasks/__init__.py @@ -12,7 +12,10 @@ from a2a.server.tasks.push_notification_config_store import ( PushNotificationConfigStore, ) -from a2a.server.tasks.push_notification_sender import PushNotificationSender +from a2a.server.tasks.push_notification_sender import ( + PushNotificationEvent, + PushNotificationSender, +) from a2a.server.tasks.result_aggregator import ResultAggregator from a2a.server.tasks.task_manager import TaskManager from a2a.server.tasks.task_store import TaskStore @@ -72,6 +75,7 @@ def __init__(self, *args, **kwargs): 'InMemoryPushNotificationConfigStore', 'InMemoryTaskStore', 'PushNotificationConfigStore', + 'PushNotificationEvent', 'PushNotificationSender', 'ResultAggregator', 'TaskManager', diff --git a/src/a2a/server/tasks/base_push_notification_sender.py b/src/a2a/server/tasks/base_push_notification_sender.py index 087d2973d..4a4929e8f 100644 --- a/src/a2a/server/tasks/base_push_notification_sender.py +++ b/src/a2a/server/tasks/base_push_notification_sender.py @@ -3,11 +3,18 @@ import httpx +from google.protobuf.json_format import MessageToDict + +from a2a.server.context import ServerCallContext from a2a.server.tasks.push_notification_config_store import ( PushNotificationConfigStore, ) -from a2a.server.tasks.push_notification_sender import PushNotificationSender -from a2a.types import PushNotificationConfig, Task +from a2a.server.tasks.push_notification_sender import ( + PushNotificationEvent, + PushNotificationSender, +) +from a2a.types.a2a_pb2 import TaskPushNotificationConfig +from a2a.utils.proto_utils import to_stream_response logger = logging.getLogger(__name__) @@ -20,54 +27,65 @@ def __init__( self, httpx_client: httpx.AsyncClient, config_store: PushNotificationConfigStore, + context: ServerCallContext, ) -> None: """Initializes the BasePushNotificationSender. Args: httpx_client: An async HTTP client instance to send notifications. config_store: A PushNotificationConfigStore instance to retrieve configurations. + context: The `ServerCallContext` that this push notification is produced under. """ self._client = httpx_client self._config_store = config_store + self._call_context: ServerCallContext = context - async def send_notification(self, task: Task) -> None: - """Sends a push notification for a task if configuration exists.""" - push_configs = await self._config_store.get_info(task.id) + async def send_notification( + self, task_id: str, event: PushNotificationEvent + ) -> None: + """Sends a push notification for an event if configuration exists.""" + push_configs = await self._config_store.get_info( + task_id, self._call_context + ) if not push_configs: return awaitables = [ - self._dispatch_notification(task, push_info) + self._dispatch_notification(event, push_info, task_id) for push_info in push_configs ] results = await asyncio.gather(*awaitables) if not all(results): logger.warning( - 'Some push notifications failed to send for task_id=%s', task.id + 'Some push notifications failed to send for task_id=%s', task_id ) async def _dispatch_notification( - self, task: Task, push_info: PushNotificationConfig + self, + event: PushNotificationEvent, + push_info: TaskPushNotificationConfig, + task_id: str, ) -> bool: url = push_info.url try: headers = None if push_info.token: headers = {'X-A2A-Notification-Token': push_info.token} + response = await self._client.post( url, - json=task.model_dump(mode='json', exclude_none=True), + json=MessageToDict(to_stream_response(event)), headers=headers, ) response.raise_for_status() logger.info( - 'Push-notification sent for task_id=%s to URL: %s', task.id, url + 'Push-notification sent for task_id=%s to URL: %s', task_id, url ) except Exception: logger.exception( 'Error sending push-notification for task_id=%s to URL: %s.', - task.id, + task_id, url, ) return False diff --git a/src/a2a/server/tasks/copying_task_store.py b/src/a2a/server/tasks/copying_task_store.py new file mode 100644 index 000000000..f7f41bf1f --- /dev/null +++ b/src/a2a/server/tasks/copying_task_store.py @@ -0,0 +1,57 @@ +from __future__ import annotations + +import logging + +from typing import TYPE_CHECKING + + +if TYPE_CHECKING: + from a2a.server.context import ServerCallContext +from a2a.server.tasks.task_store import TaskStore +from a2a.types.a2a_pb2 import ListTasksRequest, ListTasksResponse, Task + + +logger = logging.getLogger(__name__) + + +class CopyingTaskStoreAdapter(TaskStore): + """An adapter that ensures deep copies of tasks are passed to and returned from the underlying TaskStore. + + This prevents accidental shared mutable state bugs where code modifies a Task object + retrieved from the store without explicitly saving it, which hides missing save calls. + """ + + def __init__(self, underlying_store: TaskStore): + self._store = underlying_store + + async def save(self, task: Task, context: ServerCallContext) -> None: + """Saves a copy of the task to the underlying store.""" + task_copy = Task() + task_copy.CopyFrom(task) + await self._store.save(task_copy, context) + + async def get( + self, task_id: str, context: ServerCallContext + ) -> Task | None: + """Retrieves a task from the underlying store and returns a copy.""" + task = await self._store.get(task_id, context) + if task is None: + return None + task_copy = Task() + task_copy.CopyFrom(task) + return task_copy + + async def list( + self, + params: ListTasksRequest, + context: ServerCallContext, + ) -> ListTasksResponse: + """Retrieves a list of tasks from the underlying store and returns a copy.""" + response = await self._store.list(params, context) + response_copy = ListTasksResponse() + response_copy.CopyFrom(response) + return response_copy + + async def delete(self, task_id: str, context: ServerCallContext) -> None: + """Deletes a task from the underlying store.""" + await self._store.delete(task_id, context) diff --git a/src/a2a/server/tasks/database_push_notification_config_store.py b/src/a2a/server/tasks/database_push_notification_config_store.py index e125f22a1..31cd676c8 100644 --- a/src/a2a/server/tasks/database_push_notification_config_store.py +++ b/src/a2a/server/tasks/database_push_notification_config_store.py @@ -1,18 +1,13 @@ # ruff: noqa: PLC0415 -import json import logging from typing import TYPE_CHECKING -from pydantic import ValidationError +from google.protobuf.json_format import MessageToJson, Parse try: - from sqlalchemy import ( - Table, - delete, - select, - ) + from sqlalchemy import Table, and_, delete, select from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, @@ -29,21 +24,27 @@ "or 'pip install a2a-sdk[sql]'" ) from e +from collections.abc import Callable + +from a2a.compat.v0_3.model_conversions import ( + compat_push_notification_config_model_to_core, +) +from a2a.server.context import ServerCallContext from a2a.server.models import ( Base, PushNotificationConfigModel, create_push_notification_config_model, ) +from a2a.server.owner_resolver import OwnerResolver, resolve_user_scope from a2a.server.tasks.push_notification_config_store import ( PushNotificationConfigStore, ) -from a2a.types import PushNotificationConfig +from a2a.types.a2a_pb2 import TaskPushNotificationConfig if TYPE_CHECKING: from cryptography.fernet import Fernet - logger = logging.getLogger(__name__) @@ -59,13 +60,35 @@ class DatabasePushNotificationConfigStore(PushNotificationConfigStore): _initialized: bool config_model: type[PushNotificationConfigModel] _fernet: 'Fernet | None' + owner_resolver: OwnerResolver + core_to_model_conversion: ( + Callable[ + [str, TaskPushNotificationConfig, str, 'Fernet | None'], + PushNotificationConfigModel, + ] + | None + ) + model_to_core_conversion: ( + Callable[[PushNotificationConfigModel], TaskPushNotificationConfig] + | None + ) - def __init__( + def __init__( # noqa: PLR0913 self, engine: AsyncEngine, create_table: bool = True, table_name: str = 'push_notification_configs', encryption_key: str | bytes | None = None, + owner_resolver: OwnerResolver = resolve_user_scope, + core_to_model_conversion: Callable[ + [str, TaskPushNotificationConfig, str, 'Fernet | None'], + PushNotificationConfigModel, + ] + | None = None, + model_to_core_conversion: Callable[ + [PushNotificationConfigModel], TaskPushNotificationConfig + ] + | None = None, ) -> None: """Initializes the DatabasePushNotificationConfigStore. @@ -76,6 +99,9 @@ def __init__( encryption_key: A key for encrypting sensitive configuration data. If provided, `config_data` will be encrypted in the database. The key must be a URL-safe base64-encoded 32-byte key. + owner_resolver: Function to resolve the owner from the context. + core_to_model_conversion: Optional function to convert a TaskPushNotificationConfig to a TaskPushNotificationConfigModel. + model_to_core_conversion: Optional function to convert a TaskPushNotificationConfigModel to a TaskPushNotificationConfig. """ logger.debug( 'Initializing DatabasePushNotificationConfigStore with existing engine, table: %s', @@ -87,16 +113,21 @@ def __init__( ) self.create_table = create_table self._initialized = False + self.owner_resolver = owner_resolver self.config_model = ( PushNotificationConfigModel if table_name == 'push_notification_configs' else create_push_notification_config_model(table_name) ) self._fernet = None + self.core_to_model_conversion = core_to_model_conversion + self.model_to_core_conversion = model_to_core_conversion if encryption_key: try: - from cryptography.fernet import Fernet + from cryptography.fernet import ( + Fernet, + ) except ImportError as e: raise ImportError( "DatabasePushNotificationConfigStore with encryption requires the 'cryptography' " @@ -139,13 +170,18 @@ async def _ensure_initialized(self) -> None: await self.initialize() def _to_orm( - self, task_id: str, config: PushNotificationConfig + self, task_id: str, config: TaskPushNotificationConfig, owner: str ) -> PushNotificationConfigModel: - """Maps a Pydantic PushNotificationConfig to a SQLAlchemy model instance. + """Maps a TaskPushNotificationConfig proto to a SQLAlchemy model instance. The config data is serialized to JSON bytes, and encrypted if a key is configured. """ - json_payload = config.model_dump_json().encode('utf-8') + if self.core_to_model_conversion: + return self.core_to_model_conversion( + task_id, config, owner, self._fernet + ) + + json_payload = MessageToJson(config).encode('utf-8') if self._fernet: data_to_store = self._fernet.encrypt(json_payload) @@ -155,52 +191,72 @@ def _to_orm( return self.config_model( task_id=task_id, config_id=config.id, + owner=owner, config_data=data_to_store, + protocol_version='1.0', ) def _from_orm( self, model_instance: PushNotificationConfigModel - ) -> PushNotificationConfig: - """Maps a SQLAlchemy model instance to a Pydantic PushNotificationConfig. + ) -> TaskPushNotificationConfig: + """Maps a SQLAlchemy model instance to a TaskPushNotificationConfig proto. Handles decryption if a key is configured, with a fallback to plain JSON. """ + if self.model_to_core_conversion: + return self.model_to_core_conversion(model_instance) + payload = model_instance.config_data if self._fernet: - from cryptography.fernet import InvalidToken + from cryptography.fernet import ( + InvalidToken, + ) try: decrypted_payload = self._fernet.decrypt(payload) - return PushNotificationConfig.model_validate_json( - decrypted_payload - ) - except (json.JSONDecodeError, ValidationError) as e: - logger.exception( - 'Failed to parse decrypted push notification config for task %s, config %s. ' - 'Data is corrupted or not valid JSON after decryption.', + return self._parse_config( + decrypted_payload.decode('utf-8'), model_instance.task_id, - model_instance.config_id, - ) - raise ValueError( - 'Failed to parse decrypted push notification config data' - ) from e - except InvalidToken: - # Decryption failed. This could be because the data is not encrypted. - # We'll log a warning and try to parse it as plain JSON as a fallback. - logger.warning( - 'Failed to decrypt push notification config for task %s, config %s. ' - 'Attempting to parse as unencrypted JSON. ' - 'This may indicate an incorrect encryption key or unencrypted data in the database.', - model_instance.task_id, - model_instance.config_id, + model_instance.protocol_version, ) - # Fall through to the unencrypted parsing logic below. + except Exception as e: + if isinstance(e, InvalidToken): + # Decryption failed. This could be because the data is not encrypted. + # We'll log a warning and try to parse it as plain JSON as a fallback. + logger.warning( + 'Failed to decrypt push notification config for task %s, config %s. ' + 'Attempting to parse as unencrypted JSON. ' + 'This may indicate an incorrect encryption key or unencrypted data in the database.', + model_instance.task_id, + model_instance.config_id, + ) + # Fall through to the unencrypted parsing logic below. + else: + logger.exception( + 'Failed to parse decrypted push notification config for task %s, config %s. ' + 'Data is corrupted or not valid JSON after decryption.', + model_instance.task_id, + model_instance.config_id, + ) + raise ValueError( # noqa: TRY004 + 'Failed to parse decrypted push notification config data' + ) from e # Try to parse as plain JSON. try: - return PushNotificationConfig.model_validate_json(payload) - except (json.JSONDecodeError, ValidationError) as e: + payload_str = ( + payload.decode('utf-8') + if isinstance(payload, bytes) + else payload + ) + return self._parse_config( + payload_str, + model_instance.task_id, + model_instance.protocol_version, + ) + + except Exception as e: if self._fernet: logger.exception( 'Failed to parse push notification config for task %s, config %s. ' @@ -223,30 +279,45 @@ def _from_orm( ) from e async def set_info( - self, task_id: str, notification_config: PushNotificationConfig + self, + task_id: str, + notification_config: TaskPushNotificationConfig, + context: ServerCallContext, ) -> None: """Sets or updates the push notification configuration for a task.""" await self._ensure_initialized() + owner = self.owner_resolver(context) - config_to_save = notification_config.model_copy() - if config_to_save.id is None: + # Create a copy of the config using proto CopyFrom + config_to_save = TaskPushNotificationConfig() + config_to_save.CopyFrom(notification_config) + if not config_to_save.id: config_to_save.id = task_id - db_config = self._to_orm(task_id, config_to_save) + db_config = self._to_orm(task_id, config_to_save, owner) async with self.async_session_maker.begin() as session: await session.merge(db_config) logger.debug( - 'Push notification config for task %s with config id %s saved/updated.', + 'Push notification config for task %s with config id %s for owner %s saved/updated.', task_id, config_to_save.id, + owner, ) - async def get_info(self, task_id: str) -> list[PushNotificationConfig]: - """Retrieves all push notification configurations for a task.""" + async def get_info( + self, + task_id: str, + context: ServerCallContext, + ) -> list[TaskPushNotificationConfig]: + """Retrieves all push notification configurations for a task, for the given owner.""" await self._ensure_initialized() + owner = self.owner_resolver(context) async with self.async_session_maker() as session: stmt = select(self.config_model).where( - self.config_model.task_id == task_id + and_( + self.config_model.task_id == task_id, + self.config_model.owner == owner, + ) ) result = await session.execute(stmt) models = result.scalars().all() @@ -257,39 +328,70 @@ async def get_info(self, task_id: str) -> list[PushNotificationConfig]: configs.append(self._from_orm(model)) except ValueError: # noqa: PERF203 logger.exception( - 'Could not deserialize push notification config for task %s, config %s', + 'Could not deserialize push notification config for task %s, config %s, owner %s', model.task_id, model.config_id, + owner, ) return configs async def delete_info( - self, task_id: str, config_id: str | None = None + self, + task_id: str, + context: ServerCallContext, + config_id: str | None = None, ) -> None: """Deletes push notification configurations for a task. If config_id is provided, only that specific configuration is deleted. - If config_id is None, all configurations for the task are deleted. + If config_id is None, all configurations for the task for the owner are deleted. """ await self._ensure_initialized() + owner = self.owner_resolver(context) async with self.async_session_maker.begin() as session: stmt = delete(self.config_model).where( - self.config_model.task_id == task_id + and_( + self.config_model.task_id == task_id, + self.config_model.owner == owner, + ) ) if config_id is not None: stmt = stmt.where(self.config_model.config_id == config_id) result = await session.execute(stmt) - if result.rowcount > 0: + if result.rowcount > 0: # type: ignore[attr-defined] logger.info( - 'Deleted %s push notification config(s) for task %s.', - result.rowcount, + 'Deleted %s push notification config(s) for task %s, owner %s.', + result.rowcount, # type: ignore[attr-defined] task_id, + owner, ) else: logger.warning( - 'Attempted to delete push notification config for task %s with config_id: %s that does not exist.', + 'Attempted to delete push notification config for task %s, owner %s with config_id: %s that does not exist.', task_id, + owner, config_id, ) + + def _parse_config( + self, + json_payload: str, + task_id: str | None = None, + protocol_version: str | None = None, + ) -> TaskPushNotificationConfig: + """Parses a JSON payload into a TaskPushNotificationConfig proto. + + Args: + json_payload: The JSON payload to parse. + task_id: The unique identifier of the task. Only required for legacy + (0.3) protocol versions. + protocol_version: The protocol version used for serialization. + """ + if protocol_version == '1.0': + return Parse(json_payload, TaskPushNotificationConfig()) + + return compat_push_notification_config_model_to_core( + json_payload, task_id or '' + ) diff --git a/src/a2a/server/tasks/database_task_store.py b/src/a2a/server/tasks/database_task_store.py index 07ba7e970..62a760b24 100644 --- a/src/a2a/server/tasks/database_task_store.py +++ b/src/a2a/server/tasks/database_task_store.py @@ -1,8 +1,11 @@ import logging +from collections.abc import Callable +from datetime import datetime, timezone + try: - from sqlalchemy import Table, delete, select + from sqlalchemy import Table, and_, delete, func, or_, select from sqlalchemy.ext.asyncio import ( AsyncEngine, AsyncSession, @@ -18,11 +21,20 @@ "'pip install a2a-sdk[sqlite]', " "or 'pip install a2a-sdk[sql]'" ) from e +from google.protobuf.json_format import MessageToDict, ParseDict +from a2a.compat.v0_3.model_conversions import ( + compat_task_model_to_core, +) from a2a.server.context import ServerCallContext from a2a.server.models import Base, TaskModel, create_task_model +from a2a.server.owner_resolver import OwnerResolver, resolve_user_scope from a2a.server.tasks.task_store import TaskStore -from a2a.types import Task # Task is the Pydantic model +from a2a.types import a2a_pb2 +from a2a.types.a2a_pb2 import Task +from a2a.utils.constants import DEFAULT_LIST_TASKS_PAGE_SIZE +from a2a.utils.errors import InvalidParamsError +from a2a.utils.task import decode_page_token, encode_page_token logger = logging.getLogger(__name__) @@ -39,12 +51,19 @@ class DatabaseTaskStore(TaskStore): create_table: bool _initialized: bool task_model: type[TaskModel] + owner_resolver: OwnerResolver + core_to_model_conversion: Callable[[Task, str], TaskModel] | None = None + model_to_core_conversion: Callable[[TaskModel], Task] | None = None - def __init__( + def __init__( # noqa: PLR0913 self, engine: AsyncEngine, create_table: bool = True, table_name: str = 'tasks', + owner_resolver: OwnerResolver = resolve_user_scope, + core_to_model_conversion: Callable[[Task, str], TaskModel] + | None = None, + model_to_core_conversion: Callable[[TaskModel], Task] | None = None, ) -> None: """Initializes the DatabaseTaskStore. @@ -52,6 +71,9 @@ def __init__( engine: An existing SQLAlchemy AsyncEngine to be used by Task Store create_table: If true, create tasks table on initialization. table_name: Name of the database table. Defaults to 'tasks'. + owner_resolver: Function to resolve the owner from the context. + core_to_model_conversion: Optional function to convert a Task to a TaskModel. + model_to_core_conversion: Optional function to convert a TaskModel to a Task. """ logger.debug( 'Initializing DatabaseTaskStore with existing engine, table: %s', @@ -63,6 +85,9 @@ def __init__( ) self.create_table = create_table self._initialized = False + self.owner_resolver = owner_resolver + self.core_to_model_conversion = core_to_model_conversion + self.model_to_core_conversion = model_to_core_conversion self.task_model = ( TaskModel @@ -93,74 +118,223 @@ async def _ensure_initialized(self) -> None: if not self._initialized: await self.initialize() - def _to_orm(self, task: Task) -> TaskModel: - """Maps a Pydantic Task to a SQLAlchemy TaskModel instance.""" + def _to_orm(self, task: Task, owner: str) -> TaskModel: + """Maps a Proto Task to a SQLAlchemy TaskModel instance.""" + if self.core_to_model_conversion: + return self.core_to_model_conversion(task, owner) + return self.task_model( id=task.id, context_id=task.context_id, - kind=task.kind, - status=task.status, - artifacts=task.artifacts, - history=task.history, - task_metadata=task.metadata, + kind='task', # Default kind for tasks + owner=owner, + last_updated=( + task.status.timestamp.ToDatetime() + if task.status.HasField('timestamp') + else None + ), + status=MessageToDict(task.status), + artifacts=[MessageToDict(artifact) for artifact in task.artifacts], + history=[MessageToDict(history) for history in task.history], + task_metadata=( + MessageToDict(task.metadata) if task.metadata.fields else None + ), + protocol_version='1.0', ) def _from_orm(self, task_model: TaskModel) -> Task: - """Maps a SQLAlchemy TaskModel to a Pydantic Task instance.""" - # Map database columns to Pydantic model fields - task_data_from_db = { - 'id': task_model.id, - 'context_id': task_model.context_id, - 'kind': task_model.kind, - 'status': task_model.status, - 'artifacts': task_model.artifacts, - 'history': task_model.history, - 'metadata': task_model.task_metadata, # Map task_metadata column to metadata field - } - # Pydantic's model_validate will parse the nested dicts/lists from JSON - return Task.model_validate(task_data_from_db) - - async def save( - self, task: Task, context: ServerCallContext | None = None - ) -> None: - """Saves or updates a task in the database.""" + """Maps a SQLAlchemy TaskModel to a Proto Task instance.""" + if self.model_to_core_conversion: + return self.model_to_core_conversion(task_model) + + if task_model.protocol_version == '1.0': + task = Task( + id=task_model.id, + context_id=task_model.context_id, + ) + if task_model.status: + ParseDict(task_model.status, task.status) + if task_model.artifacts: + for art_dict in task_model.artifacts: + art = task.artifacts.add() + ParseDict(art_dict, art) + if task_model.history: + for msg_dict in task_model.history: + msg = task.history.add() + ParseDict(msg_dict, msg) + if task_model.task_metadata: + task.metadata.update(task_model.task_metadata) + return task + + # Legacy conversion + return compat_task_model_to_core(task_model) + + async def save(self, task: Task, context: ServerCallContext) -> None: + """Saves or updates a task in the database for the resolved owner.""" await self._ensure_initialized() - db_task = self._to_orm(task) + owner = self.owner_resolver(context) + db_task = self._to_orm(task, owner) async with self.async_session_maker.begin() as session: await session.merge(db_task) - logger.debug('Task %s saved/updated successfully.', task.id) + logger.debug( + 'Task %s for owner %s saved/updated successfully.', + task.id, + owner, + ) async def get( - self, task_id: str, context: ServerCallContext | None = None + self, task_id: str, context: ServerCallContext ) -> Task | None: - """Retrieves a task from the database by ID.""" + """Retrieves a task from the database by ID, for the given owner.""" await self._ensure_initialized() + owner = self.owner_resolver(context) async with self.async_session_maker() as session: - stmt = select(self.task_model).where(self.task_model.id == task_id) + stmt = select(self.task_model).where( + and_( + self.task_model.id == task_id, + self.task_model.owner == owner, + ) + ) result = await session.execute(stmt) task_model = result.scalar_one_or_none() if task_model: task = self._from_orm(task_model) - logger.debug('Task %s retrieved successfully.', task_id) + logger.debug( + 'Task %s retrieved successfully for owner %s.', + task_id, + owner, + ) return task - logger.debug('Task %s not found in store.', task_id) + logger.debug( + 'Task %s not found in store for owner %s.', task_id, owner + ) return None - async def delete( - self, task_id: str, context: ServerCallContext | None = None - ) -> None: - """Deletes a task from the database by ID.""" + async def list( + self, + params: a2a_pb2.ListTasksRequest, + context: ServerCallContext, + ) -> a2a_pb2.ListTasksResponse: + """Retrieves tasks from the database based on provided parameters, for the given owner.""" + await self._ensure_initialized() + owner = self.owner_resolver(context) + logger.debug('Listing tasks for owner %s with params %s', owner, params) + + async with self.async_session_maker() as session: + timestamp_col = self.task_model.last_updated + base_stmt = select(self.task_model).where( + self.task_model.owner == owner + ) + + # Add filters + if params.context_id: + base_stmt = base_stmt.where( + self.task_model.context_id == params.context_id + ) + if params.status: + base_stmt = base_stmt.where( + self.task_model.status['state'].as_string() + == a2a_pb2.TaskState.Name(params.status) + ) + if params.HasField('status_timestamp_after'): + last_updated_after = params.status_timestamp_after.ToDatetime() + base_stmt = base_stmt.where(timestamp_col >= last_updated_after) + + # Get total count + count_stmt = select(func.count()).select_from(base_stmt.alias()) + total_count = (await session.execute(count_stmt)).scalar_one() + + # Use coalesce to treat NULL timestamps as datetime.min, + # which sort last in descending order + stmt = base_stmt.order_by( + func.coalesce( + timestamp_col, + datetime.min.replace(tzinfo=timezone.utc), + ).desc(), + self.task_model.id.desc(), + ) + + # Get paginated results + if params.page_token: + start_task_id = decode_page_token(params.page_token) + start_task = ( + await session.execute( + select(self.task_model).where( + and_( + self.task_model.id == start_task_id, + self.task_model.owner == owner, + ) + ) + ) + ).scalar_one_or_none() + if not start_task: + raise InvalidParamsError( + f'Invalid page token: {params.page_token}' + ) + + start_task_timestamp = start_task.last_updated + where_clauses = [] + if start_task_timestamp: + where_clauses.append( + and_( + timestamp_col == start_task_timestamp, + self.task_model.id <= start_task_id, + ) + ) + where_clauses.append(timestamp_col < start_task_timestamp) + where_clauses.append(timestamp_col.is_(None)) + else: + where_clauses.append( + and_( + timestamp_col.is_(None), + self.task_model.id <= start_task_id, + ) + ) + stmt = stmt.where(or_(*where_clauses)) + + page_size = params.page_size or DEFAULT_LIST_TASKS_PAGE_SIZE + stmt = stmt.limit(page_size + 1) # Add 1 for next page token + + result = await session.execute(stmt) + tasks_models = result.scalars().all() + tasks = [self._from_orm(task_model) for task_model in tasks_models] + + next_page_token = ( + encode_page_token(tasks[-1].id) + if len(tasks) == page_size + 1 + else None + ) + + return a2a_pb2.ListTasksResponse( + tasks=tasks[:page_size], + total_size=total_count, + next_page_token=next_page_token, + page_size=page_size, + ) + + async def delete(self, task_id: str, context: ServerCallContext) -> None: + """Deletes a task from the database by ID, for the given owner.""" await self._ensure_initialized() + owner = self.owner_resolver(context) async with self.async_session_maker.begin() as session: - stmt = delete(self.task_model).where(self.task_model.id == task_id) + stmt = delete(self.task_model).where( + and_( + self.task_model.id == task_id, + self.task_model.owner == owner, + ) + ) result = await session.execute(stmt) # Commit is automatic when using session.begin() - if result.rowcount > 0: - logger.info('Task %s deleted successfully.', task_id) + if result.rowcount > 0: # type: ignore[attr-defined] + logger.info( + 'Task %s deleted successfully for owner %s.', task_id, owner + ) else: logger.warning( - 'Attempted to delete nonexistent task with id: %s', task_id + 'Attempted to delete nonexistent task with id: %s and owner %s', + task_id, + owner, ) diff --git a/src/a2a/server/tasks/inmemory_push_notification_config_store.py b/src/a2a/server/tasks/inmemory_push_notification_config_store.py index c5bc5dbe6..d5b0a5b1f 100644 --- a/src/a2a/server/tasks/inmemory_push_notification_config_store.py +++ b/src/a2a/server/tasks/inmemory_push_notification_config_store.py @@ -1,10 +1,12 @@ import asyncio import logging +from a2a.server.context import ServerCallContext +from a2a.server.owner_resolver import OwnerResolver, resolve_user_scope from a2a.server.tasks.push_notification_config_store import ( PushNotificationConfigStore, ) -from a2a.types import PushNotificationConfig +from a2a.types.a2a_pb2 import TaskPushNotificationConfig logger = logging.getLogger(__name__) @@ -13,56 +15,122 @@ class InMemoryPushNotificationConfigStore(PushNotificationConfigStore): """In-memory implementation of PushNotificationConfigStore interface. - Stores push notification configurations in memory + Stores push notification configurations in a nested dictionary in memory, + keyed by owner then task_id. """ - def __init__(self) -> None: + def __init__( + self, + owner_resolver: OwnerResolver = resolve_user_scope, + ) -> None: """Initializes the InMemoryPushNotificationConfigStore.""" self.lock = asyncio.Lock() self._push_notification_infos: dict[ - str, list[PushNotificationConfig] + str, dict[str, list[TaskPushNotificationConfig]] ] = {} + self.owner_resolver = owner_resolver + + def _get_owner_push_notification_infos( + self, owner: str + ) -> dict[str, list[TaskPushNotificationConfig]]: + return self._push_notification_infos.get(owner, {}) async def set_info( - self, task_id: str, notification_config: PushNotificationConfig + self, + task_id: str, + notification_config: TaskPushNotificationConfig, + context: ServerCallContext, ) -> None: """Sets or updates the push notification configuration for a task in memory.""" + owner = self.owner_resolver(context) + if owner not in self._push_notification_infos: + self._push_notification_infos[owner] = {} async with self.lock: - if task_id not in self._push_notification_infos: - self._push_notification_infos[task_id] = [] + owner_infos = self._push_notification_infos[owner] + if task_id not in owner_infos: + owner_infos[task_id] = [] - if notification_config.id is None: + if not notification_config.id: notification_config.id = task_id - for config in self._push_notification_infos[task_id]: + # Remove existing config with the same ID + for config in owner_infos[task_id]: if config.id == notification_config.id: - self._push_notification_infos[task_id].remove(config) + owner_infos[task_id].remove(config) break - self._push_notification_infos[task_id].append(notification_config) - - async def get_info(self, task_id: str) -> list[PushNotificationConfig]: - """Retrieves the push notification configuration for a task from memory.""" + owner_infos[task_id].append(notification_config) + logger.debug( + 'Push notification config for task %s with config id %s for owner %s saved/updated.', + task_id, + notification_config.id, + owner, + ) + + async def get_info( + self, + task_id: str, + context: ServerCallContext, + ) -> list[TaskPushNotificationConfig]: + """Retrieves all push notification configurations for a task from memory, for the given owner.""" + owner = self.owner_resolver(context) async with self.lock: - return self._push_notification_infos.get(task_id) or [] + owner_infos = self._get_owner_push_notification_infos(owner) + return list(owner_infos.get(task_id, [])) async def delete_info( - self, task_id: str, config_id: str | None = None + self, + task_id: str, + context: ServerCallContext, + config_id: str | None = None, ) -> None: - """Deletes the push notification configuration for a task from memory.""" - async with self.lock: - if config_id is None: - config_id = task_id + """Deletes push notification configurations for a task from memory. - if task_id in self._push_notification_infos: - configurations = self._push_notification_infos[task_id] - if not configurations: - return + If config_id is provided, only that specific configuration is deleted. + If config_id is None, all configurations for the task for the owner are deleted. + """ + owner = self.owner_resolver(context) + async with self.lock: + owner_infos = self._get_owner_push_notification_infos(owner) + if task_id not in owner_infos: + logger.warning( + 'Attempted to delete push notification config for task %s, owner %s that does not exist.', + task_id, + owner, + ) + return + if config_id is None: + del owner_infos[task_id] + logger.info( + 'Deleted all push notification configs for task %s, owner %s.', + task_id, + owner, + ) + else: + configurations = owner_infos[task_id] + found = False for config in configurations: if config.id == config_id: configurations.remove(config) + found = True break - - if len(configurations) == 0: - del self._push_notification_infos[task_id] + if found: + logger.info( + 'Deleted push notification config %s for task %s, owner %s.', + config_id, + task_id, + owner, + ) + if len(configurations) == 0: + del owner_infos[task_id] + else: + logger.warning( + 'Attempted to delete push notification config %s for task %s, owner %s that does not exist.', + config_id, + task_id, + owner, + ) + + if not owner_infos: + del self._push_notification_infos[owner] diff --git a/src/a2a/server/tasks/inmemory_task_store.py b/src/a2a/server/tasks/inmemory_task_store.py index 4e192af08..75d2269bc 100644 --- a/src/a2a/server/tasks/inmemory_task_store.py +++ b/src/a2a/server/tasks/inmemory_task_store.py @@ -2,57 +2,229 @@ import logging from a2a.server.context import ServerCallContext +from a2a.server.owner_resolver import OwnerResolver, resolve_user_scope +from a2a.server.tasks.copying_task_store import CopyingTaskStoreAdapter from a2a.server.tasks.task_store import TaskStore -from a2a.types import Task +from a2a.types import a2a_pb2 +from a2a.types.a2a_pb2 import Task +from a2a.utils.constants import DEFAULT_LIST_TASKS_PAGE_SIZE +from a2a.utils.errors import InvalidParamsError +from a2a.utils.task import decode_page_token, encode_page_token logger = logging.getLogger(__name__) -class InMemoryTaskStore(TaskStore): - """In-memory implementation of TaskStore. +class _InMemoryTaskStoreImpl(TaskStore): + """Internal In-memory implementation of TaskStore. - Stores task objects in a dictionary in memory. Task data is lost when the - server process stops. + Stores task objects in a nested dictionary in memory, keyed by owner then task_id. + Task data is lost when the server process stops. """ - def __init__(self) -> None: - """Initializes the InMemoryTaskStore.""" - logger.debug('Initializing InMemoryTaskStore') - self.tasks: dict[str, Task] = {} + def __init__( + self, + owner_resolver: OwnerResolver = resolve_user_scope, + ) -> None: + """Initializes the internal _InMemoryTaskStoreImpl.""" + logger.debug('Initializing _InMemoryTaskStoreImpl') + self.tasks: dict[str, dict[str, Task]] = {} self.lock = asyncio.Lock() + self.owner_resolver = owner_resolver + + def _get_owner_tasks(self, owner: str) -> dict[str, Task]: + return self.tasks.get(owner, {}) + + async def save(self, task: Task, context: ServerCallContext) -> None: + """Saves or updates a task in the in-memory store for the resolved owner.""" + owner = self.owner_resolver(context) + if owner not in self.tasks: + self.tasks[owner] = {} - async def save( - self, task: Task, context: ServerCallContext | None = None - ) -> None: - """Saves or updates a task in the in-memory store.""" async with self.lock: - self.tasks[task.id] = task - logger.debug('Task %s saved successfully.', task.id) + self.tasks[owner][task.id] = task + logger.debug( + 'Task %s for owner %s saved successfully.', task.id, owner + ) async def get( - self, task_id: str, context: ServerCallContext | None = None + self, task_id: str, context: ServerCallContext ) -> Task | None: - """Retrieves a task from the in-memory store by ID.""" + """Retrieves a task from the in-memory store by ID, for the given owner.""" + owner = self.owner_resolver(context) async with self.lock: - logger.debug('Attempting to get task with id: %s', task_id) - task = self.tasks.get(task_id) + logger.debug( + 'Attempting to get task with id: %s for owner: %s', + task_id, + owner, + ) + owner_tasks = self._get_owner_tasks(owner) + task = owner_tasks.get(task_id) if task: - logger.debug('Task %s retrieved successfully.', task_id) - else: - logger.debug('Task %s not found in store.', task_id) - return task + logger.debug( + 'Task %s retrieved successfully for owner %s.', + task_id, + owner, + ) + return task + logger.debug( + 'Task %s not found in store for owner %s.', task_id, owner + ) + return None - async def delete( - self, task_id: str, context: ServerCallContext | None = None - ) -> None: - """Deletes a task from the in-memory store by ID.""" + async def list( + self, + params: a2a_pb2.ListTasksRequest, + context: ServerCallContext, + ) -> a2a_pb2.ListTasksResponse: + """Retrieves a list of tasks from the store, for the given owner.""" + owner = self.owner_resolver(context) + logger.debug('Listing tasks for owner %s with params %s', owner, params) + + async with self.lock: + owner_tasks = self._get_owner_tasks(owner) + tasks = list(owner_tasks.values()) + + # Filter tasks + if params.context_id: + tasks = [ + task for task in tasks if task.context_id == params.context_id + ] + if params.status: + tasks = [ + task for task in tasks if task.status.state == params.status + ] + if params.HasField('status_timestamp_after'): + last_updated_after_iso = ( + params.status_timestamp_after.ToJsonString() + ) + tasks = [ + task + for task in tasks + if ( + task.HasField('status') + and task.status.HasField('timestamp') + and task.status.timestamp.ToJsonString() + >= last_updated_after_iso + ) + ] + + # Order tasks by last update time. To ensure stable sorting, in cases where timestamps are null or not unique, do a second order comparison of IDs. + tasks.sort( + key=lambda task: ( + task.status.HasField('timestamp') + if task.HasField('status') + else False, + task.status.timestamp.ToJsonString() + if task.HasField('status') and task.status.HasField('timestamp') + else '', + task.id, + ), + reverse=True, + ) + + # Paginate tasks + total_size = len(tasks) + start_idx = 0 + if params.page_token: + start_task_id = decode_page_token(params.page_token) + valid_token = False + for i, task in enumerate(tasks): + if task.id == start_task_id: + start_idx = i + valid_token = True + break + if not valid_token: + raise InvalidParamsError( + f'Invalid page token: {params.page_token}' + ) + page_size = params.page_size or DEFAULT_LIST_TASKS_PAGE_SIZE + end_idx = start_idx + page_size + next_page_token = ( + encode_page_token(tasks[end_idx].id) + if end_idx < total_size + else None + ) + tasks = tasks[start_idx:end_idx] + + return a2a_pb2.ListTasksResponse( + next_page_token=next_page_token, + tasks=tasks, + total_size=total_size, + page_size=page_size, + ) + + async def delete(self, task_id: str, context: ServerCallContext) -> None: + """Deletes a task from the in-memory store by ID, for the given owner.""" + owner = self.owner_resolver(context) async with self.lock: - logger.debug('Attempting to delete task with id: %s', task_id) - if task_id in self.tasks: - del self.tasks[task_id] - logger.debug('Task %s deleted successfully.', task_id) - else: + logger.debug( + 'Attempting to delete task with id: %s for owner %s', + task_id, + owner, + ) + + owner_tasks = self._get_owner_tasks(owner) + if task_id not in owner_tasks: logger.warning( - 'Attempted to delete nonexistent task with id: %s', task_id + 'Attempted to delete nonexistent task with id: %s for owner %s', + task_id, + owner, ) + return + + del owner_tasks[task_id] + logger.debug( + 'Task %s deleted successfully for owner %s.', task_id, owner + ) + if not owner_tasks: + del self.tasks[owner] + logger.debug('Removed empty owner %s from store.', owner) + + +class InMemoryTaskStore(TaskStore): + """In-memory implementation of TaskStore. + + Can optionally use CopyingTaskStoreAdapter to wrap the internal dictionary-based + implementation, preventing shared mutable state issues by always returning and + storing deep copies. + """ + + def __init__( + self, + owner_resolver: OwnerResolver = resolve_user_scope, + use_copying: bool = True, + ) -> None: + """Initializes the InMemoryTaskStore. + + Args: + owner_resolver: Resolver for task owners. + use_copying: If True, the store will return and save deep copies of tasks. + Copying behavior is consistent with database task stores. + """ + self._impl = _InMemoryTaskStoreImpl(owner_resolver=owner_resolver) + self._store: TaskStore = ( + CopyingTaskStoreAdapter(self._impl) if use_copying else self._impl + ) + + async def save(self, task: Task, context: ServerCallContext) -> None: + """Saves or updates a task in the store.""" + await self._store.save(task, context) + + async def get( + self, task_id: str, context: ServerCallContext + ) -> Task | None: + """Retrieves a task from the store by ID.""" + return await self._store.get(task_id, context) + + async def list( + self, + params: a2a_pb2.ListTasksRequest, + context: ServerCallContext, + ) -> a2a_pb2.ListTasksResponse: + """Retrieves a list of tasks from the store.""" + return await self._store.list(params, context) + + async def delete(self, task_id: str, context: ServerCallContext) -> None: + """Deletes a task from the store by ID.""" + await self._store.delete(task_id, context) diff --git a/src/a2a/server/tasks/push_notification_config_store.py b/src/a2a/server/tasks/push_notification_config_store.py index efe46b40a..6b5b35245 100644 --- a/src/a2a/server/tasks/push_notification_config_store.py +++ b/src/a2a/server/tasks/push_notification_config_store.py @@ -1,6 +1,7 @@ from abc import ABC, abstractmethod -from a2a.types import PushNotificationConfig +from a2a.server.context import ServerCallContext +from a2a.types.a2a_pb2 import TaskPushNotificationConfig class PushNotificationConfigStore(ABC): @@ -8,16 +9,26 @@ class PushNotificationConfigStore(ABC): @abstractmethod async def set_info( - self, task_id: str, notification_config: PushNotificationConfig + self, + task_id: str, + notification_config: TaskPushNotificationConfig, + context: ServerCallContext, ) -> None: """Sets or updates the push notification configuration for a task.""" @abstractmethod - async def get_info(self, task_id: str) -> list[PushNotificationConfig]: + async def get_info( + self, + task_id: str, + context: ServerCallContext, + ) -> list[TaskPushNotificationConfig]: """Retrieves the push notification configuration for a task.""" @abstractmethod async def delete_info( - self, task_id: str, config_id: str | None = None + self, + task_id: str, + context: ServerCallContext, + config_id: str | None = None, ) -> None: """Deletes the push notification configuration for a task.""" diff --git a/src/a2a/server/tasks/push_notification_sender.py b/src/a2a/server/tasks/push_notification_sender.py index d9389d4a4..95fa43b69 100644 --- a/src/a2a/server/tasks/push_notification_sender.py +++ b/src/a2a/server/tasks/push_notification_sender.py @@ -1,11 +1,20 @@ from abc import ABC, abstractmethod -from a2a.types import Task +from a2a.types.a2a_pb2 import ( + Task, + TaskArtifactUpdateEvent, + TaskStatusUpdateEvent, +) + + +PushNotificationEvent = Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent class PushNotificationSender(ABC): """Interface for sending push notifications for tasks.""" @abstractmethod - async def send_notification(self, task: Task) -> None: + async def send_notification( + self, task_id: str, event: PushNotificationEvent + ) -> None: """Sends a push notification containing the latest task state.""" diff --git a/src/a2a/server/tasks/result_aggregator.py b/src/a2a/server/tasks/result_aggregator.py index 8c424bda7..32a323a4a 100644 --- a/src/a2a/server/tasks/result_aggregator.py +++ b/src/a2a/server/tasks/result_aggregator.py @@ -5,7 +5,7 @@ from a2a.server.events import Event, EventConsumer from a2a.server.tasks.task_manager import TaskManager -from a2a.types import Message, Task, TaskState, TaskStatusUpdateEvent +from a2a.types.a2a_pb2 import Message, Task, TaskState, TaskStatusUpdateEvent logger = logging.getLogger(__name__) @@ -98,9 +98,9 @@ async def consume_and_break_on_interrupt( self, consumer: EventConsumer, blocking: bool = True, - event_callback: Callable[[], Awaitable[None]] | None = None, + event_callback: Callable[[Event], Awaitable[None]] | None = None, ) -> tuple[Task | Message | None, bool, asyncio.Task | None]: - """Processes the event stream until completion or an interruptable state is encountered. + """Processes the event stream until completion or an interruptible state is encountered. If `blocking` is False, it returns after the first event that creates a Task or Message. If `blocking` is True, it waits for completion unless an `auth_required` @@ -138,10 +138,13 @@ async def consume_and_break_on_interrupt( return event, False, None await self.task_manager.process(event) + if event_callback: + await event_callback(event) + should_interrupt = False is_auth_required = ( isinstance(event, Task | TaskStatusUpdateEvent) - and event.status.state == TaskState.auth_required + and event.status.state == TaskState.TASK_STATE_AUTH_REQUIRED ) # Always interrupt on auth_required, as it needs external action. @@ -176,11 +179,11 @@ async def consume_and_break_on_interrupt( async def _continue_consuming( self, event_stream: AsyncIterator[Event], - event_callback: Callable[[], Awaitable[None]] | None = None, + event_callback: Callable[[Event], Awaitable[None]] | None = None, ) -> None: """Continues processing an event stream in a background task. - Used after an interruptable state (like auth_required) is encountered + Used after an interruptible state (like auth_required) is encountered in the synchronous consumption flow. Args: @@ -190,4 +193,4 @@ async def _continue_consuming( async for event in event_stream: await self.task_manager.process(event) if event_callback: - await event_callback() + await event_callback(event) diff --git a/src/a2a/server/tasks/task_manager.py b/src/a2a/server/tasks/task_manager.py index 5c363703b..e5d899c1e 100644 --- a/src/a2a/server/tasks/task_manager.py +++ b/src/a2a/server/tasks/task_manager.py @@ -3,8 +3,8 @@ from a2a.server.context import ServerCallContext from a2a.server.events.event_queue import Event from a2a.server.tasks.task_store import TaskStore -from a2a.types import ( - InvalidParamsError, +from a2a.types.a2a_pb2 import ( + Artifact, Message, Task, TaskArtifactUpdateEvent, @@ -12,13 +12,77 @@ TaskStatus, TaskStatusUpdateEvent, ) -from a2a.utils import append_artifact_to_task -from a2a.utils.errors import ServerError +from a2a.utils.errors import InvalidParamsError +from a2a.utils.telemetry import trace_function logger = logging.getLogger(__name__) +@trace_function() +def append_artifact_to_task(task: Task, event: TaskArtifactUpdateEvent) -> None: + """Helper method for updating a Task object with new artifact data from an event. + + Handles creating the artifacts list if it doesn't exist, adding new artifacts, + and appending parts to existing artifacts based on the `append` flag in the event. + + Args: + task: The `Task` object to modify. + event: The `TaskArtifactUpdateEvent` containing the artifact data. + """ + new_artifact_data: Artifact = event.artifact + artifact_id: str = new_artifact_data.artifact_id + append_parts: bool = event.append + + existing_artifact: Artifact | None = None + existing_artifact_list_index: int | None = None + + # Find existing artifact by its id + for i, art in enumerate(task.artifacts): + if art.artifact_id == artifact_id: + existing_artifact = art + existing_artifact_list_index = i + break + + if not append_parts: + # This represents the first chunk for this artifact index. + if existing_artifact_list_index is not None: + # Replace the existing artifact entirely with the new data + logger.debug( + 'Replacing artifact at id %s for task %s', artifact_id, task.id + ) + task.artifacts[existing_artifact_list_index].CopyFrom( + new_artifact_data + ) + else: + # Append the new artifact since no artifact with this index exists yet + logger.debug( + 'Adding new artifact with id %s for task %s', + artifact_id, + task.id, + ) + task.artifacts.append(new_artifact_data) + elif existing_artifact: + # Append new parts to the existing artifact's part list + logger.debug( + 'Appending parts to artifact id %s for task %s', + artifact_id, + task.id, + ) + existing_artifact.parts.extend(new_artifact_data.parts) + existing_artifact.metadata.update( + dict(new_artifact_data.metadata.items()) + ) + else: + # We received a chunk to append, but we don't have an existing artifact. + # we will ignore this chunk + logger.warning( + 'Received append=True for nonexistent artifact index %s in task %s. Ignoring chunk.', + artifact_id, + task.id, + ) + + class TaskManager: """Helps manage a task's lifecycle during execution of a request. @@ -28,31 +92,31 @@ class TaskManager: def __init__( self, + task_store: TaskStore, + context: ServerCallContext, task_id: str | None, context_id: str | None, - task_store: TaskStore, initial_message: Message | None, - context: ServerCallContext | None = None, ): """Initializes the TaskManager. Args: + task_store: The `TaskStore` instance for persistence. + context: The `ServerCallContext` that this task is produced under. task_id: The ID of the task, if known from the request. context_id: The ID of the context, if known from the request. - task_store: The `TaskStore` instance for persistence. initial_message: The `Message` that initiated the task, if any. Used when creating a new task object. - context: The `ServerCallContext` that this task is produced under. """ if task_id is not None and not (isinstance(task_id, str) and task_id): raise ValueError('Task ID must be a non-empty string') + self.task_store = task_store + self._call_context: ServerCallContext = context self.task_id = task_id self.context_id = context_id - self.task_store = task_store self._initial_message = initial_message self._current_task: Task | None = None - self._call_context: ServerCallContext | None = context logger.debug( 'TaskManager initialized with task_id: %s, context_id: %s', task_id, @@ -101,7 +165,7 @@ async def save_task_event( The updated `Task` object after processing the event. Raises: - ServerError: If the task ID in the event conflicts with the TaskManager's ID + InvalidParamsError: If the task ID in the event conflicts with the TaskManager's ID when the TaskManager's ID is already set. """ task_id_from_event = ( @@ -109,18 +173,14 @@ async def save_task_event( ) # If task id is known, make sure it is matched if self.task_id and self.task_id != task_id_from_event: - raise ServerError( - error=InvalidParamsError( - message=f"Task in event doesn't match TaskManager {self.task_id} : {task_id_from_event}" - ) + raise InvalidParamsError( + message=f"Task in event doesn't match TaskManager {self.task_id} : {task_id_from_event}" ) if not self.task_id: self.task_id = task_id_from_event if self.context_id and self.context_id != event.context_id: - raise ServerError( - error=InvalidParamsError( - message=f"Context in event doesn't match TaskManager {self.context_id} : {event.context_id}" - ) + raise InvalidParamsError( + message=f"Context in event doesn't match TaskManager {self.context_id} : {event.context_id}" ) if not self.context_id: self.context_id = event.context_id @@ -140,16 +200,11 @@ async def save_task_event( logger.debug( 'Updating task %s status to: %s', task.id, event.status.state ) - if task.status.message: - if not task.history: - task.history = [task.status.message] - else: - task.history.append(task.status.message) + if task.status.HasField('message'): + task.history.append(task.status.message) if event.metadata: - if not task.metadata: - task.metadata = {} - task.metadata.update(event.metadata) - task.status = event.status + task.metadata.MergeFrom(event.metadata) + task.status.CopyFrom(event.status) else: logger.debug('Appending artifact to task %s', task.id) append_artifact_to_task(task, event) @@ -157,13 +212,12 @@ async def save_task_event( await self._save_task(task) return task - async def ensure_task( - self, event: TaskStatusUpdateEvent | TaskArtifactUpdateEvent - ) -> Task: + async def ensure_task_id(self, task_id: str, context_id: str) -> Task: """Ensures a Task object exists in memory, loading from store or creating new if needed. Args: - event: The task-related event triggering the need for a Task object. + task_id: The ID for the new task. + context_id: The context ID for the new task. Returns: An existing or newly created `Task` object. @@ -178,16 +232,29 @@ async def ensure_task( if not task: logger.info( 'Task not found or task_id not set. Creating new task for event (task_id: %s, context_id: %s).', - event.task_id, - event.context_id, + task_id, + context_id, ) # streaming agent did not previously stream task object. # Create a task object with the available information and persist the event - task = self._init_task_obj(event.task_id, event.context_id) + task = self._init_task_obj(task_id, context_id) await self._save_task(task) return task + async def ensure_task( + self, event: TaskStatusUpdateEvent | TaskArtifactUpdateEvent + ) -> Task: + """Ensures a Task object exists in memory, loading from store or creating new if needed. + + Args: + event: The task-related event triggering the need for a Task object. + + Returns: + An existing or newly created `Task` object. + """ + return await self.ensure_task_id(event.task_id, event.context_id) + async def process(self, event: Event) -> Event: """Processes an event, updates the task state if applicable, stores it, and returns the event. @@ -226,7 +293,7 @@ def _init_task_obj(self, task_id: str, context_id: str) -> Task: return Task( id=task_id, context_id=context_id, - status=TaskStatus(state=TaskState.submitted), + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), history=history, ) @@ -257,15 +324,9 @@ def update_with_message(self, message: Message, task: Task) -> Task: Returns: The updated `Task` object (updated in-place). """ - if task.status.message: - if task.history: - task.history.append(task.status.message) - else: - task.history = [task.status.message] - task.status.message = None - if task.history: - task.history.append(message) - else: - task.history = [message] + if task.status.HasField('message'): + task.history.append(task.status.message) + task.status.ClearField('message') + task.history.append(message) self._current_task = task return task diff --git a/src/a2a/server/tasks/task_store.py b/src/a2a/server/tasks/task_store.py index 16b36edb9..25e4838d1 100644 --- a/src/a2a/server/tasks/task_store.py +++ b/src/a2a/server/tasks/task_store.py @@ -1,7 +1,7 @@ from abc import ABC, abstractmethod from a2a.server.context import ServerCallContext -from a2a.types import Task +from a2a.types.a2a_pb2 import ListTasksRequest, ListTasksResponse, Task class TaskStore(ABC): @@ -11,19 +11,23 @@ class TaskStore(ABC): """ @abstractmethod - async def save( - self, task: Task, context: ServerCallContext | None = None - ) -> None: + async def save(self, task: Task, context: ServerCallContext) -> None: """Saves or updates a task in the store.""" @abstractmethod async def get( - self, task_id: str, context: ServerCallContext | None = None + self, task_id: str, context: ServerCallContext ) -> Task | None: """Retrieves a task from the store by ID.""" @abstractmethod - async def delete( - self, task_id: str, context: ServerCallContext | None = None - ) -> None: + async def list( + self, + params: ListTasksRequest, + context: ServerCallContext, + ) -> ListTasksResponse: + """Retrieves a list of tasks from the store.""" + + @abstractmethod + async def delete(self, task_id: str, context: ServerCallContext) -> None: """Deletes a task from the store by ID.""" diff --git a/src/a2a/server/tasks/task_updater.py b/src/a2a/server/tasks/task_updater.py index b61ab7001..8298920da 100644 --- a/src/a2a/server/tasks/task_updater.py +++ b/src/a2a/server/tasks/task_updater.py @@ -3,13 +3,15 @@ from datetime import datetime, timezone from typing import Any +from google.protobuf.timestamp_pb2 import Timestamp + from a2a.server.events import EventQueue from a2a.server.id_generator import ( IDGenerator, IDGeneratorContext, UUIDGenerator, ) -from a2a.types import ( +from a2a.types.a2a_pb2 import ( Artifact, Message, Part, @@ -50,10 +52,10 @@ def __init__( self._lock = asyncio.Lock() self._terminal_state_reached = False self._terminal_states = { - TaskState.completed, - TaskState.canceled, - TaskState.failed, - TaskState.rejected, + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, } self._artifact_id_generator = ( artifact_id_generator if artifact_id_generator else UUIDGenerator() @@ -66,7 +68,6 @@ async def update_status( self, state: TaskState, message: Message | None = None, - final: bool = False, timestamp: str | None = None, metadata: dict[str, Any] | None = None, ) -> None: @@ -75,7 +76,6 @@ async def update_status( Args: state: The new state of the task. message: An optional message associated with the status update. - final: If True, indicates this is the final status update for the task. timestamp: Optional ISO 8601 datetime string. Defaults to current time. metadata: Optional metadata for extensions. """ @@ -86,24 +86,27 @@ async def update_status( ) if state in self._terminal_states: self._terminal_state_reached = True - final = True - current_timestamp = ( - timestamp - if timestamp - else datetime.now(timezone.utc).isoformat() - ) + # Create proto timestamp from datetime + ts = Timestamp() + if timestamp: + # If timestamp string provided, parse it + dt = datetime.fromisoformat(timestamp.replace('Z', '+00:00')) + ts.FromDatetime(dt) + else: + ts.FromDatetime(datetime.now(timezone.utc)) + + status = TaskStatus(state=state) + if message: + status.message.CopyFrom(message) + status.timestamp.CopyFrom(ts) + await self.event_queue.enqueue_event( TaskStatusUpdateEvent( task_id=self.task_id, context_id=self.context_id, - final=final, metadata=metadata, - status=TaskStatus( - state=state, - message=message, - timestamp=current_timestamp, - ), + status=status, ) ) @@ -154,57 +157,56 @@ async def add_artifact( # noqa: PLR0913 async def complete(self, message: Message | None = None) -> None: """Marks the task as completed and publishes a final status update.""" await self.update_status( - TaskState.completed, + TaskState.TASK_STATE_COMPLETED, message=message, - final=True, ) async def failed(self, message: Message | None = None) -> None: """Marks the task as failed and publishes a final status update.""" - await self.update_status(TaskState.failed, message=message, final=True) + await self.update_status( + TaskState.TASK_STATE_FAILED, + message=message, + ) async def reject(self, message: Message | None = None) -> None: """Marks the task as rejected and publishes a final status update.""" await self.update_status( - TaskState.rejected, message=message, final=True + TaskState.TASK_STATE_REJECTED, + message=message, ) async def submit(self, message: Message | None = None) -> None: """Marks the task as submitted and publishes a status update.""" await self.update_status( - TaskState.submitted, + TaskState.TASK_STATE_SUBMITTED, message=message, ) async def start_work(self, message: Message | None = None) -> None: """Marks the task as working and publishes a status update.""" await self.update_status( - TaskState.working, + TaskState.TASK_STATE_WORKING, message=message, ) async def cancel(self, message: Message | None = None) -> None: """Marks the task as cancelled and publishes a finalstatus update.""" await self.update_status( - TaskState.canceled, message=message, final=True + TaskState.TASK_STATE_CANCELED, + message=message, ) - async def requires_input( - self, message: Message | None = None, final: bool = False - ) -> None: + async def requires_input(self, message: Message | None = None) -> None: """Marks the task as input required and publishes a status update.""" await self.update_status( - TaskState.input_required, + TaskState.TASK_STATE_INPUT_REQUIRED, message=message, - final=final, ) - async def requires_auth( - self, message: Message | None = None, final: bool = False - ) -> None: + async def requires_auth(self, message: Message | None = None) -> None: """Marks the task as auth required and publishes a status update.""" await self.update_status( - TaskState.auth_required, message=message, final=final + TaskState.TASK_STATE_AUTH_REQUIRED, message=message ) def new_agent_message( @@ -225,7 +227,7 @@ def new_agent_message( A new `Message` object. """ return Message( - role=Role.agent, + role=Role.ROLE_AGENT, task_id=self.task_id, context_id=self.context_id, message_id=self._message_id_generator.generate( diff --git a/src/a2a/types/__init__.py b/src/a2a/types/__init__.py new file mode 100644 index 000000000..1f54c8ad7 --- /dev/null +++ b/src/a2a/types/__init__.py @@ -0,0 +1,146 @@ +"""A2A Types Package - Protocol Buffer and SDK-specific types.""" + +# Import all proto-generated types from a2a_pb2 +from a2a.types.a2a_pb2 import ( + APIKeySecurityScheme, + AgentCapabilities, + AgentCard, + AgentCardSignature, + AgentExtension, + AgentInterface, + AgentProvider, + AgentSkill, + Artifact, + AuthenticationInfo, + AuthorizationCodeOAuthFlow, + CancelTaskRequest, + ClientCredentialsOAuthFlow, + DeleteTaskPushNotificationConfigRequest, + DeviceCodeOAuthFlow, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + HTTPAuthSecurityScheme, + ImplicitOAuthFlow, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, + Message, + MutualTlsSecurityScheme, + OAuth2SecurityScheme, + OAuthFlows, + OpenIdConnectSecurityScheme, + Part, + PasswordOAuthFlow, + Role, + SecurityRequirement, + SecurityScheme, + SendMessageConfiguration, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + StringList, + SubscribeToTaskRequest, + Task, + TaskArtifactUpdateEvent, + TaskPushNotificationConfig, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) + +# Import SDK-specific error types from utils.errors +from a2a.utils.errors import ( + ContentTypeNotSupportedError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + InternalError, + InvalidAgentResponseError, + InvalidParamsError, + InvalidRequestError, + MethodNotFoundError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, + UnsupportedOperationError, + VersionNotSupportedError, +) + + +A2ARequest = ( + SendMessageRequest + | GetTaskRequest + | CancelTaskRequest + | TaskPushNotificationConfig + | GetTaskPushNotificationConfigRequest + | SubscribeToTaskRequest + | GetExtendedAgentCardRequest + | ListTaskPushNotificationConfigsRequest +) + + +__all__ = [ + # SDK-specific types from extras + 'A2ARequest', + # Proto types + 'APIKeySecurityScheme', + 'AgentCapabilities', + 'AgentCard', + 'AgentCardSignature', + 'AgentExtension', + 'AgentInterface', + 'AgentProvider', + 'AgentSkill', + 'Artifact', + 'AuthenticationInfo', + 'AuthorizationCodeOAuthFlow', + 'CancelTaskRequest', + 'ClientCredentialsOAuthFlow', + 'ContentTypeNotSupportedError', + 'DeleteTaskPushNotificationConfigRequest', + 'DeviceCodeOAuthFlow', + 'ExtendedAgentCardNotConfiguredError', + 'ExtensionSupportRequiredError', + 'GetExtendedAgentCardRequest', + 'GetTaskPushNotificationConfigRequest', + 'GetTaskRequest', + 'HTTPAuthSecurityScheme', + 'ImplicitOAuthFlow', + 'InternalError', + 'InvalidAgentResponseError', + 'InvalidParamsError', + 'InvalidRequestError', + 'ListTaskPushNotificationConfigsRequest', + 'ListTaskPushNotificationConfigsResponse', + 'ListTasksRequest', + 'ListTasksResponse', + 'Message', + 'MethodNotFoundError', + 'MutualTlsSecurityScheme', + 'OAuth2SecurityScheme', + 'OAuthFlows', + 'OpenIdConnectSecurityScheme', + 'Part', + 'PasswordOAuthFlow', + 'PushNotificationNotSupportedError', + 'Role', + 'SecurityRequirement', + 'SecurityScheme', + 'SendMessageConfiguration', + 'SendMessageRequest', + 'SendMessageResponse', + 'StreamResponse', + 'StringList', + 'SubscribeToTaskRequest', + 'Task', + 'TaskArtifactUpdateEvent', + 'TaskNotCancelableError', + 'TaskNotFoundError', + 'TaskPushNotificationConfig', + 'TaskState', + 'TaskStatus', + 'TaskStatusUpdateEvent', + 'UnsupportedOperationError', + 'VersionNotSupportedError', +] diff --git a/src/a2a/types/a2a_pb2.py b/src/a2a/types/a2a_pb2.py new file mode 100644 index 000000000..a47abe4a3 --- /dev/null +++ b/src/a2a/types/a2a_pb2.py @@ -0,0 +1,311 @@ +# -*- coding: utf-8 -*- +# Generated by the protocol buffer compiler. DO NOT EDIT! +# NO CHECKED-IN PROTOBUF GENCODE +# source: a2a.proto +# Protobuf Python Version: 5.29.3 +"""Generated protocol buffer code.""" +from google.protobuf import descriptor as _descriptor +from google.protobuf import descriptor_pool as _descriptor_pool +from google.protobuf import runtime_version as _runtime_version +from google.protobuf import symbol_database as _symbol_database +from google.protobuf.internal import builder as _builder +_runtime_version.ValidateProtobufRuntimeVersion( + _runtime_version.Domain.PUBLIC, + 5, + 29, + 3, + '', + 'a2a.proto' +) +# @@protoc_insertion_point(imports) + +_sym_db = _symbol_database.Default() + + +from google.api import annotations_pb2 as google_dot_api_dot_annotations__pb2 +from google.api import client_pb2 as google_dot_api_dot_client__pb2 +from google.api import field_behavior_pb2 as google_dot_api_dot_field__behavior__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 +from google.protobuf import struct_pb2 as google_dot_protobuf_dot_struct__pb2 +from google.protobuf import timestamp_pb2 as google_dot_protobuf_dot_timestamp__pb2 + + +DESCRIPTOR = _descriptor_pool.Default().AddSerializedFile(b'\n\ta2a.proto\x12\tlf.a2a.v1\x1a\x1cgoogle/api/annotations.proto\x1a\x17google/api/client.proto\x1a\x1fgoogle/api/field_behavior.proto\x1a\x1bgoogle/protobuf/empty.proto\x1a\x1cgoogle/protobuf/struct.proto\x1a\x1fgoogle/protobuf/timestamp.proto\"\xa6\x02\n\x18SendMessageConfiguration\x12\x32\n\x15\x61\x63\x63\x65pted_output_modes\x18\x01 \x03(\tR\x13\x61\x63\x63\x65ptedOutputModes\x12h\n\x1dtask_push_notification_config\x18\x02 \x01(\x0b\x32%.lf.a2a.v1.TaskPushNotificationConfigR\x1ataskPushNotificationConfig\x12*\n\x0ehistory_length\x18\x03 \x01(\x05H\x00R\rhistoryLength\x88\x01\x01\x12-\n\x12return_immediately\x18\x04 \x01(\x08R\x11returnImmediatelyB\x11\n\x0f_history_length\"\x84\x02\n\x04Task\x12\x13\n\x02id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x02id\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12\x32\n\x06status\x18\x03 \x01(\x0b\x32\x15.lf.a2a.v1.TaskStatusB\x03\xe0\x41\x02R\x06status\x12\x31\n\tartifacts\x18\x04 \x03(\x0b\x32\x13.lf.a2a.v1.ArtifactR\tartifacts\x12,\n\x07history\x18\x05 \x03(\x0b\x32\x12.lf.a2a.v1.MessageR\x07history\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\xa5\x01\n\nTaskStatus\x12/\n\x05state\x18\x01 \x01(\x0e\x32\x14.lf.a2a.v1.TaskStateB\x03\xe0\x41\x02R\x05state\x12,\n\x07message\x18\x02 \x01(\x0b\x32\x12.lf.a2a.v1.MessageR\x07message\x12\x38\n\ttimestamp\x18\x03 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\ttimestamp\"\xed\x01\n\x04Part\x12\x14\n\x04text\x18\x01 \x01(\tH\x00R\x04text\x12\x12\n\x03raw\x18\x02 \x01(\x0cH\x00R\x03raw\x12\x12\n\x03url\x18\x03 \x01(\tH\x00R\x03url\x12,\n\x04\x64\x61ta\x18\x04 \x01(\x0b\x32\x16.google.protobuf.ValueH\x00R\x04\x64\x61ta\x12\x33\n\x08metadata\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1a\n\x08\x66ilename\x18\x06 \x01(\tR\x08\x66ilename\x12\x1d\n\nmedia_type\x18\x07 \x01(\tR\tmediaTypeB\t\n\x07\x63ontent\"\xbe\x02\n\x07Message\x12\"\n\nmessage_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\tmessageId\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12\x17\n\x07task_id\x18\x03 \x01(\tR\x06taskId\x12(\n\x04role\x18\x04 \x01(\x0e\x32\x0f.lf.a2a.v1.RoleB\x03\xe0\x41\x02R\x04role\x12*\n\x05parts\x18\x05 \x03(\x0b\x32\x0f.lf.a2a.v1.PartB\x03\xe0\x41\x02R\x05parts\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x07 \x03(\tR\nextensions\x12,\n\x12reference_task_ids\x18\x08 \x03(\tR\x10referenceTaskIds\"\xe7\x01\n\x08\x41rtifact\x12$\n\x0b\x61rtifact_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\nartifactId\x12\x12\n\x04name\x18\x02 \x01(\tR\x04name\x12 \n\x0b\x64\x65scription\x18\x03 \x01(\tR\x0b\x64\x65scription\x12*\n\x05parts\x18\x04 \x03(\x0b\x32\x0f.lf.a2a.v1.PartB\x03\xe0\x41\x02R\x05parts\x12\x33\n\x08metadata\x18\x05 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\x12\x1e\n\nextensions\x18\x06 \x03(\tR\nextensions\"\xc2\x01\n\x15TaskStatusUpdateEvent\x12\x1c\n\x07task_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06taskId\x12\"\n\ncontext_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tcontextId\x12\x32\n\x06status\x18\x03 \x01(\x0b\x32\x15.lf.a2a.v1.TaskStatusB\x03\xe0\x41\x02R\x06status\x12\x33\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"\xfd\x01\n\x17TaskArtifactUpdateEvent\x12\x1c\n\x07task_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06taskId\x12\"\n\ncontext_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tcontextId\x12\x34\n\x08\x61rtifact\x18\x03 \x01(\x0b\x32\x13.lf.a2a.v1.ArtifactB\x03\xe0\x41\x02R\x08\x61rtifact\x12\x16\n\x06\x61ppend\x18\x04 \x01(\x08R\x06\x61ppend\x12\x1d\n\nlast_chunk\x18\x05 \x01(\x08R\tlastChunk\x12\x33\n\x08metadata\x18\x06 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"S\n\x12\x41uthenticationInfo\x12\x1b\n\x06scheme\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06scheme\x12 \n\x0b\x63redentials\x18\x02 \x01(\tR\x0b\x63redentials\"\x9f\x01\n\x0e\x41gentInterface\x12\x15\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x03url\x12.\n\x10protocol_binding\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0fprotocolBinding\x12\x16\n\x06tenant\x18\x03 \x01(\tR\x06tenant\x12.\n\x10protocol_version\x18\x04 \x01(\tB\x03\xe0\x41\x02R\x0fprotocolVersion\"\x98\x07\n\tAgentCard\x12\x17\n\x04name\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12Q\n\x14supported_interfaces\x18\x03 \x03(\x0b\x32\x19.lf.a2a.v1.AgentInterfaceB\x03\xe0\x41\x02R\x13supportedInterfaces\x12\x34\n\x08provider\x18\x04 \x01(\x0b\x32\x18.lf.a2a.v1.AgentProviderR\x08provider\x12\x1d\n\x07version\x18\x05 \x01(\tB\x03\xe0\x41\x02R\x07version\x12\x30\n\x11\x64ocumentation_url\x18\x06 \x01(\tH\x00R\x10\x64ocumentationUrl\x88\x01\x01\x12\x45\n\x0c\x63\x61pabilities\x18\x07 \x01(\x0b\x32\x1c.lf.a2a.v1.AgentCapabilitiesB\x03\xe0\x41\x02R\x0c\x63\x61pabilities\x12T\n\x10security_schemes\x18\x08 \x03(\x0b\x32).lf.a2a.v1.AgentCard.SecuritySchemesEntryR\x0fsecuritySchemes\x12S\n\x15security_requirements\x18\t \x03(\x0b\x32\x1e.lf.a2a.v1.SecurityRequirementR\x14securityRequirements\x12\x33\n\x13\x64\x65\x66\x61ult_input_modes\x18\n \x03(\tB\x03\xe0\x41\x02R\x11\x64\x65\x66\x61ultInputModes\x12\x35\n\x14\x64\x65\x66\x61ult_output_modes\x18\x0b \x03(\tB\x03\xe0\x41\x02R\x12\x64\x65\x66\x61ultOutputModes\x12\x32\n\x06skills\x18\x0c \x03(\x0b\x32\x15.lf.a2a.v1.AgentSkillB\x03\xe0\x41\x02R\x06skills\x12=\n\nsignatures\x18\r \x03(\x0b\x32\x1d.lf.a2a.v1.AgentCardSignatureR\nsignatures\x12\x1e\n\x08icon_url\x18\x0e \x01(\tH\x01R\x07iconUrl\x88\x01\x01\x1a]\n\x14SecuritySchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12/\n\x05value\x18\x02 \x01(\x0b\x32\x19.lf.a2a.v1.SecuritySchemeR\x05value:\x02\x38\x01\x42\x14\n\x12_documentation_urlB\x0b\n\t_icon_url\"O\n\rAgentProvider\x12\x15\n\x03url\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x03url\x12\'\n\x0corganization\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x0corganization\"\x97\x02\n\x11\x41gentCapabilities\x12!\n\tstreaming\x18\x01 \x01(\x08H\x00R\tstreaming\x88\x01\x01\x12\x32\n\x12push_notifications\x18\x02 \x01(\x08H\x01R\x11pushNotifications\x88\x01\x01\x12\x39\n\nextensions\x18\x03 \x03(\x0b\x32\x19.lf.a2a.v1.AgentExtensionR\nextensions\x12\x33\n\x13\x65xtended_agent_card\x18\x04 \x01(\x08H\x02R\x11\x65xtendedAgentCard\x88\x01\x01\x42\x0c\n\n_streamingB\x15\n\x13_push_notificationsB\x16\n\x14_extended_agent_card\"\x91\x01\n\x0e\x41gentExtension\x12\x10\n\x03uri\x18\x01 \x01(\tR\x03uri\x12 \n\x0b\x64\x65scription\x18\x02 \x01(\tR\x0b\x64\x65scription\x12\x1a\n\x08required\x18\x03 \x01(\x08R\x08required\x12/\n\x06params\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x06params\"\xaf\x02\n\nAgentSkill\x12\x13\n\x02id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x02id\x12\x17\n\x04name\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x04name\x12%\n\x0b\x64\x65scription\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x0b\x64\x65scription\x12\x17\n\x04tags\x18\x04 \x03(\tB\x03\xe0\x41\x02R\x04tags\x12\x1a\n\x08\x65xamples\x18\x05 \x03(\tR\x08\x65xamples\x12\x1f\n\x0binput_modes\x18\x06 \x03(\tR\ninputModes\x12!\n\x0coutput_modes\x18\x07 \x03(\tR\x0boutputModes\x12S\n\x15security_requirements\x18\x08 \x03(\x0b\x32\x1e.lf.a2a.v1.SecurityRequirementR\x14securityRequirements\"\x8b\x01\n\x12\x41gentCardSignature\x12!\n\tprotected\x18\x01 \x01(\tB\x03\xe0\x41\x02R\tprotected\x12!\n\tsignature\x18\x02 \x01(\tB\x03\xe0\x41\x02R\tsignature\x12/\n\x06header\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x06header\"\xd1\x01\n\x1aTaskPushNotificationConfig\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x0e\n\x02id\x18\x02 \x01(\tR\x02id\x12\x17\n\x07task_id\x18\x03 \x01(\tR\x06taskId\x12\x15\n\x03url\x18\x04 \x01(\tB\x03\xe0\x41\x02R\x03url\x12\x14\n\x05token\x18\x05 \x01(\tR\x05token\x12\x45\n\x0e\x61uthentication\x18\x06 \x01(\x0b\x32\x1d.lf.a2a.v1.AuthenticationInfoR\x0e\x61uthentication\" \n\nStringList\x12\x12\n\x04list\x18\x01 \x03(\tR\x04list\"\xaf\x01\n\x13SecurityRequirement\x12\x45\n\x07schemes\x18\x01 \x03(\x0b\x32+.lf.a2a.v1.SecurityRequirement.SchemesEntryR\x07schemes\x1aQ\n\x0cSchemesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12+\n\x05value\x18\x02 \x01(\x0b\x32\x15.lf.a2a.v1.StringListR\x05value:\x02\x38\x01\"\xf5\x03\n\x0eSecurityScheme\x12X\n\x17\x61pi_key_security_scheme\x18\x01 \x01(\x0b\x32\x1f.lf.a2a.v1.APIKeySecuritySchemeH\x00R\x14\x61piKeySecurityScheme\x12^\n\x19http_auth_security_scheme\x18\x02 \x01(\x0b\x32!.lf.a2a.v1.HTTPAuthSecuritySchemeH\x00R\x16httpAuthSecurityScheme\x12W\n\x16oauth2_security_scheme\x18\x03 \x01(\x0b\x32\x1f.lf.a2a.v1.OAuth2SecuritySchemeH\x00R\x14oauth2SecurityScheme\x12n\n\x1fopen_id_connect_security_scheme\x18\x04 \x01(\x0b\x32&.lf.a2a.v1.OpenIdConnectSecuritySchemeH\x00R\x1bopenIdConnectSecurityScheme\x12V\n\x14mtls_security_scheme\x18\x05 \x01(\x0b\x32\".lf.a2a.v1.MutualTlsSecuritySchemeH\x00R\x12mtlsSecuritySchemeB\x08\n\x06scheme\"r\n\x14\x41PIKeySecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x1f\n\x08location\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08location\x12\x17\n\x04name\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x04name\"|\n\x16HTTPAuthSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x1b\n\x06scheme\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x06scheme\x12#\n\rbearer_format\x18\x03 \x01(\tR\x0c\x62\x65\x61rerFormat\"\x9a\x01\n\x14OAuth2SecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x30\n\x05\x66lows\x18\x02 \x01(\x0b\x32\x15.lf.a2a.v1.OAuthFlowsB\x03\xe0\x41\x02R\x05\x66lows\x12.\n\x13oauth2_metadata_url\x18\x03 \x01(\tR\x11oauth2MetadataUrl\"s\n\x1bOpenIdConnectSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\x12\x32\n\x13open_id_connect_url\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x10openIdConnectUrl\";\n\x17MutualTlsSecurityScheme\x12 \n\x0b\x64\x65scription\x18\x01 \x01(\tR\x0b\x64\x65scription\"\x87\x03\n\nOAuthFlows\x12V\n\x12\x61uthorization_code\x18\x01 \x01(\x0b\x32%.lf.a2a.v1.AuthorizationCodeOAuthFlowH\x00R\x11\x61uthorizationCode\x12V\n\x12\x63lient_credentials\x18\x02 \x01(\x0b\x32%.lf.a2a.v1.ClientCredentialsOAuthFlowH\x00R\x11\x63lientCredentials\x12>\n\x08implicit\x18\x03 \x01(\x0b\x32\x1c.lf.a2a.v1.ImplicitOAuthFlowB\x02\x18\x01H\x00R\x08implicit\x12>\n\x08password\x18\x04 \x01(\x0b\x32\x1c.lf.a2a.v1.PasswordOAuthFlowB\x02\x18\x01H\x00R\x08password\x12\x41\n\x0b\x64\x65vice_code\x18\x05 \x01(\x0b\x32\x1e.lf.a2a.v1.DeviceCodeOAuthFlowH\x00R\ndeviceCodeB\x06\n\x04\x66low\"\xc1\x02\n\x1a\x41uthorizationCodeOAuthFlow\x12\x30\n\x11\x61uthorization_url\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x10\x61uthorizationUrl\x12 \n\ttoken_url\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x03 \x01(\tR\nrefreshUrl\x12N\n\x06scopes\x18\x04 \x03(\x0b\x32\x31.lf.a2a.v1.AuthorizationCodeOAuthFlow.ScopesEntryB\x03\xe0\x41\x02R\x06scopes\x12#\n\rpkce_required\x18\x05 \x01(\x08R\x0cpkceRequired\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xea\x01\n\x1a\x43lientCredentialsOAuthFlow\x12 \n\ttoken_url\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12N\n\x06scopes\x18\x03 \x03(\x0b\x32\x31.lf.a2a.v1.ClientCredentialsOAuthFlow.ScopesEntryB\x03\xe0\x41\x02R\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xde\x01\n\x11ImplicitOAuthFlow\x12+\n\x11\x61uthorization_url\x18\x01 \x01(\tR\x10\x61uthorizationUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12@\n\x06scopes\x18\x03 \x03(\x0b\x32(.lf.a2a.v1.ImplicitOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xce\x01\n\x11PasswordOAuthFlow\x12\x1b\n\ttoken_url\x18\x01 \x01(\tR\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x02 \x01(\tR\nrefreshUrl\x12@\n\x06scopes\x18\x03 \x03(\x0b\x32(.lf.a2a.v1.PasswordOAuthFlow.ScopesEntryR\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\x9b\x02\n\x13\x44\x65viceCodeOAuthFlow\x12=\n\x18\x64\x65vice_authorization_url\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x16\x64\x65viceAuthorizationUrl\x12 \n\ttoken_url\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x08tokenUrl\x12\x1f\n\x0brefresh_url\x18\x03 \x01(\tR\nrefreshUrl\x12G\n\x06scopes\x18\x04 \x03(\x0b\x32*.lf.a2a.v1.DeviceCodeOAuthFlow.ScopesEntryB\x03\xe0\x41\x02R\x06scopes\x1a\x39\n\x0bScopesEntry\x12\x10\n\x03key\x18\x01 \x01(\tR\x03key\x12\x14\n\x05value\x18\x02 \x01(\tR\x05value:\x02\x38\x01\"\xdf\x01\n\x12SendMessageRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x31\n\x07message\x18\x02 \x01(\x0b\x32\x12.lf.a2a.v1.MessageB\x03\xe0\x41\x02R\x07message\x12I\n\rconfiguration\x18\x03 \x01(\x0b\x32#.lf.a2a.v1.SendMessageConfigurationR\rconfiguration\x12\x33\n\x08metadata\x18\x04 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"|\n\x0eGetTaskRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x13\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x02id\x12*\n\x0ehistory_length\x18\x03 \x01(\x05H\x00R\rhistoryLength\x88\x01\x01\x42\x11\n\x0f_history_length\"\x9f\x03\n\x10ListTasksRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x1d\n\ncontext_id\x18\x02 \x01(\tR\tcontextId\x12,\n\x06status\x18\x03 \x01(\x0e\x32\x14.lf.a2a.v1.TaskStateR\x06status\x12 \n\tpage_size\x18\x04 \x01(\x05H\x00R\x08pageSize\x88\x01\x01\x12\x1d\n\npage_token\x18\x05 \x01(\tR\tpageToken\x12*\n\x0ehistory_length\x18\x06 \x01(\x05H\x01R\rhistoryLength\x88\x01\x01\x12P\n\x16status_timestamp_after\x18\x07 \x01(\x0b\x32\x1a.google.protobuf.TimestampR\x14statusTimestampAfter\x12\x30\n\x11include_artifacts\x18\x08 \x01(\x08H\x02R\x10includeArtifacts\x88\x01\x01\x42\x0c\n\n_page_sizeB\x11\n\x0f_history_lengthB\x14\n\x12_include_artifacts\"\xb2\x01\n\x11ListTasksResponse\x12*\n\x05tasks\x18\x01 \x03(\x0b\x32\x0f.lf.a2a.v1.TaskB\x03\xe0\x41\x02R\x05tasks\x12+\n\x0fnext_page_token\x18\x02 \x01(\tB\x03\xe0\x41\x02R\rnextPageToken\x12 \n\tpage_size\x18\x03 \x01(\x05\x42\x03\xe0\x41\x02R\x08pageSize\x12\"\n\ntotal_size\x18\x04 \x01(\x05\x42\x03\xe0\x41\x02R\ttotalSize\"u\n\x11\x43\x61ncelTaskRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x13\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x02id\x12\x33\n\x08metadata\x18\x03 \x01(\x0b\x32\x17.google.protobuf.StructR\x08metadata\"q\n$GetTaskPushNotificationConfigRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x1c\n\x07task_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x06taskId\x12\x13\n\x02id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x02id\"t\n\'DeleteTaskPushNotificationConfigRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x1c\n\x07task_id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x06taskId\x12\x13\n\x02id\x18\x03 \x01(\tB\x03\xe0\x41\x02R\x02id\"E\n\x16SubscribeToTaskRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\x12\x13\n\x02id\x18\x02 \x01(\tB\x03\xe0\x41\x02R\x02id\"\x9a\x01\n&ListTaskPushNotificationConfigsRequest\x12\x16\n\x06tenant\x18\x04 \x01(\tR\x06tenant\x12\x1c\n\x07task_id\x18\x01 \x01(\tB\x03\xe0\x41\x02R\x06taskId\x12\x1b\n\tpage_size\x18\x02 \x01(\x05R\x08pageSize\x12\x1d\n\npage_token\x18\x03 \x01(\tR\tpageToken\"5\n\x1bGetExtendedAgentCardRequest\x12\x16\n\x06tenant\x18\x01 \x01(\tR\x06tenant\"w\n\x13SendMessageResponse\x12%\n\x04task\x18\x01 \x01(\x0b\x32\x0f.lf.a2a.v1.TaskH\x00R\x04task\x12.\n\x07message\x18\x02 \x01(\x0b\x32\x12.lf.a2a.v1.MessageH\x00R\x07messageB\t\n\x07payload\"\x8a\x02\n\x0eStreamResponse\x12%\n\x04task\x18\x01 \x01(\x0b\x32\x0f.lf.a2a.v1.TaskH\x00R\x04task\x12.\n\x07message\x18\x02 \x01(\x0b\x32\x12.lf.a2a.v1.MessageH\x00R\x07message\x12G\n\rstatus_update\x18\x03 \x01(\x0b\x32 .lf.a2a.v1.TaskStatusUpdateEventH\x00R\x0cstatusUpdate\x12M\n\x0f\x61rtifact_update\x18\x04 \x01(\x0b\x32\".lf.a2a.v1.TaskArtifactUpdateEventH\x00R\x0e\x61rtifactUpdateB\t\n\x07payload\"\x92\x01\n\'ListTaskPushNotificationConfigsResponse\x12?\n\x07\x63onfigs\x18\x01 \x03(\x0b\x32%.lf.a2a.v1.TaskPushNotificationConfigR\x07\x63onfigs\x12&\n\x0fnext_page_token\x18\x02 \x01(\tR\rnextPageToken*\xf9\x01\n\tTaskState\x12\x1a\n\x16TASK_STATE_UNSPECIFIED\x10\x00\x12\x18\n\x14TASK_STATE_SUBMITTED\x10\x01\x12\x16\n\x12TASK_STATE_WORKING\x10\x02\x12\x18\n\x14TASK_STATE_COMPLETED\x10\x03\x12\x15\n\x11TASK_STATE_FAILED\x10\x04\x12\x17\n\x13TASK_STATE_CANCELED\x10\x05\x12\x1d\n\x19TASK_STATE_INPUT_REQUIRED\x10\x06\x12\x17\n\x13TASK_STATE_REJECTED\x10\x07\x12\x1c\n\x18TASK_STATE_AUTH_REQUIRED\x10\x08*;\n\x04Role\x12\x14\n\x10ROLE_UNSPECIFIED\x10\x00\x12\r\n\tROLE_USER\x10\x01\x12\x0e\n\nROLE_AGENT\x10\x02\x32\x97\x0f\n\nA2AService\x12\x83\x01\n\x0bSendMessage\x12\x1d.lf.a2a.v1.SendMessageRequest\x1a\x1e.lf.a2a.v1.SendMessageResponse\"5\x82\xd3\xe4\x93\x02/\"\r/message:send:\x01*Z\x1b\"\x16/{tenant}/message:send:\x01*\x12\x8d\x01\n\x14SendStreamingMessage\x12\x1d.lf.a2a.v1.SendMessageRequest\x1a\x19.lf.a2a.v1.StreamResponse\"9\x82\xd3\xe4\x93\x02\x33\"\x0f/message:stream:\x01*Z\x1d\"\x18/{tenant}/message:stream:\x01*0\x01\x12k\n\x07GetTask\x12\x19.lf.a2a.v1.GetTaskRequest\x1a\x0f.lf.a2a.v1.Task\"4\xda\x41\x02id\x82\xd3\xe4\x93\x02)\x12\r/tasks/{id=*}Z\x18\x12\x16/{tenant}/tasks/{id=*}\x12i\n\tListTasks\x12\x1b.lf.a2a.v1.ListTasksRequest\x1a\x1c.lf.a2a.v1.ListTasksResponse\"!\x82\xd3\xe4\x93\x02\x1b\x12\x06/tasksZ\x11\x12\x0f/{tenant}/tasks\x12\x80\x01\n\nCancelTask\x12\x1c.lf.a2a.v1.CancelTaskRequest\x1a\x0f.lf.a2a.v1.Task\"C\x82\xd3\xe4\x93\x02=\"\x14/tasks/{id=*}:cancel:\x01*Z\"\"\x1d/{tenant}/tasks/{id=*}:cancel:\x01*\x12\x96\x01\n\x0fSubscribeToTask\x12!.lf.a2a.v1.SubscribeToTaskRequest\x1a\x19.lf.a2a.v1.StreamResponse\"C\x82\xd3\xe4\x93\x02=\x12\x17/tasks/{id=*}:subscribeZ\"\x12 /{tenant}/tasks/{id=*}:subscribe0\x01\x12\xf3\x01\n CreateTaskPushNotificationConfig\x12%.lf.a2a.v1.TaskPushNotificationConfig\x1a%.lf.a2a.v1.TaskPushNotificationConfig\"\x80\x01\xda\x41\x0etask_id,config\x82\xd3\xe4\x93\x02i\"*/tasks/{task_id=*}/pushNotificationConfigs:\x01*Z8\"3/{tenant}/tasks/{task_id=*}/pushNotificationConfigs:\x01*\x12\xfe\x01\n\x1dGetTaskPushNotificationConfig\x12/.lf.a2a.v1.GetTaskPushNotificationConfigRequest\x1a%.lf.a2a.v1.TaskPushNotificationConfig\"\x84\x01\xda\x41\ntask_id,id\x82\xd3\xe4\x93\x02q\x12\x31/tasks/{task_id=*}/pushNotificationConfigs/{id=*}Z<\x12:/{tenant}/tasks/{task_id=*}/pushNotificationConfigs/{id=*}\x12\xfd\x01\n\x1fListTaskPushNotificationConfigs\x12\x31.lf.a2a.v1.ListTaskPushNotificationConfigsRequest\x1a\x32.lf.a2a.v1.ListTaskPushNotificationConfigsResponse\"s\xda\x41\x07task_id\x82\xd3\xe4\x93\x02\x63\x12*/tasks/{task_id=*}/pushNotificationConfigsZ5\x12\x33/{tenant}/tasks/{task_id=*}/pushNotificationConfigs\x12\x8f\x01\n\x14GetExtendedAgentCard\x12&.lf.a2a.v1.GetExtendedAgentCardRequest\x1a\x14.lf.a2a.v1.AgentCard\"9\x82\xd3\xe4\x93\x02\x33\x12\x12/extendedAgentCardZ\x1d\x12\x1b/{tenant}/extendedAgentCard\x12\xf5\x01\n DeleteTaskPushNotificationConfig\x12\x32.lf.a2a.v1.DeleteTaskPushNotificationConfigRequest\x1a\x16.google.protobuf.Empty\"\x84\x01\xda\x41\ntask_id,id\x82\xd3\xe4\x93\x02q*1/tasks/{task_id=*}/pushNotificationConfigs/{id=*}Z<*:/{tenant}/tasks/{task_id=*}/pushNotificationConfigs/{id=*}B|\n\rcom.lf.a2a.v1B\x08\x41\x32\x61ProtoP\x01Z\x1bgoogle.golang.org/lf/a2a/v1\xa2\x02\x03LAX\xaa\x02\tLf.A2a.V1\xca\x02\tLf\\A2a\\V1\xe2\x02\x15Lf\\A2a\\V1\\GPBMetadata\xea\x02\x0bLf::A2a::V1b\x06proto3') + +_globals = globals() +_builder.BuildMessageAndEnumDescriptors(DESCRIPTOR, _globals) +_builder.BuildTopDescriptorsAndMessages(DESCRIPTOR, 'a2a_pb2', _globals) +if not _descriptor._USE_C_DESCRIPTORS: + _globals['DESCRIPTOR']._loaded_options = None + _globals['DESCRIPTOR']._serialized_options = b'\n\rcom.lf.a2a.v1B\010A2aProtoP\001Z\033google.golang.org/lf/a2a/v1\242\002\003LAX\252\002\tLf.A2a.V1\312\002\tLf\\A2a\\V1\342\002\025Lf\\A2a\\V1\\GPBMetadata\352\002\013Lf::A2a::V1' + _globals['_TASK'].fields_by_name['id']._loaded_options = None + _globals['_TASK'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_TASK'].fields_by_name['status']._loaded_options = None + _globals['_TASK'].fields_by_name['status']._serialized_options = b'\340A\002' + _globals['_TASKSTATUS'].fields_by_name['state']._loaded_options = None + _globals['_TASKSTATUS'].fields_by_name['state']._serialized_options = b'\340A\002' + _globals['_MESSAGE'].fields_by_name['message_id']._loaded_options = None + _globals['_MESSAGE'].fields_by_name['message_id']._serialized_options = b'\340A\002' + _globals['_MESSAGE'].fields_by_name['role']._loaded_options = None + _globals['_MESSAGE'].fields_by_name['role']._serialized_options = b'\340A\002' + _globals['_MESSAGE'].fields_by_name['parts']._loaded_options = None + _globals['_MESSAGE'].fields_by_name['parts']._serialized_options = b'\340A\002' + _globals['_ARTIFACT'].fields_by_name['artifact_id']._loaded_options = None + _globals['_ARTIFACT'].fields_by_name['artifact_id']._serialized_options = b'\340A\002' + _globals['_ARTIFACT'].fields_by_name['parts']._loaded_options = None + _globals['_ARTIFACT'].fields_by_name['parts']._serialized_options = b'\340A\002' + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['task_id']._loaded_options = None + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['task_id']._serialized_options = b'\340A\002' + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['context_id']._loaded_options = None + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['context_id']._serialized_options = b'\340A\002' + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['status']._loaded_options = None + _globals['_TASKSTATUSUPDATEEVENT'].fields_by_name['status']._serialized_options = b'\340A\002' + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['task_id']._loaded_options = None + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['task_id']._serialized_options = b'\340A\002' + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['context_id']._loaded_options = None + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['context_id']._serialized_options = b'\340A\002' + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['artifact']._loaded_options = None + _globals['_TASKARTIFACTUPDATEEVENT'].fields_by_name['artifact']._serialized_options = b'\340A\002' + _globals['_AUTHENTICATIONINFO'].fields_by_name['scheme']._loaded_options = None + _globals['_AUTHENTICATIONINFO'].fields_by_name['scheme']._serialized_options = b'\340A\002' + _globals['_AGENTINTERFACE'].fields_by_name['url']._loaded_options = None + _globals['_AGENTINTERFACE'].fields_by_name['url']._serialized_options = b'\340A\002' + _globals['_AGENTINTERFACE'].fields_by_name['protocol_binding']._loaded_options = None + _globals['_AGENTINTERFACE'].fields_by_name['protocol_binding']._serialized_options = b'\340A\002' + _globals['_AGENTINTERFACE'].fields_by_name['protocol_version']._loaded_options = None + _globals['_AGENTINTERFACE'].fields_by_name['protocol_version']._serialized_options = b'\340A\002' + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._loaded_options = None + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_options = b'8\001' + _globals['_AGENTCARD'].fields_by_name['name']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['description']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['description']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['supported_interfaces']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['supported_interfaces']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['version']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['version']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['capabilities']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['capabilities']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['default_input_modes']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['default_input_modes']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['default_output_modes']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['default_output_modes']._serialized_options = b'\340A\002' + _globals['_AGENTCARD'].fields_by_name['skills']._loaded_options = None + _globals['_AGENTCARD'].fields_by_name['skills']._serialized_options = b'\340A\002' + _globals['_AGENTPROVIDER'].fields_by_name['url']._loaded_options = None + _globals['_AGENTPROVIDER'].fields_by_name['url']._serialized_options = b'\340A\002' + _globals['_AGENTPROVIDER'].fields_by_name['organization']._loaded_options = None + _globals['_AGENTPROVIDER'].fields_by_name['organization']._serialized_options = b'\340A\002' + _globals['_AGENTSKILL'].fields_by_name['id']._loaded_options = None + _globals['_AGENTSKILL'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_AGENTSKILL'].fields_by_name['name']._loaded_options = None + _globals['_AGENTSKILL'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_AGENTSKILL'].fields_by_name['description']._loaded_options = None + _globals['_AGENTSKILL'].fields_by_name['description']._serialized_options = b'\340A\002' + _globals['_AGENTSKILL'].fields_by_name['tags']._loaded_options = None + _globals['_AGENTSKILL'].fields_by_name['tags']._serialized_options = b'\340A\002' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._loaded_options = None + _globals['_AGENTCARDSIGNATURE'].fields_by_name['protected']._serialized_options = b'\340A\002' + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._loaded_options = None + _globals['_AGENTCARDSIGNATURE'].fields_by_name['signature']._serialized_options = b'\340A\002' + _globals['_TASKPUSHNOTIFICATIONCONFIG'].fields_by_name['url']._loaded_options = None + _globals['_TASKPUSHNOTIFICATIONCONFIG'].fields_by_name['url']._serialized_options = b'\340A\002' + _globals['_SECURITYREQUIREMENT_SCHEMESENTRY']._loaded_options = None + _globals['_SECURITYREQUIREMENT_SCHEMESENTRY']._serialized_options = b'8\001' + _globals['_APIKEYSECURITYSCHEME'].fields_by_name['location']._loaded_options = None + _globals['_APIKEYSECURITYSCHEME'].fields_by_name['location']._serialized_options = b'\340A\002' + _globals['_APIKEYSECURITYSCHEME'].fields_by_name['name']._loaded_options = None + _globals['_APIKEYSECURITYSCHEME'].fields_by_name['name']._serialized_options = b'\340A\002' + _globals['_HTTPAUTHSECURITYSCHEME'].fields_by_name['scheme']._loaded_options = None + _globals['_HTTPAUTHSECURITYSCHEME'].fields_by_name['scheme']._serialized_options = b'\340A\002' + _globals['_OAUTH2SECURITYSCHEME'].fields_by_name['flows']._loaded_options = None + _globals['_OAUTH2SECURITYSCHEME'].fields_by_name['flows']._serialized_options = b'\340A\002' + _globals['_OPENIDCONNECTSECURITYSCHEME'].fields_by_name['open_id_connect_url']._loaded_options = None + _globals['_OPENIDCONNECTSECURITYSCHEME'].fields_by_name['open_id_connect_url']._serialized_options = b'\340A\002' + _globals['_OAUTHFLOWS'].fields_by_name['implicit']._loaded_options = None + _globals['_OAUTHFLOWS'].fields_by_name['implicit']._serialized_options = b'\030\001' + _globals['_OAUTHFLOWS'].fields_by_name['password']._loaded_options = None + _globals['_OAUTHFLOWS'].fields_by_name['password']._serialized_options = b'\030\001' + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['authorization_url']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['authorization_url']._serialized_options = b'\340A\002' + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['token_url']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['token_url']._serialized_options = b'\340A\002' + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['scopes']._loaded_options = None + _globals['_AUTHORIZATIONCODEOAUTHFLOW'].fields_by_name['scopes']._serialized_options = b'\340A\002' + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_CLIENTCREDENTIALSOAUTHFLOW'].fields_by_name['token_url']._loaded_options = None + _globals['_CLIENTCREDENTIALSOAUTHFLOW'].fields_by_name['token_url']._serialized_options = b'\340A\002' + _globals['_CLIENTCREDENTIALSOAUTHFLOW'].fields_by_name['scopes']._loaded_options = None + _globals['_CLIENTCREDENTIALSOAUTHFLOW'].fields_by_name['scopes']._serialized_options = b'\340A\002' + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_DEVICECODEOAUTHFLOW_SCOPESENTRY']._loaded_options = None + _globals['_DEVICECODEOAUTHFLOW_SCOPESENTRY']._serialized_options = b'8\001' + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['device_authorization_url']._loaded_options = None + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['device_authorization_url']._serialized_options = b'\340A\002' + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['token_url']._loaded_options = None + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['token_url']._serialized_options = b'\340A\002' + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['scopes']._loaded_options = None + _globals['_DEVICECODEOAUTHFLOW'].fields_by_name['scopes']._serialized_options = b'\340A\002' + _globals['_SENDMESSAGEREQUEST'].fields_by_name['message']._loaded_options = None + _globals['_SENDMESSAGEREQUEST'].fields_by_name['message']._serialized_options = b'\340A\002' + _globals['_GETTASKREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETTASKREQUEST'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_LISTTASKSRESPONSE'].fields_by_name['tasks']._loaded_options = None + _globals['_LISTTASKSRESPONSE'].fields_by_name['tasks']._serialized_options = b'\340A\002' + _globals['_LISTTASKSRESPONSE'].fields_by_name['next_page_token']._loaded_options = None + _globals['_LISTTASKSRESPONSE'].fields_by_name['next_page_token']._serialized_options = b'\340A\002' + _globals['_LISTTASKSRESPONSE'].fields_by_name['page_size']._loaded_options = None + _globals['_LISTTASKSRESPONSE'].fields_by_name['page_size']._serialized_options = b'\340A\002' + _globals['_LISTTASKSRESPONSE'].fields_by_name['total_size']._loaded_options = None + _globals['_LISTTASKSRESPONSE'].fields_by_name['total_size']._serialized_options = b'\340A\002' + _globals['_CANCELTASKREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_CANCELTASKREQUEST'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['task_id']._loaded_options = None + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['task_id']._serialized_options = b'\340A\002' + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['task_id']._loaded_options = None + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['task_id']._serialized_options = b'\340A\002' + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_SUBSCRIBETOTASKREQUEST'].fields_by_name['id']._loaded_options = None + _globals['_SUBSCRIBETOTASKREQUEST'].fields_by_name['id']._serialized_options = b'\340A\002' + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSREQUEST'].fields_by_name['task_id']._loaded_options = None + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSREQUEST'].fields_by_name['task_id']._serialized_options = b'\340A\002' + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendMessage']._serialized_options = b'\202\323\344\223\002/\"\r/message:send:\001*Z\033\"\026/{tenant}/message:send:\001*' + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SendStreamingMessage']._serialized_options = b'\202\323\344\223\0023\"\017/message:stream:\001*Z\035\"\030/{tenant}/message:stream:\001*' + _globals['_A2ASERVICE'].methods_by_name['GetTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTask']._serialized_options = b'\332A\002id\202\323\344\223\002)\022\r/tasks/{id=*}Z\030\022\026/{tenant}/tasks/{id=*}' + _globals['_A2ASERVICE'].methods_by_name['ListTasks']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['ListTasks']._serialized_options = b'\202\323\344\223\002\033\022\006/tasksZ\021\022\017/{tenant}/tasks' + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CancelTask']._serialized_options = b'\202\323\344\223\002=\"\024/tasks/{id=*}:cancel:\001*Z\"\"\035/{tenant}/tasks/{id=*}:cancel:\001*' + _globals['_A2ASERVICE'].methods_by_name['SubscribeToTask']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['SubscribeToTask']._serialized_options = b'\202\323\344\223\002=\022\027/tasks/{id=*}:subscribeZ\"\022 /{tenant}/tasks/{id=*}:subscribe' + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['CreateTaskPushNotificationConfig']._serialized_options = b'\332A\016task_id,config\202\323\344\223\002i\"*/tasks/{task_id=*}/pushNotificationConfigs:\001*Z8\"3/{tenant}/tasks/{task_id=*}/pushNotificationConfigs:\001*' + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetTaskPushNotificationConfig']._serialized_options = b'\332A\ntask_id,id\202\323\344\223\002q\0221/tasks/{task_id=*}/pushNotificationConfigs/{id=*}Z<\022:/{tenant}/tasks/{task_id=*}/pushNotificationConfigs/{id=*}' + _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfigs']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['ListTaskPushNotificationConfigs']._serialized_options = b'\332A\007task_id\202\323\344\223\002c\022*/tasks/{task_id=*}/pushNotificationConfigsZ5\0223/{tenant}/tasks/{task_id=*}/pushNotificationConfigs' + _globals['_A2ASERVICE'].methods_by_name['GetExtendedAgentCard']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['GetExtendedAgentCard']._serialized_options = b'\202\323\344\223\0023\022\022/extendedAgentCardZ\035\022\033/{tenant}/extendedAgentCard' + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._loaded_options = None + _globals['_A2ASERVICE'].methods_by_name['DeleteTaskPushNotificationConfig']._serialized_options = b'\332A\ntask_id,id\202\323\344\223\002q*1/tasks/{task_id=*}/pushNotificationConfigs/{id=*}Z<*:/{tenant}/tasks/{task_id=*}/pushNotificationConfigs/{id=*}' + _globals['_TASKSTATE']._serialized_start=9615 + _globals['_TASKSTATE']._serialized_end=9864 + _globals['_ROLE']._serialized_start=9866 + _globals['_ROLE']._serialized_end=9925 + _globals['_SENDMESSAGECONFIGURATION']._serialized_start=205 + _globals['_SENDMESSAGECONFIGURATION']._serialized_end=499 + _globals['_TASK']._serialized_start=502 + _globals['_TASK']._serialized_end=762 + _globals['_TASKSTATUS']._serialized_start=765 + _globals['_TASKSTATUS']._serialized_end=930 + _globals['_PART']._serialized_start=933 + _globals['_PART']._serialized_end=1170 + _globals['_MESSAGE']._serialized_start=1173 + _globals['_MESSAGE']._serialized_end=1491 + _globals['_ARTIFACT']._serialized_start=1494 + _globals['_ARTIFACT']._serialized_end=1725 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_start=1728 + _globals['_TASKSTATUSUPDATEEVENT']._serialized_end=1922 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_start=1925 + _globals['_TASKARTIFACTUPDATEEVENT']._serialized_end=2178 + _globals['_AUTHENTICATIONINFO']._serialized_start=2180 + _globals['_AUTHENTICATIONINFO']._serialized_end=2263 + _globals['_AGENTINTERFACE']._serialized_start=2266 + _globals['_AGENTINTERFACE']._serialized_end=2425 + _globals['_AGENTCARD']._serialized_start=2428 + _globals['_AGENTCARD']._serialized_end=3348 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_start=3220 + _globals['_AGENTCARD_SECURITYSCHEMESENTRY']._serialized_end=3313 + _globals['_AGENTPROVIDER']._serialized_start=3350 + _globals['_AGENTPROVIDER']._serialized_end=3429 + _globals['_AGENTCAPABILITIES']._serialized_start=3432 + _globals['_AGENTCAPABILITIES']._serialized_end=3711 + _globals['_AGENTEXTENSION']._serialized_start=3714 + _globals['_AGENTEXTENSION']._serialized_end=3859 + _globals['_AGENTSKILL']._serialized_start=3862 + _globals['_AGENTSKILL']._serialized_end=4165 + _globals['_AGENTCARDSIGNATURE']._serialized_start=4168 + _globals['_AGENTCARDSIGNATURE']._serialized_end=4307 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_start=4310 + _globals['_TASKPUSHNOTIFICATIONCONFIG']._serialized_end=4519 + _globals['_STRINGLIST']._serialized_start=4521 + _globals['_STRINGLIST']._serialized_end=4553 + _globals['_SECURITYREQUIREMENT']._serialized_start=4556 + _globals['_SECURITYREQUIREMENT']._serialized_end=4731 + _globals['_SECURITYREQUIREMENT_SCHEMESENTRY']._serialized_start=4650 + _globals['_SECURITYREQUIREMENT_SCHEMESENTRY']._serialized_end=4731 + _globals['_SECURITYSCHEME']._serialized_start=4734 + _globals['_SECURITYSCHEME']._serialized_end=5235 + _globals['_APIKEYSECURITYSCHEME']._serialized_start=5237 + _globals['_APIKEYSECURITYSCHEME']._serialized_end=5351 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_start=5353 + _globals['_HTTPAUTHSECURITYSCHEME']._serialized_end=5477 + _globals['_OAUTH2SECURITYSCHEME']._serialized_start=5480 + _globals['_OAUTH2SECURITYSCHEME']._serialized_end=5634 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_start=5636 + _globals['_OPENIDCONNECTSECURITYSCHEME']._serialized_end=5751 + _globals['_MUTUALTLSSECURITYSCHEME']._serialized_start=5753 + _globals['_MUTUALTLSSECURITYSCHEME']._serialized_end=5812 + _globals['_OAUTHFLOWS']._serialized_start=5815 + _globals['_OAUTHFLOWS']._serialized_end=6206 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_start=6209 + _globals['_AUTHORIZATIONCODEOAUTHFLOW']._serialized_end=6530 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_start=6473 + _globals['_AUTHORIZATIONCODEOAUTHFLOW_SCOPESENTRY']._serialized_end=6530 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_start=6533 + _globals['_CLIENTCREDENTIALSOAUTHFLOW']._serialized_end=6767 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_start=6473 + _globals['_CLIENTCREDENTIALSOAUTHFLOW_SCOPESENTRY']._serialized_end=6530 + _globals['_IMPLICITOAUTHFLOW']._serialized_start=6770 + _globals['_IMPLICITOAUTHFLOW']._serialized_end=6992 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_start=6473 + _globals['_IMPLICITOAUTHFLOW_SCOPESENTRY']._serialized_end=6530 + _globals['_PASSWORDOAUTHFLOW']._serialized_start=6995 + _globals['_PASSWORDOAUTHFLOW']._serialized_end=7201 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_start=6473 + _globals['_PASSWORDOAUTHFLOW_SCOPESENTRY']._serialized_end=6530 + _globals['_DEVICECODEOAUTHFLOW']._serialized_start=7204 + _globals['_DEVICECODEOAUTHFLOW']._serialized_end=7487 + _globals['_DEVICECODEOAUTHFLOW_SCOPESENTRY']._serialized_start=6473 + _globals['_DEVICECODEOAUTHFLOW_SCOPESENTRY']._serialized_end=6530 + _globals['_SENDMESSAGEREQUEST']._serialized_start=7490 + _globals['_SENDMESSAGEREQUEST']._serialized_end=7713 + _globals['_GETTASKREQUEST']._serialized_start=7715 + _globals['_GETTASKREQUEST']._serialized_end=7839 + _globals['_LISTTASKSREQUEST']._serialized_start=7842 + _globals['_LISTTASKSREQUEST']._serialized_end=8257 + _globals['_LISTTASKSRESPONSE']._serialized_start=8260 + _globals['_LISTTASKSRESPONSE']._serialized_end=8438 + _globals['_CANCELTASKREQUEST']._serialized_start=8440 + _globals['_CANCELTASKREQUEST']._serialized_end=8557 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=8559 + _globals['_GETTASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=8672 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_start=8674 + _globals['_DELETETASKPUSHNOTIFICATIONCONFIGREQUEST']._serialized_end=8790 + _globals['_SUBSCRIBETOTASKREQUEST']._serialized_start=8792 + _globals['_SUBSCRIBETOTASKREQUEST']._serialized_end=8861 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSREQUEST']._serialized_start=8864 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSREQUEST']._serialized_end=9018 + _globals['_GETEXTENDEDAGENTCARDREQUEST']._serialized_start=9020 + _globals['_GETEXTENDEDAGENTCARDREQUEST']._serialized_end=9073 + _globals['_SENDMESSAGERESPONSE']._serialized_start=9075 + _globals['_SENDMESSAGERESPONSE']._serialized_end=9194 + _globals['_STREAMRESPONSE']._serialized_start=9197 + _globals['_STREAMRESPONSE']._serialized_end=9463 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSRESPONSE']._serialized_start=9466 + _globals['_LISTTASKPUSHNOTIFICATIONCONFIGSRESPONSE']._serialized_end=9612 + _globals['_A2ASERVICE']._serialized_start=9928 + _globals['_A2ASERVICE']._serialized_end=11871 +# @@protoc_insertion_point(module_scope) diff --git a/src/a2a/types/a2a_pb2.pyi b/src/a2a/types/a2a_pb2.pyi new file mode 100644 index 000000000..7da2f649e --- /dev/null +++ b/src/a2a/types/a2a_pb2.pyi @@ -0,0 +1,623 @@ +import datetime + +from google.api import annotations_pb2 as _annotations_pb2 +from google.api import client_pb2 as _client_pb2 +from google.api import field_behavior_pb2 as _field_behavior_pb2 +from google.protobuf import empty_pb2 as _empty_pb2 +from google.protobuf import struct_pb2 as _struct_pb2 +from google.protobuf import timestamp_pb2 as _timestamp_pb2 +from google.protobuf.internal import containers as _containers +from google.protobuf.internal import enum_type_wrapper as _enum_type_wrapper +from google.protobuf import descriptor as _descriptor +from google.protobuf import message as _message +from collections.abc import Iterable as _Iterable, Mapping as _Mapping +from typing import ClassVar as _ClassVar, Optional as _Optional, Union as _Union + +DESCRIPTOR: _descriptor.FileDescriptor + +class TaskState(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + TASK_STATE_UNSPECIFIED: _ClassVar[TaskState] + TASK_STATE_SUBMITTED: _ClassVar[TaskState] + TASK_STATE_WORKING: _ClassVar[TaskState] + TASK_STATE_COMPLETED: _ClassVar[TaskState] + TASK_STATE_FAILED: _ClassVar[TaskState] + TASK_STATE_CANCELED: _ClassVar[TaskState] + TASK_STATE_INPUT_REQUIRED: _ClassVar[TaskState] + TASK_STATE_REJECTED: _ClassVar[TaskState] + TASK_STATE_AUTH_REQUIRED: _ClassVar[TaskState] + +class Role(int, metaclass=_enum_type_wrapper.EnumTypeWrapper): + __slots__ = () + ROLE_UNSPECIFIED: _ClassVar[Role] + ROLE_USER: _ClassVar[Role] + ROLE_AGENT: _ClassVar[Role] +TASK_STATE_UNSPECIFIED: TaskState +TASK_STATE_SUBMITTED: TaskState +TASK_STATE_WORKING: TaskState +TASK_STATE_COMPLETED: TaskState +TASK_STATE_FAILED: TaskState +TASK_STATE_CANCELED: TaskState +TASK_STATE_INPUT_REQUIRED: TaskState +TASK_STATE_REJECTED: TaskState +TASK_STATE_AUTH_REQUIRED: TaskState +ROLE_UNSPECIFIED: Role +ROLE_USER: Role +ROLE_AGENT: Role + +class SendMessageConfiguration(_message.Message): + __slots__ = ("accepted_output_modes", "task_push_notification_config", "history_length", "return_immediately") + ACCEPTED_OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + TASK_PUSH_NOTIFICATION_CONFIG_FIELD_NUMBER: _ClassVar[int] + HISTORY_LENGTH_FIELD_NUMBER: _ClassVar[int] + RETURN_IMMEDIATELY_FIELD_NUMBER: _ClassVar[int] + accepted_output_modes: _containers.RepeatedScalarFieldContainer[str] + task_push_notification_config: TaskPushNotificationConfig + history_length: int + return_immediately: bool + def __init__(self, accepted_output_modes: _Optional[_Iterable[str]] = ..., task_push_notification_config: _Optional[_Union[TaskPushNotificationConfig, _Mapping]] = ..., history_length: _Optional[int] = ..., return_immediately: _Optional[bool] = ...) -> None: ... + +class Task(_message.Message): + __slots__ = ("id", "context_id", "status", "artifacts", "history", "metadata") + ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + ARTIFACTS_FIELD_NUMBER: _ClassVar[int] + HISTORY_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + id: str + context_id: str + status: TaskStatus + artifacts: _containers.RepeatedCompositeFieldContainer[Artifact] + history: _containers.RepeatedCompositeFieldContainer[Message] + metadata: _struct_pb2.Struct + def __init__(self, id: _Optional[str] = ..., context_id: _Optional[str] = ..., status: _Optional[_Union[TaskStatus, _Mapping]] = ..., artifacts: _Optional[_Iterable[_Union[Artifact, _Mapping]]] = ..., history: _Optional[_Iterable[_Union[Message, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class TaskStatus(_message.Message): + __slots__ = ("state", "message", "timestamp") + STATE_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + TIMESTAMP_FIELD_NUMBER: _ClassVar[int] + state: TaskState + message: Message + timestamp: _timestamp_pb2.Timestamp + def __init__(self, state: _Optional[_Union[TaskState, str]] = ..., message: _Optional[_Union[Message, _Mapping]] = ..., timestamp: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ...) -> None: ... + +class Part(_message.Message): + __slots__ = ("text", "raw", "url", "data", "metadata", "filename", "media_type") + TEXT_FIELD_NUMBER: _ClassVar[int] + RAW_FIELD_NUMBER: _ClassVar[int] + URL_FIELD_NUMBER: _ClassVar[int] + DATA_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + FILENAME_FIELD_NUMBER: _ClassVar[int] + MEDIA_TYPE_FIELD_NUMBER: _ClassVar[int] + text: str + raw: bytes + url: str + data: _struct_pb2.Value + metadata: _struct_pb2.Struct + filename: str + media_type: str + def __init__(self, text: _Optional[str] = ..., raw: _Optional[bytes] = ..., url: _Optional[str] = ..., data: _Optional[_Union[_struct_pb2.Value, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., filename: _Optional[str] = ..., media_type: _Optional[str] = ...) -> None: ... + +class Message(_message.Message): + __slots__ = ("message_id", "context_id", "task_id", "role", "parts", "metadata", "extensions", "reference_task_ids") + MESSAGE_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + ROLE_FIELD_NUMBER: _ClassVar[int] + PARTS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + REFERENCE_TASK_IDS_FIELD_NUMBER: _ClassVar[int] + message_id: str + context_id: str + task_id: str + role: Role + parts: _containers.RepeatedCompositeFieldContainer[Part] + metadata: _struct_pb2.Struct + extensions: _containers.RepeatedScalarFieldContainer[str] + reference_task_ids: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, message_id: _Optional[str] = ..., context_id: _Optional[str] = ..., task_id: _Optional[str] = ..., role: _Optional[_Union[Role, str]] = ..., parts: _Optional[_Iterable[_Union[Part, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., extensions: _Optional[_Iterable[str]] = ..., reference_task_ids: _Optional[_Iterable[str]] = ...) -> None: ... + +class Artifact(_message.Message): + __slots__ = ("artifact_id", "name", "description", "parts", "metadata", "extensions") + ARTIFACT_ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + PARTS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + artifact_id: str + name: str + description: str + parts: _containers.RepeatedCompositeFieldContainer[Part] + metadata: _struct_pb2.Struct + extensions: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, artifact_id: _Optional[str] = ..., name: _Optional[str] = ..., description: _Optional[str] = ..., parts: _Optional[_Iterable[_Union[Part, _Mapping]]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ..., extensions: _Optional[_Iterable[str]] = ...) -> None: ... + +class TaskStatusUpdateEvent(_message.Message): + __slots__ = ("task_id", "context_id", "status", "metadata") + TASK_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + task_id: str + context_id: str + status: TaskStatus + metadata: _struct_pb2.Struct + def __init__(self, task_id: _Optional[str] = ..., context_id: _Optional[str] = ..., status: _Optional[_Union[TaskStatus, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class TaskArtifactUpdateEvent(_message.Message): + __slots__ = ("task_id", "context_id", "artifact", "append", "last_chunk", "metadata") + TASK_ID_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + ARTIFACT_FIELD_NUMBER: _ClassVar[int] + APPEND_FIELD_NUMBER: _ClassVar[int] + LAST_CHUNK_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + task_id: str + context_id: str + artifact: Artifact + append: bool + last_chunk: bool + metadata: _struct_pb2.Struct + def __init__(self, task_id: _Optional[str] = ..., context_id: _Optional[str] = ..., artifact: _Optional[_Union[Artifact, _Mapping]] = ..., append: _Optional[bool] = ..., last_chunk: _Optional[bool] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class AuthenticationInfo(_message.Message): + __slots__ = ("scheme", "credentials") + SCHEME_FIELD_NUMBER: _ClassVar[int] + CREDENTIALS_FIELD_NUMBER: _ClassVar[int] + scheme: str + credentials: str + def __init__(self, scheme: _Optional[str] = ..., credentials: _Optional[str] = ...) -> None: ... + +class AgentInterface(_message.Message): + __slots__ = ("url", "protocol_binding", "tenant", "protocol_version") + URL_FIELD_NUMBER: _ClassVar[int] + PROTOCOL_BINDING_FIELD_NUMBER: _ClassVar[int] + TENANT_FIELD_NUMBER: _ClassVar[int] + PROTOCOL_VERSION_FIELD_NUMBER: _ClassVar[int] + url: str + protocol_binding: str + tenant: str + protocol_version: str + def __init__(self, url: _Optional[str] = ..., protocol_binding: _Optional[str] = ..., tenant: _Optional[str] = ..., protocol_version: _Optional[str] = ...) -> None: ... + +class AgentCard(_message.Message): + __slots__ = ("name", "description", "supported_interfaces", "provider", "version", "documentation_url", "capabilities", "security_schemes", "security_requirements", "default_input_modes", "default_output_modes", "skills", "signatures", "icon_url") + class SecuritySchemesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: SecurityScheme + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[SecurityScheme, _Mapping]] = ...) -> None: ... + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + SUPPORTED_INTERFACES_FIELD_NUMBER: _ClassVar[int] + PROVIDER_FIELD_NUMBER: _ClassVar[int] + VERSION_FIELD_NUMBER: _ClassVar[int] + DOCUMENTATION_URL_FIELD_NUMBER: _ClassVar[int] + CAPABILITIES_FIELD_NUMBER: _ClassVar[int] + SECURITY_SCHEMES_FIELD_NUMBER: _ClassVar[int] + SECURITY_REQUIREMENTS_FIELD_NUMBER: _ClassVar[int] + DEFAULT_INPUT_MODES_FIELD_NUMBER: _ClassVar[int] + DEFAULT_OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + SKILLS_FIELD_NUMBER: _ClassVar[int] + SIGNATURES_FIELD_NUMBER: _ClassVar[int] + ICON_URL_FIELD_NUMBER: _ClassVar[int] + name: str + description: str + supported_interfaces: _containers.RepeatedCompositeFieldContainer[AgentInterface] + provider: AgentProvider + version: str + documentation_url: str + capabilities: AgentCapabilities + security_schemes: _containers.MessageMap[str, SecurityScheme] + security_requirements: _containers.RepeatedCompositeFieldContainer[SecurityRequirement] + default_input_modes: _containers.RepeatedScalarFieldContainer[str] + default_output_modes: _containers.RepeatedScalarFieldContainer[str] + skills: _containers.RepeatedCompositeFieldContainer[AgentSkill] + signatures: _containers.RepeatedCompositeFieldContainer[AgentCardSignature] + icon_url: str + def __init__(self, name: _Optional[str] = ..., description: _Optional[str] = ..., supported_interfaces: _Optional[_Iterable[_Union[AgentInterface, _Mapping]]] = ..., provider: _Optional[_Union[AgentProvider, _Mapping]] = ..., version: _Optional[str] = ..., documentation_url: _Optional[str] = ..., capabilities: _Optional[_Union[AgentCapabilities, _Mapping]] = ..., security_schemes: _Optional[_Mapping[str, SecurityScheme]] = ..., security_requirements: _Optional[_Iterable[_Union[SecurityRequirement, _Mapping]]] = ..., default_input_modes: _Optional[_Iterable[str]] = ..., default_output_modes: _Optional[_Iterable[str]] = ..., skills: _Optional[_Iterable[_Union[AgentSkill, _Mapping]]] = ..., signatures: _Optional[_Iterable[_Union[AgentCardSignature, _Mapping]]] = ..., icon_url: _Optional[str] = ...) -> None: ... + +class AgentProvider(_message.Message): + __slots__ = ("url", "organization") + URL_FIELD_NUMBER: _ClassVar[int] + ORGANIZATION_FIELD_NUMBER: _ClassVar[int] + url: str + organization: str + def __init__(self, url: _Optional[str] = ..., organization: _Optional[str] = ...) -> None: ... + +class AgentCapabilities(_message.Message): + __slots__ = ("streaming", "push_notifications", "extensions", "extended_agent_card") + STREAMING_FIELD_NUMBER: _ClassVar[int] + PUSH_NOTIFICATIONS_FIELD_NUMBER: _ClassVar[int] + EXTENSIONS_FIELD_NUMBER: _ClassVar[int] + EXTENDED_AGENT_CARD_FIELD_NUMBER: _ClassVar[int] + streaming: bool + push_notifications: bool + extensions: _containers.RepeatedCompositeFieldContainer[AgentExtension] + extended_agent_card: bool + def __init__(self, streaming: _Optional[bool] = ..., push_notifications: _Optional[bool] = ..., extensions: _Optional[_Iterable[_Union[AgentExtension, _Mapping]]] = ..., extended_agent_card: _Optional[bool] = ...) -> None: ... + +class AgentExtension(_message.Message): + __slots__ = ("uri", "description", "required", "params") + URI_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + REQUIRED_FIELD_NUMBER: _ClassVar[int] + PARAMS_FIELD_NUMBER: _ClassVar[int] + uri: str + description: str + required: bool + params: _struct_pb2.Struct + def __init__(self, uri: _Optional[str] = ..., description: _Optional[str] = ..., required: _Optional[bool] = ..., params: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class AgentSkill(_message.Message): + __slots__ = ("id", "name", "description", "tags", "examples", "input_modes", "output_modes", "security_requirements") + ID_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + TAGS_FIELD_NUMBER: _ClassVar[int] + EXAMPLES_FIELD_NUMBER: _ClassVar[int] + INPUT_MODES_FIELD_NUMBER: _ClassVar[int] + OUTPUT_MODES_FIELD_NUMBER: _ClassVar[int] + SECURITY_REQUIREMENTS_FIELD_NUMBER: _ClassVar[int] + id: str + name: str + description: str + tags: _containers.RepeatedScalarFieldContainer[str] + examples: _containers.RepeatedScalarFieldContainer[str] + input_modes: _containers.RepeatedScalarFieldContainer[str] + output_modes: _containers.RepeatedScalarFieldContainer[str] + security_requirements: _containers.RepeatedCompositeFieldContainer[SecurityRequirement] + def __init__(self, id: _Optional[str] = ..., name: _Optional[str] = ..., description: _Optional[str] = ..., tags: _Optional[_Iterable[str]] = ..., examples: _Optional[_Iterable[str]] = ..., input_modes: _Optional[_Iterable[str]] = ..., output_modes: _Optional[_Iterable[str]] = ..., security_requirements: _Optional[_Iterable[_Union[SecurityRequirement, _Mapping]]] = ...) -> None: ... + +class AgentCardSignature(_message.Message): + __slots__ = ("protected", "signature", "header") + PROTECTED_FIELD_NUMBER: _ClassVar[int] + SIGNATURE_FIELD_NUMBER: _ClassVar[int] + HEADER_FIELD_NUMBER: _ClassVar[int] + protected: str + signature: str + header: _struct_pb2.Struct + def __init__(self, protected: _Optional[str] = ..., signature: _Optional[str] = ..., header: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class TaskPushNotificationConfig(_message.Message): + __slots__ = ("tenant", "id", "task_id", "url", "token", "authentication") + TENANT_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + URL_FIELD_NUMBER: _ClassVar[int] + TOKEN_FIELD_NUMBER: _ClassVar[int] + AUTHENTICATION_FIELD_NUMBER: _ClassVar[int] + tenant: str + id: str + task_id: str + url: str + token: str + authentication: AuthenticationInfo + def __init__(self, tenant: _Optional[str] = ..., id: _Optional[str] = ..., task_id: _Optional[str] = ..., url: _Optional[str] = ..., token: _Optional[str] = ..., authentication: _Optional[_Union[AuthenticationInfo, _Mapping]] = ...) -> None: ... + +class StringList(_message.Message): + __slots__ = ("list",) + LIST_FIELD_NUMBER: _ClassVar[int] + list: _containers.RepeatedScalarFieldContainer[str] + def __init__(self, list: _Optional[_Iterable[str]] = ...) -> None: ... + +class SecurityRequirement(_message.Message): + __slots__ = ("schemes",) + class SchemesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: StringList + def __init__(self, key: _Optional[str] = ..., value: _Optional[_Union[StringList, _Mapping]] = ...) -> None: ... + SCHEMES_FIELD_NUMBER: _ClassVar[int] + schemes: _containers.MessageMap[str, StringList] + def __init__(self, schemes: _Optional[_Mapping[str, StringList]] = ...) -> None: ... + +class SecurityScheme(_message.Message): + __slots__ = ("api_key_security_scheme", "http_auth_security_scheme", "oauth2_security_scheme", "open_id_connect_security_scheme", "mtls_security_scheme") + API_KEY_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + HTTP_AUTH_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + OAUTH2_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + OPEN_ID_CONNECT_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + MTLS_SECURITY_SCHEME_FIELD_NUMBER: _ClassVar[int] + api_key_security_scheme: APIKeySecurityScheme + http_auth_security_scheme: HTTPAuthSecurityScheme + oauth2_security_scheme: OAuth2SecurityScheme + open_id_connect_security_scheme: OpenIdConnectSecurityScheme + mtls_security_scheme: MutualTlsSecurityScheme + def __init__(self, api_key_security_scheme: _Optional[_Union[APIKeySecurityScheme, _Mapping]] = ..., http_auth_security_scheme: _Optional[_Union[HTTPAuthSecurityScheme, _Mapping]] = ..., oauth2_security_scheme: _Optional[_Union[OAuth2SecurityScheme, _Mapping]] = ..., open_id_connect_security_scheme: _Optional[_Union[OpenIdConnectSecurityScheme, _Mapping]] = ..., mtls_security_scheme: _Optional[_Union[MutualTlsSecurityScheme, _Mapping]] = ...) -> None: ... + +class APIKeySecurityScheme(_message.Message): + __slots__ = ("description", "location", "name") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + LOCATION_FIELD_NUMBER: _ClassVar[int] + NAME_FIELD_NUMBER: _ClassVar[int] + description: str + location: str + name: str + def __init__(self, description: _Optional[str] = ..., location: _Optional[str] = ..., name: _Optional[str] = ...) -> None: ... + +class HTTPAuthSecurityScheme(_message.Message): + __slots__ = ("description", "scheme", "bearer_format") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + SCHEME_FIELD_NUMBER: _ClassVar[int] + BEARER_FORMAT_FIELD_NUMBER: _ClassVar[int] + description: str + scheme: str + bearer_format: str + def __init__(self, description: _Optional[str] = ..., scheme: _Optional[str] = ..., bearer_format: _Optional[str] = ...) -> None: ... + +class OAuth2SecurityScheme(_message.Message): + __slots__ = ("description", "flows", "oauth2_metadata_url") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + FLOWS_FIELD_NUMBER: _ClassVar[int] + OAUTH2_METADATA_URL_FIELD_NUMBER: _ClassVar[int] + description: str + flows: OAuthFlows + oauth2_metadata_url: str + def __init__(self, description: _Optional[str] = ..., flows: _Optional[_Union[OAuthFlows, _Mapping]] = ..., oauth2_metadata_url: _Optional[str] = ...) -> None: ... + +class OpenIdConnectSecurityScheme(_message.Message): + __slots__ = ("description", "open_id_connect_url") + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + OPEN_ID_CONNECT_URL_FIELD_NUMBER: _ClassVar[int] + description: str + open_id_connect_url: str + def __init__(self, description: _Optional[str] = ..., open_id_connect_url: _Optional[str] = ...) -> None: ... + +class MutualTlsSecurityScheme(_message.Message): + __slots__ = ("description",) + DESCRIPTION_FIELD_NUMBER: _ClassVar[int] + description: str + def __init__(self, description: _Optional[str] = ...) -> None: ... + +class OAuthFlows(_message.Message): + __slots__ = ("authorization_code", "client_credentials", "implicit", "password", "device_code") + AUTHORIZATION_CODE_FIELD_NUMBER: _ClassVar[int] + CLIENT_CREDENTIALS_FIELD_NUMBER: _ClassVar[int] + IMPLICIT_FIELD_NUMBER: _ClassVar[int] + PASSWORD_FIELD_NUMBER: _ClassVar[int] + DEVICE_CODE_FIELD_NUMBER: _ClassVar[int] + authorization_code: AuthorizationCodeOAuthFlow + client_credentials: ClientCredentialsOAuthFlow + implicit: ImplicitOAuthFlow + password: PasswordOAuthFlow + device_code: DeviceCodeOAuthFlow + def __init__(self, authorization_code: _Optional[_Union[AuthorizationCodeOAuthFlow, _Mapping]] = ..., client_credentials: _Optional[_Union[ClientCredentialsOAuthFlow, _Mapping]] = ..., implicit: _Optional[_Union[ImplicitOAuthFlow, _Mapping]] = ..., password: _Optional[_Union[PasswordOAuthFlow, _Mapping]] = ..., device_code: _Optional[_Union[DeviceCodeOAuthFlow, _Mapping]] = ...) -> None: ... + +class AuthorizationCodeOAuthFlow(_message.Message): + __slots__ = ("authorization_url", "token_url", "refresh_url", "scopes", "pkce_required") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + AUTHORIZATION_URL_FIELD_NUMBER: _ClassVar[int] + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + PKCE_REQUIRED_FIELD_NUMBER: _ClassVar[int] + authorization_url: str + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + pkce_required: bool + def __init__(self, authorization_url: _Optional[str] = ..., token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ..., pkce_required: _Optional[bool] = ...) -> None: ... + +class ClientCredentialsOAuthFlow(_message.Message): + __slots__ = ("token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class ImplicitOAuthFlow(_message.Message): + __slots__ = ("authorization_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + AUTHORIZATION_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + authorization_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, authorization_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class PasswordOAuthFlow(_message.Message): + __slots__ = ("token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class DeviceCodeOAuthFlow(_message.Message): + __slots__ = ("device_authorization_url", "token_url", "refresh_url", "scopes") + class ScopesEntry(_message.Message): + __slots__ = ("key", "value") + KEY_FIELD_NUMBER: _ClassVar[int] + VALUE_FIELD_NUMBER: _ClassVar[int] + key: str + value: str + def __init__(self, key: _Optional[str] = ..., value: _Optional[str] = ...) -> None: ... + DEVICE_AUTHORIZATION_URL_FIELD_NUMBER: _ClassVar[int] + TOKEN_URL_FIELD_NUMBER: _ClassVar[int] + REFRESH_URL_FIELD_NUMBER: _ClassVar[int] + SCOPES_FIELD_NUMBER: _ClassVar[int] + device_authorization_url: str + token_url: str + refresh_url: str + scopes: _containers.ScalarMap[str, str] + def __init__(self, device_authorization_url: _Optional[str] = ..., token_url: _Optional[str] = ..., refresh_url: _Optional[str] = ..., scopes: _Optional[_Mapping[str, str]] = ...) -> None: ... + +class SendMessageRequest(_message.Message): + __slots__ = ("tenant", "message", "configuration", "metadata") + TENANT_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + CONFIGURATION_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + tenant: str + message: Message + configuration: SendMessageConfiguration + metadata: _struct_pb2.Struct + def __init__(self, tenant: _Optional[str] = ..., message: _Optional[_Union[Message, _Mapping]] = ..., configuration: _Optional[_Union[SendMessageConfiguration, _Mapping]] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class GetTaskRequest(_message.Message): + __slots__ = ("tenant", "id", "history_length") + TENANT_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + HISTORY_LENGTH_FIELD_NUMBER: _ClassVar[int] + tenant: str + id: str + history_length: int + def __init__(self, tenant: _Optional[str] = ..., id: _Optional[str] = ..., history_length: _Optional[int] = ...) -> None: ... + +class ListTasksRequest(_message.Message): + __slots__ = ("tenant", "context_id", "status", "page_size", "page_token", "history_length", "status_timestamp_after", "include_artifacts") + TENANT_FIELD_NUMBER: _ClassVar[int] + CONTEXT_ID_FIELD_NUMBER: _ClassVar[int] + STATUS_FIELD_NUMBER: _ClassVar[int] + PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] + PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + HISTORY_LENGTH_FIELD_NUMBER: _ClassVar[int] + STATUS_TIMESTAMP_AFTER_FIELD_NUMBER: _ClassVar[int] + INCLUDE_ARTIFACTS_FIELD_NUMBER: _ClassVar[int] + tenant: str + context_id: str + status: TaskState + page_size: int + page_token: str + history_length: int + status_timestamp_after: _timestamp_pb2.Timestamp + include_artifacts: bool + def __init__(self, tenant: _Optional[str] = ..., context_id: _Optional[str] = ..., status: _Optional[_Union[TaskState, str]] = ..., page_size: _Optional[int] = ..., page_token: _Optional[str] = ..., history_length: _Optional[int] = ..., status_timestamp_after: _Optional[_Union[datetime.datetime, _timestamp_pb2.Timestamp, _Mapping]] = ..., include_artifacts: _Optional[bool] = ...) -> None: ... + +class ListTasksResponse(_message.Message): + __slots__ = ("tasks", "next_page_token", "page_size", "total_size") + TASKS_FIELD_NUMBER: _ClassVar[int] + NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] + TOTAL_SIZE_FIELD_NUMBER: _ClassVar[int] + tasks: _containers.RepeatedCompositeFieldContainer[Task] + next_page_token: str + page_size: int + total_size: int + def __init__(self, tasks: _Optional[_Iterable[_Union[Task, _Mapping]]] = ..., next_page_token: _Optional[str] = ..., page_size: _Optional[int] = ..., total_size: _Optional[int] = ...) -> None: ... + +class CancelTaskRequest(_message.Message): + __slots__ = ("tenant", "id", "metadata") + TENANT_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + METADATA_FIELD_NUMBER: _ClassVar[int] + tenant: str + id: str + metadata: _struct_pb2.Struct + def __init__(self, tenant: _Optional[str] = ..., id: _Optional[str] = ..., metadata: _Optional[_Union[_struct_pb2.Struct, _Mapping]] = ...) -> None: ... + +class GetTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("tenant", "task_id", "id") + TENANT_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + tenant: str + task_id: str + id: str + def __init__(self, tenant: _Optional[str] = ..., task_id: _Optional[str] = ..., id: _Optional[str] = ...) -> None: ... + +class DeleteTaskPushNotificationConfigRequest(_message.Message): + __slots__ = ("tenant", "task_id", "id") + TENANT_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + tenant: str + task_id: str + id: str + def __init__(self, tenant: _Optional[str] = ..., task_id: _Optional[str] = ..., id: _Optional[str] = ...) -> None: ... + +class SubscribeToTaskRequest(_message.Message): + __slots__ = ("tenant", "id") + TENANT_FIELD_NUMBER: _ClassVar[int] + ID_FIELD_NUMBER: _ClassVar[int] + tenant: str + id: str + def __init__(self, tenant: _Optional[str] = ..., id: _Optional[str] = ...) -> None: ... + +class ListTaskPushNotificationConfigsRequest(_message.Message): + __slots__ = ("tenant", "task_id", "page_size", "page_token") + TENANT_FIELD_NUMBER: _ClassVar[int] + TASK_ID_FIELD_NUMBER: _ClassVar[int] + PAGE_SIZE_FIELD_NUMBER: _ClassVar[int] + PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + tenant: str + task_id: str + page_size: int + page_token: str + def __init__(self, tenant: _Optional[str] = ..., task_id: _Optional[str] = ..., page_size: _Optional[int] = ..., page_token: _Optional[str] = ...) -> None: ... + +class GetExtendedAgentCardRequest(_message.Message): + __slots__ = ("tenant",) + TENANT_FIELD_NUMBER: _ClassVar[int] + tenant: str + def __init__(self, tenant: _Optional[str] = ...) -> None: ... + +class SendMessageResponse(_message.Message): + __slots__ = ("task", "message") + TASK_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + task: Task + message: Message + def __init__(self, task: _Optional[_Union[Task, _Mapping]] = ..., message: _Optional[_Union[Message, _Mapping]] = ...) -> None: ... + +class StreamResponse(_message.Message): + __slots__ = ("task", "message", "status_update", "artifact_update") + TASK_FIELD_NUMBER: _ClassVar[int] + MESSAGE_FIELD_NUMBER: _ClassVar[int] + STATUS_UPDATE_FIELD_NUMBER: _ClassVar[int] + ARTIFACT_UPDATE_FIELD_NUMBER: _ClassVar[int] + task: Task + message: Message + status_update: TaskStatusUpdateEvent + artifact_update: TaskArtifactUpdateEvent + def __init__(self, task: _Optional[_Union[Task, _Mapping]] = ..., message: _Optional[_Union[Message, _Mapping]] = ..., status_update: _Optional[_Union[TaskStatusUpdateEvent, _Mapping]] = ..., artifact_update: _Optional[_Union[TaskArtifactUpdateEvent, _Mapping]] = ...) -> None: ... + +class ListTaskPushNotificationConfigsResponse(_message.Message): + __slots__ = ("configs", "next_page_token") + CONFIGS_FIELD_NUMBER: _ClassVar[int] + NEXT_PAGE_TOKEN_FIELD_NUMBER: _ClassVar[int] + configs: _containers.RepeatedCompositeFieldContainer[TaskPushNotificationConfig] + next_page_token: str + def __init__(self, configs: _Optional[_Iterable[_Union[TaskPushNotificationConfig, _Mapping]]] = ..., next_page_token: _Optional[str] = ...) -> None: ... diff --git a/src/a2a/types/a2a_pb2_grpc.py b/src/a2a/types/a2a_pb2_grpc.py new file mode 100644 index 000000000..e969f3bd5 --- /dev/null +++ b/src/a2a/types/a2a_pb2_grpc.py @@ -0,0 +1,528 @@ +# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT! +"""Client and server classes corresponding to protobuf-defined services.""" +import grpc + +from . import a2a_pb2 as a2a__pb2 +from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2 + + +class A2AServiceStub(object): + """Provides operations for interacting with agents using the A2A protocol. + """ + + def __init__(self, channel): + """Constructor. + + Args: + channel: A grpc.Channel. + """ + self.SendMessage = channel.unary_unary( + '/lf.a2a.v1.A2AService/SendMessage', + request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__pb2.SendMessageResponse.FromString, + _registered_method=True) + self.SendStreamingMessage = channel.unary_stream( + '/lf.a2a.v1.A2AService/SendStreamingMessage', + request_serializer=a2a__pb2.SendMessageRequest.SerializeToString, + response_deserializer=a2a__pb2.StreamResponse.FromString, + _registered_method=True) + self.GetTask = channel.unary_unary( + '/lf.a2a.v1.A2AService/GetTask', + request_serializer=a2a__pb2.GetTaskRequest.SerializeToString, + response_deserializer=a2a__pb2.Task.FromString, + _registered_method=True) + self.ListTasks = channel.unary_unary( + '/lf.a2a.v1.A2AService/ListTasks', + request_serializer=a2a__pb2.ListTasksRequest.SerializeToString, + response_deserializer=a2a__pb2.ListTasksResponse.FromString, + _registered_method=True) + self.CancelTask = channel.unary_unary( + '/lf.a2a.v1.A2AService/CancelTask', + request_serializer=a2a__pb2.CancelTaskRequest.SerializeToString, + response_deserializer=a2a__pb2.Task.FromString, + _registered_method=True) + self.SubscribeToTask = channel.unary_stream( + '/lf.a2a.v1.A2AService/SubscribeToTask', + request_serializer=a2a__pb2.SubscribeToTaskRequest.SerializeToString, + response_deserializer=a2a__pb2.StreamResponse.FromString, + _registered_method=True) + self.CreateTaskPushNotificationConfig = channel.unary_unary( + '/lf.a2a.v1.A2AService/CreateTaskPushNotificationConfig', + request_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + _registered_method=True) + self.GetTaskPushNotificationConfig = channel.unary_unary( + '/lf.a2a.v1.A2AService/GetTaskPushNotificationConfig', + request_serializer=a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + _registered_method=True) + self.ListTaskPushNotificationConfigs = channel.unary_unary( + '/lf.a2a.v1.A2AService/ListTaskPushNotificationConfigs', + request_serializer=a2a__pb2.ListTaskPushNotificationConfigsRequest.SerializeToString, + response_deserializer=a2a__pb2.ListTaskPushNotificationConfigsResponse.FromString, + _registered_method=True) + self.GetExtendedAgentCard = channel.unary_unary( + '/lf.a2a.v1.A2AService/GetExtendedAgentCard', + request_serializer=a2a__pb2.GetExtendedAgentCardRequest.SerializeToString, + response_deserializer=a2a__pb2.AgentCard.FromString, + _registered_method=True) + self.DeleteTaskPushNotificationConfig = channel.unary_unary( + '/lf.a2a.v1.A2AService/DeleteTaskPushNotificationConfig', + request_serializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + response_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString, + _registered_method=True) + + +class A2AServiceServicer(object): + """Provides operations for interacting with agents using the A2A protocol. + """ + + def SendMessage(self, request, context): + """Sends a message to an agent. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SendStreamingMessage(self, request, context): + """Sends a streaming message to an agent, allowing for real-time interaction and status updates. + Streaming version of `SendMessage` + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTask(self, request, context): + """Gets the latest state of a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTasks(self, request, context): + """Lists tasks that match the specified filter. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CancelTask(self, request, context): + """Cancels a task in progress. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def SubscribeToTask(self, request, context): + """Subscribes to task updates for tasks not in a terminal state. + Returns `UnsupportedOperationError` if the task is already in a terminal state (completed, failed, canceled, rejected). + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def CreateTaskPushNotificationConfig(self, request, context): + """(-- api-linter: client-libraries::4232::required-fields=disabled + api-linter: core::0133::method-signature=disabled + api-linter: core::0133::request-message-name=disabled + aip.dev/not-precedent: method_signature preserved for backwards compatibility --) + Creates a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetTaskPushNotificationConfig(self, request, context): + """Gets a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def ListTaskPushNotificationConfigs(self, request, context): + """Get a list of push notifications configured for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def GetExtendedAgentCard(self, request, context): + """Gets the extended agent card for the authenticated agent. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + def DeleteTaskPushNotificationConfig(self, request, context): + """Deletes a push notification config for a task. + """ + context.set_code(grpc.StatusCode.UNIMPLEMENTED) + context.set_details('Method not implemented!') + raise NotImplementedError('Method not implemented!') + + +def add_A2AServiceServicer_to_server(servicer, server): + rpc_method_handlers = { + 'SendMessage': grpc.unary_unary_rpc_method_handler( + servicer.SendMessage, + request_deserializer=a2a__pb2.SendMessageRequest.FromString, + response_serializer=a2a__pb2.SendMessageResponse.SerializeToString, + ), + 'SendStreamingMessage': grpc.unary_stream_rpc_method_handler( + servicer.SendStreamingMessage, + request_deserializer=a2a__pb2.SendMessageRequest.FromString, + response_serializer=a2a__pb2.StreamResponse.SerializeToString, + ), + 'GetTask': grpc.unary_unary_rpc_method_handler( + servicer.GetTask, + request_deserializer=a2a__pb2.GetTaskRequest.FromString, + response_serializer=a2a__pb2.Task.SerializeToString, + ), + 'ListTasks': grpc.unary_unary_rpc_method_handler( + servicer.ListTasks, + request_deserializer=a2a__pb2.ListTasksRequest.FromString, + response_serializer=a2a__pb2.ListTasksResponse.SerializeToString, + ), + 'CancelTask': grpc.unary_unary_rpc_method_handler( + servicer.CancelTask, + request_deserializer=a2a__pb2.CancelTaskRequest.FromString, + response_serializer=a2a__pb2.Task.SerializeToString, + ), + 'SubscribeToTask': grpc.unary_stream_rpc_method_handler( + servicer.SubscribeToTask, + request_deserializer=a2a__pb2.SubscribeToTaskRequest.FromString, + response_serializer=a2a__pb2.StreamResponse.SerializeToString, + ), + 'CreateTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.CreateTaskPushNotificationConfig, + request_deserializer=a2a__pb2.TaskPushNotificationConfig.FromString, + response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + ), + 'GetTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.GetTaskPushNotificationConfig, + request_deserializer=a2a__pb2.GetTaskPushNotificationConfigRequest.FromString, + response_serializer=a2a__pb2.TaskPushNotificationConfig.SerializeToString, + ), + 'ListTaskPushNotificationConfigs': grpc.unary_unary_rpc_method_handler( + servicer.ListTaskPushNotificationConfigs, + request_deserializer=a2a__pb2.ListTaskPushNotificationConfigsRequest.FromString, + response_serializer=a2a__pb2.ListTaskPushNotificationConfigsResponse.SerializeToString, + ), + 'GetExtendedAgentCard': grpc.unary_unary_rpc_method_handler( + servicer.GetExtendedAgentCard, + request_deserializer=a2a__pb2.GetExtendedAgentCardRequest.FromString, + response_serializer=a2a__pb2.AgentCard.SerializeToString, + ), + 'DeleteTaskPushNotificationConfig': grpc.unary_unary_rpc_method_handler( + servicer.DeleteTaskPushNotificationConfig, + request_deserializer=a2a__pb2.DeleteTaskPushNotificationConfigRequest.FromString, + response_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString, + ), + } + generic_handler = grpc.method_handlers_generic_handler( + 'lf.a2a.v1.A2AService', rpc_method_handlers) + server.add_generic_rpc_handlers((generic_handler,)) + server.add_registered_method_handlers('lf.a2a.v1.A2AService', rpc_method_handlers) + + + # This class is part of an EXPERIMENTAL API. +class A2AService(object): + """Provides operations for interacting with agents using the A2A protocol. + """ + + @staticmethod + def SendMessage(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/SendMessage', + a2a__pb2.SendMessageRequest.SerializeToString, + a2a__pb2.SendMessageResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SendStreamingMessage(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/lf.a2a.v1.A2AService/SendStreamingMessage', + a2a__pb2.SendMessageRequest.SerializeToString, + a2a__pb2.StreamResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetTask(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/GetTask', + a2a__pb2.GetTaskRequest.SerializeToString, + a2a__pb2.Task.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListTasks(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/ListTasks', + a2a__pb2.ListTasksRequest.SerializeToString, + a2a__pb2.ListTasksResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CancelTask(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/CancelTask', + a2a__pb2.CancelTaskRequest.SerializeToString, + a2a__pb2.Task.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def SubscribeToTask(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_stream( + request, + target, + '/lf.a2a.v1.A2AService/SubscribeToTask', + a2a__pb2.SubscribeToTaskRequest.SerializeToString, + a2a__pb2.StreamResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def CreateTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/CreateTaskPushNotificationConfig', + a2a__pb2.TaskPushNotificationConfig.SerializeToString, + a2a__pb2.TaskPushNotificationConfig.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/GetTaskPushNotificationConfig', + a2a__pb2.GetTaskPushNotificationConfigRequest.SerializeToString, + a2a__pb2.TaskPushNotificationConfig.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def ListTaskPushNotificationConfigs(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/ListTaskPushNotificationConfigs', + a2a__pb2.ListTaskPushNotificationConfigsRequest.SerializeToString, + a2a__pb2.ListTaskPushNotificationConfigsResponse.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def GetExtendedAgentCard(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/GetExtendedAgentCard', + a2a__pb2.GetExtendedAgentCardRequest.SerializeToString, + a2a__pb2.AgentCard.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) + + @staticmethod + def DeleteTaskPushNotificationConfig(request, + target, + options=(), + channel_credentials=None, + call_credentials=None, + insecure=False, + compression=None, + wait_for_ready=None, + timeout=None, + metadata=None): + return grpc.experimental.unary_unary( + request, + target, + '/lf.a2a.v1.A2AService/DeleteTaskPushNotificationConfig', + a2a__pb2.DeleteTaskPushNotificationConfigRequest.SerializeToString, + google_dot_protobuf_dot_empty__pb2.Empty.FromString, + options, + channel_credentials, + insecure, + call_credentials, + compression, + wait_for_ready, + timeout, + metadata, + _registered_method=True) diff --git a/src/a2a/utils/__init__.py b/src/a2a/utils/__init__.py index e5b5663dd..04693dd0b 100644 --- a/src/a2a/utils/__init__.py +++ b/src/a2a/utils/__init__.py @@ -1,58 +1,18 @@ """Utility functions for the A2A Python SDK.""" -from a2a.utils.artifact import ( - get_artifact_text, - new_artifact, - new_data_artifact, - new_text_artifact, -) +from a2a.utils import proto_utils from a2a.utils.constants import ( AGENT_CARD_WELL_KNOWN_PATH, DEFAULT_RPC_URL, - EXTENDED_AGENT_CARD_PATH, - PREV_AGENT_CARD_WELL_KNOWN_PATH, -) -from a2a.utils.helpers import ( - append_artifact_to_task, - are_modalities_compatible, - build_text_artifact, - create_task_obj, -) -from a2a.utils.message import ( - get_message_text, - new_agent_parts_message, - new_agent_text_message, -) -from a2a.utils.parts import ( - get_data_parts, - get_file_parts, - get_text_parts, -) -from a2a.utils.task import ( - completed_task, - new_task, + TransportProtocol, ) +from a2a.utils.proto_utils import to_stream_response __all__ = [ 'AGENT_CARD_WELL_KNOWN_PATH', 'DEFAULT_RPC_URL', - 'EXTENDED_AGENT_CARD_PATH', - 'PREV_AGENT_CARD_WELL_KNOWN_PATH', - 'append_artifact_to_task', - 'are_modalities_compatible', - 'build_text_artifact', - 'completed_task', - 'create_task_obj', - 'get_artifact_text', - 'get_data_parts', - 'get_file_parts', - 'get_message_text', - 'get_text_parts', - 'new_agent_parts_message', - 'new_agent_text_message', - 'new_artifact', - 'new_data_artifact', - 'new_task', - 'new_text_artifact', + 'TransportProtocol', + 'proto_utils', + 'to_stream_response', ] diff --git a/src/a2a/utils/artifact.py b/src/a2a/utils/artifact.py deleted file mode 100644 index 5053ca421..000000000 --- a/src/a2a/utils/artifact.py +++ /dev/null @@ -1,88 +0,0 @@ -"""Utility functions for creating A2A Artifact objects.""" - -import uuid - -from typing import Any - -from a2a.types import Artifact, DataPart, Part, TextPart -from a2a.utils.parts import get_text_parts - - -def new_artifact( - parts: list[Part], - name: str, - description: str | None = None, -) -> Artifact: - """Creates a new Artifact object. - - Args: - parts: The list of `Part` objects forming the artifact's content. - name: The human-readable name of the artifact. - description: An optional description of the artifact. - - Returns: - A new `Artifact` object with a generated artifact_id. - """ - return Artifact( - artifact_id=str(uuid.uuid4()), - parts=parts, - name=name, - description=description, - ) - - -def new_text_artifact( - name: str, - text: str, - description: str | None = None, -) -> Artifact: - """Creates a new Artifact object containing only a single TextPart. - - Args: - name: The human-readable name of the artifact. - text: The text content of the artifact. - description: An optional description of the artifact. - - Returns: - A new `Artifact` object with a generated artifact_id. - """ - return new_artifact( - [Part(root=TextPart(text=text))], - name, - description, - ) - - -def new_data_artifact( - name: str, - data: dict[str, Any], - description: str | None = None, -) -> Artifact: - """Creates a new Artifact object containing only a single DataPart. - - Args: - name: The human-readable name of the artifact. - data: The structured data content of the artifact. - description: An optional description of the artifact. - - Returns: - A new `Artifact` object with a generated artifact_id. - """ - return new_artifact( - [Part(root=DataPart(data=data))], - name, - description, - ) - - -def get_artifact_text(artifact: Artifact, delimiter: str = '\n') -> str: - """Extracts and joins all text content from an Artifact's parts. - - Args: - artifact: The `Artifact` object. - delimiter: The string to use when joining text from multiple TextParts. - - Returns: - A single string containing all text content, or an empty string if no text parts are found. - """ - return delimiter.join(get_text_parts(artifact.parts)) diff --git a/src/a2a/utils/constants.py b/src/a2a/utils/constants.py index 2935251a5..5497d8a24 100644 --- a/src/a2a/utils/constants.py +++ b/src/a2a/utils/constants.py @@ -1,6 +1,28 @@ """Constants for well-known URIs used throughout the A2A Python SDK.""" +from enum import Enum + + AGENT_CARD_WELL_KNOWN_PATH = '/.well-known/agent-card.json' -PREV_AGENT_CARD_WELL_KNOWN_PATH = '/.well-known/agent.json' -EXTENDED_AGENT_CARD_PATH = '/agent/authenticatedExtendedCard' DEFAULT_RPC_URL = '/' +DEFAULT_LIST_TASKS_PAGE_SIZE = 50 +"""Default page size for the `tasks/list` method.""" + +MAX_LIST_TASKS_PAGE_SIZE = 100 +"""Maximum page size for the `tasks/list` method.""" + + +class TransportProtocol(str, Enum): + """Transport protocol string constants.""" + + JSONRPC = 'JSONRPC' + HTTP_JSON = 'HTTP+JSON' + GRPC = 'GRPC' + + +JSONRPC_PARSE_ERROR_CODE = -32700 +VERSION_HEADER = 'A2A-Version' + +PROTOCOL_VERSION_1_0 = '1.0' +PROTOCOL_VERSION_0_3 = '0.3' +PROTOCOL_VERSION_CURRENT = PROTOCOL_VERSION_1_0 diff --git a/src/a2a/utils/error_handlers.py b/src/a2a/utils/error_handlers.py index 53cdb9f56..ea544d79d 100644 --- a/src/a2a/utils/error_handlers.py +++ b/src/a2a/utils/error_handlers.py @@ -1,7 +1,8 @@ import functools +import inspect import logging -from collections.abc import Awaitable, Callable, Coroutine +from collections.abc import AsyncGenerator, Awaitable, Callable, Coroutine from typing import TYPE_CHECKING, Any @@ -15,77 +16,119 @@ Response = Any -from a2a._base import A2ABaseModel -from a2a.types import ( - AuthenticatedExtendedCardNotConfiguredError, - ContentTypeNotSupportedError, +from google.protobuf.json_format import ParseError + +from a2a.utils.errors import ( + A2A_REST_ERROR_MAPPING, + A2AError, InternalError, - InvalidAgentResponseError, - InvalidParamsError, - InvalidRequestError, - JSONParseError, - MethodNotFoundError, - PushNotificationNotSupportedError, - TaskNotCancelableError, - TaskNotFoundError, - UnsupportedOperationError, + RestErrorMap, ) -from a2a.utils.errors import ServerError logger = logging.getLogger(__name__) -A2AErrorToHttpStatus: dict[type[A2ABaseModel], int] = { - JSONParseError: 400, - InvalidRequestError: 400, - MethodNotFoundError: 404, - InvalidParamsError: 422, - InternalError: 500, - TaskNotFoundError: 404, - TaskNotCancelableError: 409, - PushNotificationNotSupportedError: 501, - UnsupportedOperationError: 501, - ContentTypeNotSupportedError: 415, - InvalidAgentResponseError: 502, - AuthenticatedExtendedCardNotConfiguredError: 404, -} + +def _build_error_payload( + code: int, + status: str, + message: str, + reason: str | None = None, + metadata: dict[str, Any] | None = None, +) -> dict[str, Any]: + """Helper function to build the JSON error payload.""" + payload: dict[str, Any] = { + 'code': code, + 'status': status, + 'message': message, + } + if reason: + payload['details'] = [ + { + '@type': 'type.googleapis.com/google.rpc.ErrorInfo', + 'reason': reason, + 'domain': 'a2a-protocol.org', + 'metadata': metadata if metadata is not None else {}, + } + ] + return {'error': payload} + + +def build_rest_error_payload(error: Exception) -> dict[str, Any]: + """Build a REST error payload dict from an exception. + + Returns: + A dict with the error payload in the standard REST error format. + """ + if isinstance(error, A2AError): + mapping = A2A_REST_ERROR_MAPPING.get( + type(error), RestErrorMap(500, 'INTERNAL', 'INTERNAL_ERROR') + ) + # SECURITY WARNING: Data attached to A2AError.data is serialized unaltered and exposed publicly to the client in the REST API response. + metadata = getattr(error, 'data', None) or {} + return _build_error_payload( + code=mapping.http_code, + status=mapping.grpc_status, + message=getattr(error, 'message', str(error)), + reason=mapping.reason, + metadata=metadata, + ) + if isinstance(error, ParseError): + return _build_error_payload( + code=400, + status='INVALID_ARGUMENT', + message=str(error), + reason='INVALID_REQUEST', + metadata={}, + ) + return _build_error_payload( + code=500, + status='INTERNAL', + message='unknown exception', + ) + + +def _create_error_response(error: Exception) -> Response: + """Helper function to create a JSONResponse for an error.""" + if isinstance(error, A2AError): + log_level = ( + logging.ERROR + if isinstance(error, InternalError) + else logging.WARNING + ) + logger.log( + log_level, + "Request error: Code=%s, Message='%s'%s", + getattr(error, 'code', 'N/A'), + getattr(error, 'message', str(error)), + f', Data={error.data}' if error.data else '', + ) + elif isinstance(error, ParseError): + logger.warning('Parse error: %s', str(error)) + else: + logger.exception('Unknown error occurred') + + payload = build_rest_error_payload(error) + # Extract HTTP status code from the payload + http_code = payload.get('error', {}).get('code', 500) + return JSONResponse( + content=payload, + status_code=http_code, + media_type='application/json', + ) def rest_error_handler( func: Callable[..., Awaitable[Response]], ) -> Callable[..., Awaitable[Response]]: - """Decorator to catch ServerError and map it to an appropriate JSONResponse.""" + """Decorator to catch A2AError and map it to an appropriate JSONResponse.""" @functools.wraps(func) async def wrapper(*args: Any, **kwargs: Any) -> Response: try: return await func(*args, **kwargs) - except ServerError as e: - error = e.error or InternalError( - message='Internal error due to unknown reason' - ) - http_code = A2AErrorToHttpStatus.get(type(error), 500) - - log_level = ( - logging.ERROR - if isinstance(error, InternalError) - else logging.WARNING - ) - logger.log( - log_level, - "Request error: Code=%s, Message='%s'%s", - error.code, - error.message, - ', Data=' + str(error.data) if error.data else '', - ) - return JSONResponse( - content={'message': error.message}, status_code=http_code - ) - except Exception: - logger.exception('Unknown error occurred') - return JSONResponse( - content={'message': 'unknown exception'}, status_code=500 - ) + except Exception as error: # noqa: BLE001 + return _create_error_response(error) return wrapper @@ -93,17 +136,10 @@ async def wrapper(*args: Any, **kwargs: Any) -> Response: def rest_stream_error_handler( func: Callable[..., Coroutine[Any, Any, Any]], ) -> Callable[..., Coroutine[Any, Any, Any]]: - """Decorator to catch ServerError for a streaming method,log it and then rethrow it to be handled by framework.""" - - @functools.wraps(func) - async def wrapper(*args: Any, **kwargs: Any) -> Any: - try: - return await func(*args, **kwargs) - except ServerError as e: - error = e.error or InternalError( - message='Internal error due to unknown reason' - ) + """Decorator to catch A2AError for a streaming method. Maps synchronous errors to JSONResponse and logs streaming errors.""" + def _log_error(error: Exception) -> None: + if isinstance(error, A2AError): log_level = ( logging.ERROR if isinstance(error, InternalError) @@ -112,18 +148,40 @@ async def wrapper(*args: Any, **kwargs: Any) -> Any: logger.log( log_level, "Request error: Code=%s, Message='%s'%s", - error.code, - error.message, - ', Data=' + str(error.data) if error.data else '', + getattr(error, 'code', 'N/A'), + getattr(error, 'message', str(error)), + f', Data={error.data}' if error.data else '', ) - # Since the stream has started, we can't return a JSONResponse. - # Instead, we run the error handling logic (provides logging) - # and reraise the error and let server framework manage - raise e - except Exception as e: - # Since the stream has started, we can't return a JSONResponse. - # Instead, we run the error handling logic (provides logging) - # and reraise the error and let server framework manage - raise e + else: + logger.exception('Unknown streaming error occurred') + + @functools.wraps(func) + async def wrapper(*args: Any, **kwargs: Any) -> Any: + try: + response = await func(*args, **kwargs) + + # If the response has an async generator body (like EventSourceResponse), + # we must wrap it to catch errors that occur during stream execution. + if hasattr(response, 'body_iterator') and inspect.isasyncgen( + response.body_iterator + ): + original_iterator = response.body_iterator + + async def error_catching_iterator() -> AsyncGenerator[ + Any, None + ]: + try: + async for item in original_iterator: + yield item + except Exception as stream_error: + _log_error(stream_error) + raise stream_error + + response.body_iterator = error_catching_iterator() + + except Exception as e: # noqa: BLE001 + return _create_error_response(e) + else: + return response return wrapper diff --git a/src/a2a/utils/errors.py b/src/a2a/utils/errors.py index f2b6cc2b4..c87fa7372 100644 --- a/src/a2a/utils/errors.py +++ b/src/a2a/utils/errors.py @@ -1,82 +1,198 @@ -"""Custom exceptions for A2A server-side errors.""" - -from a2a.types import ( - AuthenticatedExtendedCardNotConfiguredError, - ContentTypeNotSupportedError, - InternalError, - InvalidAgentResponseError, - InvalidParamsError, - InvalidRequestError, - JSONParseError, - JSONRPCError, - MethodNotFoundError, - PushNotificationNotSupportedError, - TaskNotCancelableError, - TaskNotFoundError, - UnsupportedOperationError, -) - - -class A2AServerError(Exception): - """Base exception for A2A Server errors.""" - - -class MethodNotImplementedError(A2AServerError): - """Exception raised for methods that are not implemented by the server handler.""" - - def __init__( - self, message: str = 'This method is not implemented by the server' - ): - """Initializes the MethodNotImplementedError. - - Args: - message: A descriptive error message. - """ - self.message = message - super().__init__(f'Not Implemented operation Error: {message}') - - -class ServerError(Exception): - """Wrapper exception for A2A or JSON-RPC errors originating from the server's logic. - - This exception is used internally by request handlers and other server components - to signal a specific error that should be formatted as a JSON-RPC error response. - """ - - def __init__( - self, - error: ( - JSONRPCError - | JSONParseError - | InvalidRequestError - | MethodNotFoundError - | InvalidParamsError - | InternalError - | TaskNotFoundError - | TaskNotCancelableError - | PushNotificationNotSupportedError - | UnsupportedOperationError - | ContentTypeNotSupportedError - | InvalidAgentResponseError - | AuthenticatedExtendedCardNotConfiguredError - | None - ), - ): - """Initializes the ServerError. - - Args: - error: The specific A2A or JSON-RPC error model instance. - """ - self.error = error - - def __str__(self) -> str: - """Returns a readable representation of the internal Pydantic error.""" - if self.error is None: - return 'None' - if self.error.message is None: - return self.error.__class__.__name__ - return self.error.message - - def __repr__(self) -> str: - """Returns an unambiguous representation for developers showing how the ServerError was constructed with the internal Pydantic error.""" - return f'{self.__class__.__name__}({self.error!r})' +"""Custom exceptions and error types for A2A server-side errors. + +This module contains A2A-specific error codes, +as well as server exception classes. +""" + +from typing import NamedTuple + + +class RestErrorMap(NamedTuple): + """Named tuple mapping HTTP status, gRPC status, and reason strings.""" + + http_code: int + grpc_status: str + reason: str + + +class A2AError(Exception): + """Base exception for A2A errors.""" + + message: str = 'A2A Error' + data: dict | None = None + + def __init__(self, message: str | None = None, data: dict | None = None): + if message: + self.message = message + self.data = data + super().__init__(self.message) + + +class TaskNotFoundError(A2AError): + """Exception raised when a task is not found.""" + + message = 'Task not found' + + +class TaskNotCancelableError(A2AError): + """Exception raised when a task cannot be canceled.""" + + message = 'Task cannot be canceled' + + +class PushNotificationNotSupportedError(A2AError): + """Exception raised when push notifications are not supported.""" + + message = 'Push Notification is not supported' + + +class UnsupportedOperationError(A2AError): + """Exception raised when an operation is not supported.""" + + message = 'This operation is not supported' + + +class ContentTypeNotSupportedError(A2AError): + """Exception raised when the content type is incompatible.""" + + message = 'Incompatible content types' + + +class InternalError(A2AError): + """Exception raised for internal server errors.""" + + message = 'Internal error' + + +class InvalidAgentResponseError(A2AError): + """Exception raised when the agent response is invalid.""" + + message = 'Invalid agent response' + + +class ExtendedAgentCardNotConfiguredError(A2AError): + """Exception raised when the authenticated extended card is not configured.""" + + message = 'Authenticated Extended Card is not configured' + + +class InvalidParamsError(A2AError): + """Exception raised when parameters are invalid.""" + + message = 'Invalid params' + + +class InvalidRequestError(A2AError): + """Exception raised when the request is invalid.""" + + message = 'Invalid Request' + + +class MethodNotFoundError(A2AError): + """Exception raised when a method is not found.""" + + message = 'Method not found' + + +class ExtensionSupportRequiredError(A2AError): + """Exception raised when extension support is required but not present.""" + + message = 'Extension support required' + + +class VersionNotSupportedError(A2AError): + """Exception raised when the requested version is not supported.""" + + message = 'Version not supported' + + +# For backward compatibility if needed, or just aliases for clean refactor +# We remove the Pydantic models here. + +__all__ = [ + 'A2A_ERROR_REASONS', + 'A2A_REASON_TO_ERROR', + 'A2A_REST_ERROR_MAPPING', + 'JSON_RPC_ERROR_CODE_MAP', + 'ExtensionSupportRequiredError', + 'InternalError', + 'InvalidAgentResponseError', + 'InvalidParamsError', + 'InvalidRequestError', + 'MethodNotFoundError', + 'PushNotificationNotSupportedError', + 'RestErrorMap', + 'TaskNotCancelableError', + 'TaskNotFoundError', + 'UnsupportedOperationError', + 'VersionNotSupportedError', +] + + +JSON_RPC_ERROR_CODE_MAP: dict[type[A2AError], int] = { + TaskNotFoundError: -32001, + TaskNotCancelableError: -32002, + PushNotificationNotSupportedError: -32003, + UnsupportedOperationError: -32004, + ContentTypeNotSupportedError: -32005, + InvalidAgentResponseError: -32006, + ExtendedAgentCardNotConfiguredError: -32007, + ExtensionSupportRequiredError: -32008, + VersionNotSupportedError: -32009, + InvalidParamsError: -32602, + InvalidRequestError: -32600, + MethodNotFoundError: -32601, + InternalError: -32603, +} + + +A2A_REST_ERROR_MAPPING: dict[type[A2AError], RestErrorMap] = { + TaskNotFoundError: RestErrorMap(404, 'NOT_FOUND', 'TASK_NOT_FOUND'), + TaskNotCancelableError: RestErrorMap( + 409, 'FAILED_PRECONDITION', 'TASK_NOT_CANCELABLE' + ), + PushNotificationNotSupportedError: RestErrorMap( + 400, + 'UNIMPLEMENTED', + 'PUSH_NOTIFICATION_NOT_SUPPORTED', + ), + UnsupportedOperationError: RestErrorMap( + 400, 'UNIMPLEMENTED', 'UNSUPPORTED_OPERATION' + ), + ContentTypeNotSupportedError: RestErrorMap( + 415, + 'INVALID_ARGUMENT', + 'CONTENT_TYPE_NOT_SUPPORTED', + ), + InvalidAgentResponseError: RestErrorMap( + 502, 'INTERNAL', 'INVALID_AGENT_RESPONSE' + ), + ExtendedAgentCardNotConfiguredError: RestErrorMap( + 400, + 'FAILED_PRECONDITION', + 'EXTENDED_AGENT_CARD_NOT_CONFIGURED', + ), + ExtensionSupportRequiredError: RestErrorMap( + 400, + 'FAILED_PRECONDITION', + 'EXTENSION_SUPPORT_REQUIRED', + ), + VersionNotSupportedError: RestErrorMap( + 400, 'UNIMPLEMENTED', 'VERSION_NOT_SUPPORTED' + ), + InvalidParamsError: RestErrorMap(400, 'INVALID_ARGUMENT', 'INVALID_PARAMS'), + InvalidRequestError: RestErrorMap( + 400, 'INVALID_ARGUMENT', 'INVALID_REQUEST' + ), + MethodNotFoundError: RestErrorMap(404, 'NOT_FOUND', 'METHOD_NOT_FOUND'), + InternalError: RestErrorMap(500, 'INTERNAL', 'INTERNAL_ERROR'), +} + + +A2A_ERROR_REASONS = { + cls: mapping.reason for cls, mapping in A2A_REST_ERROR_MAPPING.items() +} + +A2A_REASON_TO_ERROR = { + mapping.reason: cls for cls, mapping in A2A_REST_ERROR_MAPPING.items() +} diff --git a/src/a2a/utils/helpers.py b/src/a2a/utils/helpers.py deleted file mode 100644 index cfc06c1a8..000000000 --- a/src/a2a/utils/helpers.py +++ /dev/null @@ -1,386 +0,0 @@ -"""General utility functions for the A2A Python SDK.""" - -import functools -import inspect -import json -import logging - -from collections.abc import Awaitable, Callable -from typing import Any, TypeVar -from uuid import uuid4 - -from a2a.types import ( - AgentCard, - Artifact, - MessageSendParams, - Part, - Task, - TaskArtifactUpdateEvent, - TaskState, - TaskStatus, - TextPart, -) -from a2a.utils.errors import ServerError, UnsupportedOperationError -from a2a.utils.telemetry import trace_function - - -T = TypeVar('T') - - -logger = logging.getLogger(__name__) - - -@trace_function() -def create_task_obj(message_send_params: MessageSendParams) -> Task: - """Create a new task object from message send params. - - Generates UUIDs for task and context IDs if they are not already present in the message. - - Args: - message_send_params: The `MessageSendParams` object containing the initial message. - - Returns: - A new `Task` object initialized with 'submitted' status and the input message in history. - """ - if not message_send_params.message.context_id: - message_send_params.message.context_id = str(uuid4()) - - return Task( - id=str(uuid4()), - context_id=message_send_params.message.context_id, - status=TaskStatus(state=TaskState.submitted), - history=[message_send_params.message], - ) - - -@trace_function() -def append_artifact_to_task(task: Task, event: TaskArtifactUpdateEvent) -> None: - """Helper method for updating a Task object with new artifact data from an event. - - Handles creating the artifacts list if it doesn't exist, adding new artifacts, - and appending parts to existing artifacts based on the `append` flag in the event. - - Args: - task: The `Task` object to modify. - event: The `TaskArtifactUpdateEvent` containing the artifact data. - """ - if not task.artifacts: - task.artifacts = [] - - new_artifact_data: Artifact = event.artifact - artifact_id: str = new_artifact_data.artifact_id - append_parts: bool = event.append or False - - existing_artifact: Artifact | None = None - existing_artifact_list_index: int | None = None - - # Find existing artifact by its id - for i, art in enumerate(task.artifacts): - if art.artifact_id == artifact_id: - existing_artifact = art - existing_artifact_list_index = i - break - - if not append_parts: - # This represents the first chunk for this artifact index. - if existing_artifact_list_index is not None: - # Replace the existing artifact entirely with the new data - logger.debug( - 'Replacing artifact at id %s for task %s', artifact_id, task.id - ) - task.artifacts[existing_artifact_list_index] = new_artifact_data - else: - # Append the new artifact since no artifact with this index exists yet - logger.debug( - 'Adding new artifact with id %s for task %s', - artifact_id, - task.id, - ) - task.artifacts.append(new_artifact_data) - elif existing_artifact: - # Append new parts to the existing artifact's part list - logger.debug( - 'Appending parts to artifact id %s for task %s', - artifact_id, - task.id, - ) - existing_artifact.parts.extend(new_artifact_data.parts) - else: - # We received a chunk to append, but we don't have an existing artifact. - # we will ignore this chunk - logger.warning( - 'Received append=True for nonexistent artifact index %s in task %s. Ignoring chunk.', - artifact_id, - task.id, - ) - - -def build_text_artifact(text: str, artifact_id: str) -> Artifact: - """Helper to create a text artifact. - - Args: - text: The text content for the artifact. - artifact_id: The ID for the artifact. - - Returns: - An `Artifact` object containing a single `TextPart`. - """ - text_part = TextPart(text=text) - part = Part(root=text_part) - return Artifact(parts=[part], artifact_id=artifact_id) - - -def validate( - expression: Callable[[Any], bool], error_message: str | None = None -) -> Callable: - """Decorator that validates if a given expression evaluates to True. - - Typically used on class methods to check capabilities or configuration - before executing the method's logic. If the expression is False, - a `ServerError` with an `UnsupportedOperationError` is raised. - - Args: - expression: A callable that takes the instance (`self`) as its argument - and returns a boolean. - error_message: An optional custom error message for the `UnsupportedOperationError`. - If None, the string representation of the expression will be used. - - Examples: - Demonstrating with an async method: - >>> import asyncio - >>> from a2a.utils.errors import ServerError - >>> - >>> class MyAgent: - ... def __init__(self, streaming_enabled: bool): - ... self.streaming_enabled = streaming_enabled - ... - ... @validate( - ... lambda self: self.streaming_enabled, - ... 'Streaming is not enabled for this agent', - ... ) - ... async def stream_response(self, message: str): - ... return f'Streaming: {message}' - >>> - >>> async def run_async_test(): - ... # Successful call - ... agent_ok = MyAgent(streaming_enabled=True) - ... result = await agent_ok.stream_response('hello') - ... print(result) - ... - ... # Call that fails validation - ... agent_fail = MyAgent(streaming_enabled=False) - ... try: - ... await agent_fail.stream_response('world') - ... except ServerError as e: - ... print(e.error.message) - >>> - >>> asyncio.run(run_async_test()) - Streaming: hello - Streaming is not enabled for this agent - - Demonstrating with a sync method: - >>> class SecureAgent: - ... def __init__(self): - ... self.auth_enabled = False - ... - ... @validate( - ... lambda self: self.auth_enabled, - ... 'Authentication must be enabled for this operation', - ... ) - ... def secure_operation(self, data: str): - ... return f'Processing secure data: {data}' - >>> - >>> # Error case example - >>> agent = SecureAgent() - >>> try: - ... agent.secure_operation('secret') - ... except ServerError as e: - ... print(e.error.message) - Authentication must be enabled for this operation - - Note: - This decorator works with both sync and async methods automatically. - """ - - def decorator(function: Callable) -> Callable: - if inspect.iscoroutinefunction(function): - - @functools.wraps(function) - async def async_wrapper(self: Any, *args, **kwargs) -> Any: - if not expression(self): - final_message = error_message or str(expression) - logger.error('Unsupported Operation: %s', final_message) - raise ServerError( - UnsupportedOperationError(message=final_message) - ) - return await function(self, *args, **kwargs) - - return async_wrapper - - @functools.wraps(function) - def sync_wrapper(self: Any, *args, **kwargs) -> Any: - if not expression(self): - final_message = error_message or str(expression) - logger.error('Unsupported Operation: %s', final_message) - raise ServerError( - UnsupportedOperationError(message=final_message) - ) - return function(self, *args, **kwargs) - - return sync_wrapper - - return decorator - - -def validate_async_generator( - expression: Callable[[Any], bool], error_message: str | None = None -): - """Decorator that validates if a given expression evaluates to True for async generators. - - Typically used on class methods to check capabilities or configuration - before executing the method's logic. If the expression is False, - a `ServerError` with an `UnsupportedOperationError` is raised. - - Args: - expression: A callable that takes the instance (`self`) as its argument - and returns a boolean. - error_message: An optional custom error message for the `UnsupportedOperationError`. - If None, the string representation of the expression will be used. - - Examples: - Streaming capability validation with success case: - >>> import asyncio - >>> from a2a.utils.errors import ServerError - >>> - >>> class StreamingAgent: - ... def __init__(self, streaming_enabled: bool): - ... self.streaming_enabled = streaming_enabled - ... - ... @validate_async_generator( - ... lambda self: self.streaming_enabled, - ... 'Streaming is not supported by this agent', - ... ) - ... async def stream_messages(self, count: int): - ... for i in range(count): - ... yield f'Message {i}' - >>> - >>> async def run_streaming_test(): - ... # Successful streaming - ... agent = StreamingAgent(streaming_enabled=True) - ... async for msg in agent.stream_messages(2): - ... print(msg) - >>> - >>> asyncio.run(run_streaming_test()) - Message 0 - Message 1 - - Error case - validation fails: - >>> class FeatureAgent: - ... def __init__(self): - ... self.features = {'real_time': False} - ... - ... @validate_async_generator( - ... lambda self: self.features.get('real_time', False), - ... 'Real-time feature must be enabled to stream updates', - ... ) - ... async def real_time_updates(self): - ... yield 'This should not be yielded' - >>> - >>> async def run_error_test(): - ... agent = FeatureAgent() - ... try: - ... async for _ in agent.real_time_updates(): - ... pass - ... except ServerError as e: - ... print(e.error.message) - >>> - >>> asyncio.run(run_error_test()) - Real-time feature must be enabled to stream updates - - Note: - This decorator is specifically for async generator methods (async def with yield). - The validation happens before the generator starts yielding values. - """ - - def decorator(function): - @functools.wraps(function) - async def wrapper(self, *args, **kwargs): - if not expression(self): - final_message = error_message or str(expression) - logger.error('Unsupported Operation: %s', final_message) - raise ServerError( - UnsupportedOperationError(message=final_message) - ) - async for i in function(self, *args, **kwargs): - yield i - - return wrapper - - return decorator - - -def are_modalities_compatible( - server_output_modes: list[str] | None, client_output_modes: list[str] | None -) -> bool: - """Checks if server and client output modalities (MIME types) are compatible. - - Modalities are compatible if: - 1. The client specifies no preferred output modes (client_output_modes is None or empty). - 2. The server specifies no supported output modes (server_output_modes is None or empty). - 3. There is at least one common modality between the server's supported list and the client's preferred list. - - Args: - server_output_modes: A list of MIME types supported by the server/agent for output. - Can be None or empty if the server doesn't specify. - client_output_modes: A list of MIME types preferred by the client for output. - Can be None or empty if the client accepts any. - - Returns: - True if the modalities are compatible, False otherwise. - """ - if client_output_modes is None or len(client_output_modes) == 0: - return True - - if server_output_modes is None or len(server_output_modes) == 0: - return True - - return any(x in server_output_modes for x in client_output_modes) - - -def _clean_empty(d: Any) -> Any: - """Recursively remove empty strings, lists and dicts from a dictionary.""" - if isinstance(d, dict): - cleaned_dict = { - k: cleaned_v - for k, v in d.items() - if (cleaned_v := _clean_empty(v)) is not None - } - return cleaned_dict or None - if isinstance(d, list): - cleaned_list = [ - cleaned_v for v in d if (cleaned_v := _clean_empty(v)) is not None - ] - return cleaned_list or None - if isinstance(d, str) and not d: - return None - return d - - -def canonicalize_agent_card(agent_card: AgentCard) -> str: - """Canonicalizes the Agent Card JSON according to RFC 8785 (JCS).""" - card_dict = agent_card.model_dump( - exclude={'signatures'}, - exclude_defaults=True, - exclude_none=True, - by_alias=True, - ) - # Recursively remove empty values - cleaned_dict = _clean_empty(card_dict) - return json.dumps(cleaned_dict, separators=(',', ':'), sort_keys=True) - - -async def maybe_await(value: T | Awaitable[T]) -> T: - """Awaits a value if it's awaitable, otherwise simply provides it back.""" - if inspect.isawaitable(value): - return await value - return value diff --git a/src/a2a/utils/message.py b/src/a2a/utils/message.py deleted file mode 100644 index bfd675fdf..000000000 --- a/src/a2a/utils/message.py +++ /dev/null @@ -1,72 +0,0 @@ -"""Utility functions for creating and handling A2A Message objects.""" - -import uuid - -from a2a.types import ( - Message, - Part, - Role, - TextPart, -) -from a2a.utils.parts import get_text_parts - - -def new_agent_text_message( - text: str, - context_id: str | None = None, - task_id: str | None = None, -) -> Message: - """Creates a new agent message containing a single TextPart. - - Args: - text: The text content of the message. - context_id: The context ID for the message. - task_id: The task ID for the message. - - Returns: - A new `Message` object with role 'agent'. - """ - return Message( - role=Role.agent, - parts=[Part(root=TextPart(text=text))], - message_id=str(uuid.uuid4()), - task_id=task_id, - context_id=context_id, - ) - - -def new_agent_parts_message( - parts: list[Part], - context_id: str | None = None, - task_id: str | None = None, -) -> Message: - """Creates a new agent message containing a list of Parts. - - Args: - parts: The list of `Part` objects for the message content. - context_id: The context ID for the message. - task_id: The task ID for the message. - - Returns: - A new `Message` object with role 'agent'. - """ - return Message( - role=Role.agent, - parts=parts, - message_id=str(uuid.uuid4()), - task_id=task_id, - context_id=context_id, - ) - - -def get_message_text(message: Message, delimiter: str = '\n') -> str: - """Extracts and joins all text content from a Message's parts. - - Args: - message: The `Message` object. - delimiter: The string to use when joining text from multiple TextParts. - - Returns: - A single string containing all text content, or an empty string if no text parts are found. - """ - return delimiter.join(get_text_parts(message.parts)) diff --git a/src/a2a/utils/parts.py b/src/a2a/utils/parts.py deleted file mode 100644 index f32076c8c..000000000 --- a/src/a2a/utils/parts.py +++ /dev/null @@ -1,48 +0,0 @@ -"""Utility functions for creating and handling A2A Parts objects.""" - -from typing import Any - -from a2a.types import ( - DataPart, - FilePart, - FileWithBytes, - FileWithUri, - Part, - TextPart, -) - - -def get_text_parts(parts: list[Part]) -> list[str]: - """Extracts text content from all TextPart objects in a list of Parts. - - Args: - parts: A list of `Part` objects. - - Returns: - A list of strings containing the text content from any `TextPart` objects found. - """ - return [part.root.text for part in parts if isinstance(part.root, TextPart)] - - -def get_data_parts(parts: list[Part]) -> list[dict[str, Any]]: - """Extracts dictionary data from all DataPart objects in a list of Parts. - - Args: - parts: A list of `Part` objects. - - Returns: - A list of dictionaries containing the data from any `DataPart` objects found. - """ - return [part.root.data for part in parts if isinstance(part.root, DataPart)] - - -def get_file_parts(parts: list[Part]) -> list[FileWithBytes | FileWithUri]: - """Extracts file data from all FilePart objects in a list of Parts. - - Args: - parts: A list of `Part` objects. - - Returns: - A list of `FileWithBytes` or `FileWithUri` objects containing the file data from any `FilePart` objects found. - """ - return [part.root.file for part in parts if isinstance(part.root, FilePart)] diff --git a/src/a2a/utils/proto_utils.py b/src/a2a/utils/proto_utils.py index 57272c89e..f77593297 100644 --- a/src/a2a/utils/proto_utils.py +++ b/src/a2a/utils/proto_utils.py @@ -1,49 +1,73 @@ -# mypy: disable-error-code="arg-type" -"""Utils for converting between proto and Python types.""" - -import json -import logging -import re - -from typing import Any - -from google.protobuf import json_format, struct_pb2 - -from a2a import types -from a2a.grpc import a2a_pb2 -from a2a.utils.errors import ServerError - - -logger = logging.getLogger(__name__) - - -# Regexp patterns for matching -_TASK_NAME_MATCH = re.compile(r'tasks/([^/]+)') -_TASK_PUSH_CONFIG_NAME_MATCH = re.compile( - r'tasks/([^/]+)/pushNotificationConfigs/([^/]+)' +# Copyright 2025 Google LLC +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for working with proto types. + +This module provides helper functions for common proto type operations. +""" + +from typing import TYPE_CHECKING, Any, TypedDict + +from google.api.field_behavior_pb2 import FieldBehavior, field_behavior +from google.protobuf.descriptor import FieldDescriptor +from google.protobuf.json_format import ParseDict +from google.protobuf.message import Message as ProtobufMessage +from google.rpc import error_details_pb2 + +from a2a.utils.errors import InvalidParamsError + + +if TYPE_CHECKING: + from starlette.datastructures import QueryParams +else: + try: + from starlette.datastructures import QueryParams + except ImportError: + QueryParams = Any + +from a2a.types.a2a_pb2 import ( + Message, + StreamResponse, + Task, + TaskArtifactUpdateEvent, + TaskStatusUpdateEvent, ) -def dict_to_struct(dictionary: dict[str, Any]) -> struct_pb2.Struct: - """Converts a Python dict to a Struct proto. +# Define Event type locally to avoid circular imports +Event = Message | Task | TaskStatusUpdateEvent | TaskArtifactUpdateEvent - Unfortunately, using `json_format.ParseDict` does not work because this - wants the dictionary to be an exact match of the Struct proto with fields - and keys and values, not the traditional Python dict structure. + +def to_stream_response(event: Event) -> StreamResponse: + """Convert internal Event to StreamResponse proto. Args: - dictionary: The Python dict to convert. + event: The event (Task, Message, TaskStatusUpdateEvent, TaskArtifactUpdateEvent) Returns: - The Struct proto. + A StreamResponse proto with the appropriate field set. """ - struct = struct_pb2.Struct() - for key, val in dictionary.items(): - if isinstance(val, dict): - struct[key] = dict_to_struct(val) - else: - struct[key] = val - return struct + response = StreamResponse() + if isinstance(event, Task): + response.task.CopyFrom(event) + elif isinstance(event, Message): + response.message.CopyFrom(event) + elif isinstance(event, TaskStatusUpdateEvent): + response.status_update.CopyFrom(event) + elif isinstance(event, TaskArtifactUpdateEvent): + response.artifact_update.CopyFrom(event) + return response def make_dict_serializable(value: Any) -> Any: @@ -126,975 +150,172 @@ def parse_string_integers_in_dict(value: Any, max_safe_digits: int = 15) -> Any: return value -class ToProto: - """Converts Python types to proto types.""" - - @classmethod - def message(cls, message: types.Message | None) -> a2a_pb2.Message | None: - if message is None: - return None - return a2a_pb2.Message( - message_id=message.message_id, - content=[cls.part(p) for p in message.parts], - context_id=message.context_id or '', - task_id=message.task_id or '', - role=cls.role(message.role), - metadata=cls.metadata(message.metadata), - extensions=message.extensions or [], - ) - - @classmethod - def metadata( - cls, metadata: dict[str, Any] | None - ) -> struct_pb2.Struct | None: - if metadata is None: - return None - return dict_to_struct(metadata) - - @classmethod - def part(cls, part: types.Part) -> a2a_pb2.Part: - if isinstance(part.root, types.TextPart): - return a2a_pb2.Part( - text=part.root.text, metadata=cls.metadata(part.root.metadata) - ) - if isinstance(part.root, types.FilePart): - return a2a_pb2.Part( - file=cls.file(part.root.file), - metadata=cls.metadata(part.root.metadata), - ) - if isinstance(part.root, types.DataPart): - return a2a_pb2.Part( - data=cls.data(part.root.data), - metadata=cls.metadata(part.root.metadata), - ) - raise ValueError(f'Unsupported part type: {part.root}') - - @classmethod - def data(cls, data: dict[str, Any]) -> a2a_pb2.DataPart: - return a2a_pb2.DataPart(data=dict_to_struct(data)) - - @classmethod - def file( - cls, file: types.FileWithUri | types.FileWithBytes - ) -> a2a_pb2.FilePart: - if isinstance(file, types.FileWithUri): - return a2a_pb2.FilePart( - file_with_uri=file.uri, mime_type=file.mime_type, name=file.name - ) - return a2a_pb2.FilePart( - file_with_bytes=file.bytes.encode('utf-8'), - mime_type=file.mime_type, - name=file.name, - ) - - @classmethod - def task(cls, task: types.Task) -> a2a_pb2.Task: - return a2a_pb2.Task( - id=task.id, - context_id=task.context_id, - status=cls.task_status(task.status), - artifacts=( - [cls.artifact(a) for a in task.artifacts] - if task.artifacts - else None - ), - history=( - [cls.message(h) for h in task.history] # type: ignore[misc] - if task.history - else None - ), - metadata=cls.metadata(task.metadata), - ) - - @classmethod - def task_status(cls, status: types.TaskStatus) -> a2a_pb2.TaskStatus: - return a2a_pb2.TaskStatus( - state=cls.task_state(status.state), - update=cls.message(status.message), - ) - - @classmethod - def task_state(cls, state: types.TaskState) -> a2a_pb2.TaskState: - match state: - case types.TaskState.submitted: - return a2a_pb2.TaskState.TASK_STATE_SUBMITTED - case types.TaskState.working: - return a2a_pb2.TaskState.TASK_STATE_WORKING - case types.TaskState.completed: - return a2a_pb2.TaskState.TASK_STATE_COMPLETED - case types.TaskState.canceled: - return a2a_pb2.TaskState.TASK_STATE_CANCELLED - case types.TaskState.failed: - return a2a_pb2.TaskState.TASK_STATE_FAILED - case types.TaskState.input_required: - return a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED - case types.TaskState.auth_required: - return a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED - case types.TaskState.rejected: - return a2a_pb2.TaskState.TASK_STATE_REJECTED - case _: - return a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED - - @classmethod - def artifact(cls, artifact: types.Artifact) -> a2a_pb2.Artifact: - return a2a_pb2.Artifact( - artifact_id=artifact.artifact_id, - description=artifact.description, - metadata=cls.metadata(artifact.metadata), - name=artifact.name, - parts=[cls.part(p) for p in artifact.parts], - extensions=artifact.extensions or [], - ) - - @classmethod - def authentication_info( - cls, info: types.PushNotificationAuthenticationInfo - ) -> a2a_pb2.AuthenticationInfo: - return a2a_pb2.AuthenticationInfo( - schemes=info.schemes, - credentials=info.credentials, - ) - - @classmethod - def push_notification_config( - cls, config: types.PushNotificationConfig - ) -> a2a_pb2.PushNotificationConfig: - auth_info = ( - cls.authentication_info(config.authentication) - if config.authentication - else None - ) - return a2a_pb2.PushNotificationConfig( - id=config.id or '', - url=config.url, - token=config.token, - authentication=auth_info, - ) - - @classmethod - def task_artifact_update_event( - cls, event: types.TaskArtifactUpdateEvent - ) -> a2a_pb2.TaskArtifactUpdateEvent: - return a2a_pb2.TaskArtifactUpdateEvent( - task_id=event.task_id, - context_id=event.context_id, - artifact=cls.artifact(event.artifact), - metadata=cls.metadata(event.metadata), - append=event.append or False, - last_chunk=event.last_chunk or False, - ) - - @classmethod - def task_status_update_event( - cls, event: types.TaskStatusUpdateEvent - ) -> a2a_pb2.TaskStatusUpdateEvent: - return a2a_pb2.TaskStatusUpdateEvent( - task_id=event.task_id, - context_id=event.context_id, - status=cls.task_status(event.status), - metadata=cls.metadata(event.metadata), - final=event.final, - ) - - @classmethod - def message_send_configuration( - cls, config: types.MessageSendConfiguration | None - ) -> a2a_pb2.SendMessageConfiguration: - if not config: - return a2a_pb2.SendMessageConfiguration() - return a2a_pb2.SendMessageConfiguration( - accepted_output_modes=config.accepted_output_modes, - push_notification=cls.push_notification_config( - config.push_notification_config - ) - if config.push_notification_config - else None, - history_length=config.history_length, - blocking=config.blocking or False, - ) - - @classmethod - def update_event( - cls, - event: types.Task - | types.Message - | types.TaskStatusUpdateEvent - | types.TaskArtifactUpdateEvent, - ) -> a2a_pb2.StreamResponse: - """Converts a task, message, or task update event to a StreamResponse.""" - return cls.stream_response(event) - - @classmethod - def task_or_message( - cls, event: types.Task | types.Message - ) -> a2a_pb2.SendMessageResponse: - if isinstance(event, types.Message): - return a2a_pb2.SendMessageResponse( - msg=cls.message(event), - ) - return a2a_pb2.SendMessageResponse( - task=cls.task(event), - ) - - @classmethod - def stream_response( - cls, - event: ( - types.Message - | types.Task - | types.TaskStatusUpdateEvent - | types.TaskArtifactUpdateEvent - ), - ) -> a2a_pb2.StreamResponse: - if isinstance(event, types.Message): - return a2a_pb2.StreamResponse(msg=cls.message(event)) - if isinstance(event, types.Task): - return a2a_pb2.StreamResponse(task=cls.task(event)) - if isinstance(event, types.TaskStatusUpdateEvent): - return a2a_pb2.StreamResponse( - status_update=cls.task_status_update_event(event), - ) - if isinstance(event, types.TaskArtifactUpdateEvent): - return a2a_pb2.StreamResponse( - artifact_update=cls.task_artifact_update_event(event), - ) - raise ValueError(f'Unsupported event type: {type(event)}') - - @classmethod - def task_push_notification_config( - cls, config: types.TaskPushNotificationConfig - ) -> a2a_pb2.TaskPushNotificationConfig: - return a2a_pb2.TaskPushNotificationConfig( - name=f'tasks/{config.task_id}/pushNotificationConfigs/{config.push_notification_config.id}', - push_notification_config=cls.push_notification_config( - config.push_notification_config, - ), - ) - - @classmethod - def agent_card( - cls, - card: types.AgentCard, - ) -> a2a_pb2.AgentCard: - return a2a_pb2.AgentCard( - capabilities=cls.capabilities(card.capabilities), - default_input_modes=list(card.default_input_modes), - default_output_modes=list(card.default_output_modes), - description=card.description, - documentation_url=card.documentation_url, - name=card.name, - provider=cls.provider(card.provider), - security=cls.security(card.security), - security_schemes=cls.security_schemes(card.security_schemes), - skills=[cls.skill(x) for x in card.skills] if card.skills else [], - url=card.url, - version=card.version, - supports_authenticated_extended_card=bool( - card.supports_authenticated_extended_card - ), - preferred_transport=card.preferred_transport, - protocol_version=card.protocol_version, - additional_interfaces=[ - cls.agent_interface(x) for x in card.additional_interfaces - ] - if card.additional_interfaces - else None, - signatures=[cls.agent_card_signature(x) for x in card.signatures] - if card.signatures - else None, - ) - - @classmethod - def agent_card_signature( - cls, signature: types.AgentCardSignature - ) -> a2a_pb2.AgentCardSignature: - return a2a_pb2.AgentCardSignature( - protected=signature.protected, - signature=signature.signature, - header=dict_to_struct(signature.header) - if signature.header is not None - else None, - ) - - @classmethod - def agent_interface( - cls, - interface: types.AgentInterface, - ) -> a2a_pb2.AgentInterface: - return a2a_pb2.AgentInterface( - transport=interface.transport, - url=interface.url, - ) - - @classmethod - def capabilities( - cls, capabilities: types.AgentCapabilities - ) -> a2a_pb2.AgentCapabilities: - return a2a_pb2.AgentCapabilities( - streaming=bool(capabilities.streaming), - push_notifications=bool(capabilities.push_notifications), - extensions=[ - cls.extension(x) for x in capabilities.extensions or [] - ], - ) - - @classmethod - def extension( - cls, - extension: types.AgentExtension, - ) -> a2a_pb2.AgentExtension: - return a2a_pb2.AgentExtension( - uri=extension.uri, - description=extension.description, - params=dict_to_struct(extension.params) - if extension.params - else None, - required=extension.required, - ) - - @classmethod - def provider( - cls, provider: types.AgentProvider | None - ) -> a2a_pb2.AgentProvider | None: - if not provider: - return None - return a2a_pb2.AgentProvider( - organization=provider.organization, - url=provider.url, - ) - - @classmethod - def security( - cls, - security: list[dict[str, list[str]]] | None, - ) -> list[a2a_pb2.Security] | None: - if not security: - return None - return [ - a2a_pb2.Security( - schemes={k: a2a_pb2.StringList(list=v) for (k, v) in s.items()} - ) - for s in security - ] - - @classmethod - def security_schemes( - cls, - schemes: dict[str, types.SecurityScheme] | None, - ) -> dict[str, a2a_pb2.SecurityScheme] | None: - if not schemes: - return None - return {k: cls.security_scheme(v) for (k, v) in schemes.items()} - - @classmethod - def security_scheme( - cls, - scheme: types.SecurityScheme, - ) -> a2a_pb2.SecurityScheme: - if isinstance(scheme.root, types.APIKeySecurityScheme): - return a2a_pb2.SecurityScheme( - api_key_security_scheme=a2a_pb2.APIKeySecurityScheme( - description=scheme.root.description, - location=scheme.root.in_.value, - name=scheme.root.name, - ) - ) - if isinstance(scheme.root, types.HTTPAuthSecurityScheme): - return a2a_pb2.SecurityScheme( - http_auth_security_scheme=a2a_pb2.HTTPAuthSecurityScheme( - description=scheme.root.description, - scheme=scheme.root.scheme, - bearer_format=scheme.root.bearer_format, - ) - ) - if isinstance(scheme.root, types.OAuth2SecurityScheme): - return a2a_pb2.SecurityScheme( - oauth2_security_scheme=a2a_pb2.OAuth2SecurityScheme( - description=scheme.root.description, - flows=cls.oauth2_flows(scheme.root.flows), - ) - ) - if isinstance(scheme.root, types.MutualTLSSecurityScheme): - return a2a_pb2.SecurityScheme( - mtls_security_scheme=a2a_pb2.MutualTlsSecurityScheme( - description=scheme.root.description, - ) - ) - return a2a_pb2.SecurityScheme( - open_id_connect_security_scheme=a2a_pb2.OpenIdConnectSecurityScheme( - description=scheme.root.description, - open_id_connect_url=scheme.root.open_id_connect_url, - ) - ) - - @classmethod - def oauth2_flows(cls, flows: types.OAuthFlows) -> a2a_pb2.OAuthFlows: - if flows.authorization_code: - return a2a_pb2.OAuthFlows( - authorization_code=a2a_pb2.AuthorizationCodeOAuthFlow( - authorization_url=flows.authorization_code.authorization_url, - refresh_url=flows.authorization_code.refresh_url, - scopes=dict(flows.authorization_code.scopes.items()), - token_url=flows.authorization_code.token_url, - ), - ) - if flows.client_credentials: - return a2a_pb2.OAuthFlows( - client_credentials=a2a_pb2.ClientCredentialsOAuthFlow( - refresh_url=flows.client_credentials.refresh_url, - scopes=dict(flows.client_credentials.scopes.items()), - token_url=flows.client_credentials.token_url, - ), - ) - if flows.implicit: - return a2a_pb2.OAuthFlows( - implicit=a2a_pb2.ImplicitOAuthFlow( - authorization_url=flows.implicit.authorization_url, - refresh_url=flows.implicit.refresh_url, - scopes=dict(flows.implicit.scopes.items()), - ), - ) - if flows.password: - return a2a_pb2.OAuthFlows( - password=a2a_pb2.PasswordOAuthFlow( - refresh_url=flows.password.refresh_url, - scopes=dict(flows.password.scopes.items()), - token_url=flows.password.token_url, - ), - ) - raise ValueError('Unknown oauth flow definition') - - @classmethod - def skill(cls, skill: types.AgentSkill) -> a2a_pb2.AgentSkill: - return a2a_pb2.AgentSkill( - id=skill.id, - name=skill.name, - description=skill.description, - tags=skill.tags, - examples=skill.examples, - input_modes=skill.input_modes, - output_modes=skill.output_modes, - ) +def parse_params(params: QueryParams, message: ProtobufMessage) -> None: + """Converts REST query parameters back into a Protobuf message. - @classmethod - def role(cls, role: types.Role) -> a2a_pb2.Role: - match role: - case types.Role.user: - return a2a_pb2.Role.ROLE_USER - case types.Role.agent: - return a2a_pb2.Role.ROLE_AGENT - case _: - return a2a_pb2.Role.ROLE_UNSPECIFIED + Handles A2A-specific pre-processing before calling ParseDict: + - Booleans: 'true'/'false' -> True/False + - Repeated: Supports BOTH repeated keys and comma-separated values. + - Others: Handles string->enum/timestamp/number conversion via ParseDict. + See Also: + https://a2a-protocol.org/latest/specification/#115-query-parameter-naming-for-request-parameters + """ + descriptor = message.DESCRIPTOR + fields = {f.camelcase_name: f for f in descriptor.fields} + processed: dict[str, Any] = {} + + keys = params.keys() + + for k in keys: + if k not in fields: + continue + + field = fields[k] + v_list = params.getlist(k) + + if field.label == field.LABEL_REPEATED: + accumulated: list[Any] = [] + for v in v_list: + if not v: + continue + if isinstance(v, str): + accumulated.extend([x for x in v.split(',') if x]) + else: + accumulated.append(v) + processed[k] = accumulated + else: + # For non-repeated fields, the last one wins. + raw_val = v_list[-1] + if raw_val is not None: + parsed_val: Any = raw_val + if field.type == field.TYPE_BOOL and isinstance(raw_val, str): + parsed_val = raw_val.lower() == 'true' + processed[k] = parsed_val + + ParseDict(processed, message, ignore_unknown_fields=True) + + +class ValidationDetail(TypedDict): + """Structured validation error detail.""" + + field: str + message: str + + +def _check_required_field_violation( + msg: ProtobufMessage, field: FieldDescriptor +) -> ValidationDetail | None: + """Check if a required field is missing or invalid.""" + val = getattr(msg, field.name) + if field.is_repeated: + if not val: + return ValidationDetail( + field=field.name, + message='Field must contain at least one element.', + ) + elif field.has_presence: + if not msg.HasField(field.name): + return ValidationDetail( + field=field.name, message='Field is required.' + ) + elif val == field.default_value: + return ValidationDetail(field=field.name, message='Field is required.') + return None + + +def _append_nested_errors( + errors: list[ValidationDetail], + prefix: str, + sub_errs: list[ValidationDetail], +) -> None: + """Format nested validation errors and append to errors list.""" + for sub in sub_errs: + sub_field = sub['field'] + errors.append( + ValidationDetail( + field=f'{prefix}.{sub_field}' if sub_field else prefix, + message=sub['message'], + ) + ) + + +def _recurse_validation( + msg: ProtobufMessage, field: FieldDescriptor +) -> list[ValidationDetail]: + """Recurse validation for nested messages and map fields.""" + errors: list[ValidationDetail] = [] + if field.type != FieldDescriptor.TYPE_MESSAGE: + return errors + + val = getattr(msg, field.name) + if not field.is_repeated: + if msg.HasField(field.name): + sub_errs = _validate_proto_required_fields_internal(val) + _append_nested_errors(errors, field.name, sub_errs) + elif field.message_type.GetOptions().map_entry: + for k, v in val.items(): + if isinstance(v, ProtobufMessage): + sub_errs = _validate_proto_required_fields_internal(v) + _append_nested_errors(errors, f'{field.name}[{k}]', sub_errs) + else: + for i, item in enumerate(val): + sub_errs = _validate_proto_required_fields_internal(item) + _append_nested_errors(errors, f'{field.name}[{i}]', sub_errs) + return errors + + +def _validate_proto_required_fields_internal( + msg: ProtobufMessage, +) -> list[ValidationDetail]: + """Internal validation that returns a list of error dictionaries.""" + desc = msg.DESCRIPTOR + errors: list[ValidationDetail] = [] + + for field in desc.fields: + options = field.GetOptions() + if FieldBehavior.REQUIRED in options.Extensions[field_behavior]: + violation = _check_required_field_violation(msg, field) + if violation: + errors.append(violation) + errors.extend(_recurse_validation(msg, field)) + return errors + + +def validate_proto_required_fields(msg: ProtobufMessage) -> None: + """Validate that all fields marked as REQUIRED are present on the proto message. -class FromProto: - """Converts proto types to Python types.""" - - @classmethod - def message(cls, message: a2a_pb2.Message) -> types.Message: - return types.Message( - message_id=message.message_id, - parts=[cls.part(p) for p in message.content], - context_id=message.context_id or None, - task_id=message.task_id or None, - role=cls.role(message.role), - metadata=cls.metadata(message.metadata), - extensions=list(message.extensions) or None, - ) - - @classmethod - def metadata(cls, metadata: struct_pb2.Struct) -> dict[str, Any]: - if not metadata.fields: - return {} - return json_format.MessageToDict(metadata) - - @classmethod - def part(cls, part: a2a_pb2.Part) -> types.Part: - if part.HasField('text'): - return types.Part( - root=types.TextPart( - text=part.text, - metadata=cls.metadata(part.metadata) - if part.metadata - else None, - ), - ) - if part.HasField('file'): - return types.Part( - root=types.FilePart( - file=cls.file(part.file), - metadata=cls.metadata(part.metadata) - if part.metadata - else None, - ), - ) - if part.HasField('data'): - return types.Part( - root=types.DataPart( - data=cls.data(part.data), - metadata=cls.metadata(part.metadata) - if part.metadata - else None, - ), - ) - raise ValueError(f'Unsupported part type: {part}') - - @classmethod - def data(cls, data: a2a_pb2.DataPart) -> dict[str, Any]: - json_data = json_format.MessageToJson(data.data) - return json.loads(json_data) - - @classmethod - def file( - cls, file: a2a_pb2.FilePart - ) -> types.FileWithUri | types.FileWithBytes: - common_args = { - 'mime_type': file.mime_type or None, - 'name': file.name or None, - } - if file.HasField('file_with_uri'): - return types.FileWithUri( - uri=file.file_with_uri, - **common_args, - ) - return types.FileWithBytes( - bytes=file.file_with_bytes.decode('utf-8'), - **common_args, - ) - - @classmethod - def task_or_message( - cls, event: a2a_pb2.SendMessageResponse - ) -> types.Task | types.Message: - if event.HasField('msg'): - return cls.message(event.msg) - return cls.task(event.task) - - @classmethod - def task(cls, task: a2a_pb2.Task) -> types.Task: - return types.Task( - id=task.id, - context_id=task.context_id, - status=cls.task_status(task.status), - artifacts=[cls.artifact(a) for a in task.artifacts], - history=[cls.message(h) for h in task.history], - metadata=cls.metadata(task.metadata), - ) - - @classmethod - def task_status(cls, status: a2a_pb2.TaskStatus) -> types.TaskStatus: - return types.TaskStatus( - state=cls.task_state(status.state), - message=cls.message(status.update), - ) - - @classmethod - def task_state(cls, state: a2a_pb2.TaskState) -> types.TaskState: - match state: - case a2a_pb2.TaskState.TASK_STATE_SUBMITTED: - return types.TaskState.submitted - case a2a_pb2.TaskState.TASK_STATE_WORKING: - return types.TaskState.working - case a2a_pb2.TaskState.TASK_STATE_COMPLETED: - return types.TaskState.completed - case a2a_pb2.TaskState.TASK_STATE_CANCELLED: - return types.TaskState.canceled - case a2a_pb2.TaskState.TASK_STATE_FAILED: - return types.TaskState.failed - case a2a_pb2.TaskState.TASK_STATE_INPUT_REQUIRED: - return types.TaskState.input_required - case a2a_pb2.TaskState.TASK_STATE_AUTH_REQUIRED: - return types.TaskState.auth_required - case a2a_pb2.TaskState.TASK_STATE_REJECTED: - return types.TaskState.rejected - case _: - return types.TaskState.unknown - - @classmethod - def artifact(cls, artifact: a2a_pb2.Artifact) -> types.Artifact: - return types.Artifact( - artifact_id=artifact.artifact_id, - description=artifact.description, - metadata=cls.metadata(artifact.metadata), - name=artifact.name, - parts=[cls.part(p) for p in artifact.parts], - extensions=artifact.extensions or None, - ) - - @classmethod - def task_artifact_update_event( - cls, event: a2a_pb2.TaskArtifactUpdateEvent - ) -> types.TaskArtifactUpdateEvent: - return types.TaskArtifactUpdateEvent( - task_id=event.task_id, - context_id=event.context_id, - artifact=cls.artifact(event.artifact), - metadata=cls.metadata(event.metadata), - append=event.append, - last_chunk=event.last_chunk, - ) - - @classmethod - def task_status_update_event( - cls, event: a2a_pb2.TaskStatusUpdateEvent - ) -> types.TaskStatusUpdateEvent: - return types.TaskStatusUpdateEvent( - task_id=event.task_id, - context_id=event.context_id, - status=cls.task_status(event.status), - metadata=cls.metadata(event.metadata), - final=event.final, - ) - - @classmethod - def push_notification_config( - cls, config: a2a_pb2.PushNotificationConfig - ) -> types.PushNotificationConfig: - return types.PushNotificationConfig( - id=config.id, - url=config.url, - token=config.token, - authentication=cls.authentication_info(config.authentication) - if config.HasField('authentication') - else None, - ) - - @classmethod - def authentication_info( - cls, info: a2a_pb2.AuthenticationInfo - ) -> types.PushNotificationAuthenticationInfo: - return types.PushNotificationAuthenticationInfo( - schemes=list(info.schemes), - credentials=info.credentials, - ) - - @classmethod - def message_send_configuration( - cls, config: a2a_pb2.SendMessageConfiguration - ) -> types.MessageSendConfiguration: - return types.MessageSendConfiguration( - accepted_output_modes=list(config.accepted_output_modes), - push_notification_config=cls.push_notification_config( - config.push_notification - ) - if config.HasField('push_notification') - else None, - history_length=config.history_length, - blocking=config.blocking, - ) - - @classmethod - def message_send_params( - cls, request: a2a_pb2.SendMessageRequest - ) -> types.MessageSendParams: - return types.MessageSendParams( - configuration=cls.message_send_configuration(request.configuration), - message=cls.message(request.request), - metadata=cls.metadata(request.metadata), - ) - - @classmethod - def task_id_params( - cls, - request: ( - a2a_pb2.CancelTaskRequest - | a2a_pb2.TaskSubscriptionRequest - | a2a_pb2.GetTaskPushNotificationConfigRequest - ), - ) -> types.TaskIdParams: - if isinstance(request, a2a_pb2.GetTaskPushNotificationConfigRequest): - m = _TASK_PUSH_CONFIG_NAME_MATCH.match(request.name) - if not m: - raise ServerError( - error=types.InvalidParamsError( - message=f'No task for {request.name}' - ) - ) - return types.TaskIdParams(id=m.group(1)) - m = _TASK_NAME_MATCH.match(request.name) - if not m: - raise ServerError( - error=types.InvalidParamsError( - message=f'No task for {request.name}' - ) - ) - return types.TaskIdParams(id=m.group(1)) - - @classmethod - def task_push_notification_config_request( - cls, - request: a2a_pb2.CreateTaskPushNotificationConfigRequest, - ) -> types.TaskPushNotificationConfig: - m = _TASK_NAME_MATCH.match(request.parent) - if not m: - raise ServerError( - error=types.InvalidParamsError( - message=f'No task for {request.parent}' - ) - ) - return types.TaskPushNotificationConfig( - push_notification_config=cls.push_notification_config( - request.config.push_notification_config, - ), - task_id=m.group(1), - ) - - @classmethod - def task_push_notification_config( - cls, - config: a2a_pb2.TaskPushNotificationConfig, - ) -> types.TaskPushNotificationConfig: - m = _TASK_PUSH_CONFIG_NAME_MATCH.match(config.name) - if not m: - raise ServerError( - error=types.InvalidParamsError( - message=f'Bad TaskPushNotificationConfig resource name {config.name}' - ) - ) - return types.TaskPushNotificationConfig( - push_notification_config=cls.push_notification_config( - config.push_notification_config, - ), - task_id=m.group(1), - ) - - @classmethod - def agent_card( - cls, - card: a2a_pb2.AgentCard, - ) -> types.AgentCard: - return types.AgentCard( - capabilities=cls.capabilities(card.capabilities), - default_input_modes=list(card.default_input_modes), - default_output_modes=list(card.default_output_modes), - description=card.description, - documentation_url=card.documentation_url, - name=card.name, - provider=cls.provider(card.provider), - security=cls.security(list(card.security)), - security_schemes=cls.security_schemes(dict(card.security_schemes)), - skills=[cls.skill(x) for x in card.skills] if card.skills else [], - url=card.url, - version=card.version, - supports_authenticated_extended_card=card.supports_authenticated_extended_card, - preferred_transport=card.preferred_transport, - protocol_version=card.protocol_version, - additional_interfaces=[ - cls.agent_interface(x) for x in card.additional_interfaces - ] - if card.additional_interfaces - else None, - signatures=[cls.agent_card_signature(x) for x in card.signatures] - if card.signatures - else None, - ) - - @classmethod - def agent_card_signature( - cls, signature: a2a_pb2.AgentCardSignature - ) -> types.AgentCardSignature: - return types.AgentCardSignature( - protected=signature.protected, - signature=signature.signature, - header=json_format.MessageToDict(signature.header), - ) - - @classmethod - def agent_interface( - cls, - interface: a2a_pb2.AgentInterface, - ) -> types.AgentInterface: - return types.AgentInterface( - transport=interface.transport, - url=interface.url, - ) - - @classmethod - def task_query_params( - cls, - request: a2a_pb2.GetTaskRequest, - ) -> types.TaskQueryParams: - m = _TASK_NAME_MATCH.match(request.name) - if not m: - raise ServerError( - error=types.InvalidParamsError( - message=f'No task for {request.name}' - ) - ) - return types.TaskQueryParams( - history_length=request.history_length - if request.history_length - else None, - id=m.group(1), - metadata=None, - ) - - @classmethod - def capabilities( - cls, capabilities: a2a_pb2.AgentCapabilities - ) -> types.AgentCapabilities: - return types.AgentCapabilities( - streaming=capabilities.streaming, - push_notifications=capabilities.push_notifications, - extensions=[ - cls.agent_extension(x) for x in capabilities.extensions - ], - ) - - @classmethod - def agent_extension( - cls, - extension: a2a_pb2.AgentExtension, - ) -> types.AgentExtension: - return types.AgentExtension( - uri=extension.uri, - description=extension.description, - params=json_format.MessageToDict(extension.params), - required=extension.required, - ) - - @classmethod - def security( - cls, - security: list[a2a_pb2.Security] | None, - ) -> list[dict[str, list[str]]] | None: - if not security: - return None - return [ - {k: list(v.list) for (k, v) in s.schemes.items()} for s in security - ] - - @classmethod - def provider( - cls, provider: a2a_pb2.AgentProvider | None - ) -> types.AgentProvider | None: - if not provider: - return None - return types.AgentProvider( - organization=provider.organization, - url=provider.url, - ) - - @classmethod - def security_schemes( - cls, schemes: dict[str, a2a_pb2.SecurityScheme] - ) -> dict[str, types.SecurityScheme]: - return {k: cls.security_scheme(v) for (k, v) in schemes.items()} - - @classmethod - def security_scheme( - cls, - scheme: a2a_pb2.SecurityScheme, - ) -> types.SecurityScheme: - if scheme.HasField('api_key_security_scheme'): - return types.SecurityScheme( - root=types.APIKeySecurityScheme( - description=scheme.api_key_security_scheme.description, - name=scheme.api_key_security_scheme.name, - in_=types.In(scheme.api_key_security_scheme.location), # type: ignore[call-arg] - ) - ) - if scheme.HasField('http_auth_security_scheme'): - return types.SecurityScheme( - root=types.HTTPAuthSecurityScheme( - description=scheme.http_auth_security_scheme.description, - scheme=scheme.http_auth_security_scheme.scheme, - bearer_format=scheme.http_auth_security_scheme.bearer_format, - ) - ) - if scheme.HasField('oauth2_security_scheme'): - return types.SecurityScheme( - root=types.OAuth2SecurityScheme( - description=scheme.oauth2_security_scheme.description, - flows=cls.oauth2_flows(scheme.oauth2_security_scheme.flows), - ) - ) - if scheme.HasField('mtls_security_scheme'): - return types.SecurityScheme( - root=types.MutualTLSSecurityScheme( - description=scheme.mtls_security_scheme.description, - ) - ) - return types.SecurityScheme( - root=types.OpenIdConnectSecurityScheme( - description=scheme.open_id_connect_security_scheme.description, - open_id_connect_url=scheme.open_id_connect_security_scheme.open_id_connect_url, - ) - ) - - @classmethod - def oauth2_flows(cls, flows: a2a_pb2.OAuthFlows) -> types.OAuthFlows: - if flows.HasField('authorization_code'): - return types.OAuthFlows( - authorization_code=types.AuthorizationCodeOAuthFlow( - authorization_url=flows.authorization_code.authorization_url, - refresh_url=flows.authorization_code.refresh_url, - scopes=dict(flows.authorization_code.scopes.items()), - token_url=flows.authorization_code.token_url, - ), - ) - if flows.HasField('client_credentials'): - return types.OAuthFlows( - client_credentials=types.ClientCredentialsOAuthFlow( - refresh_url=flows.client_credentials.refresh_url, - scopes=dict(flows.client_credentials.scopes.items()), - token_url=flows.client_credentials.token_url, - ), - ) - if flows.HasField('implicit'): - return types.OAuthFlows( - implicit=types.ImplicitOAuthFlow( - authorization_url=flows.implicit.authorization_url, - refresh_url=flows.implicit.refresh_url, - scopes=dict(flows.implicit.scopes.items()), - ), - ) - return types.OAuthFlows( - password=types.PasswordOAuthFlow( - refresh_url=flows.password.refresh_url, - scopes=dict(flows.password.scopes.items()), - token_url=flows.password.token_url, - ), - ) - - @classmethod - def stream_response( - cls, - response: a2a_pb2.StreamResponse, - ) -> ( - types.Message - | types.Task - | types.TaskStatusUpdateEvent - | types.TaskArtifactUpdateEvent - ): - if response.HasField('msg'): - return cls.message(response.msg) - if response.HasField('task'): - return cls.task(response.task) - if response.HasField('status_update'): - return cls.task_status_update_event(response.status_update) - if response.HasField('artifact_update'): - return cls.task_artifact_update_event(response.artifact_update) - raise ValueError('Unsupported StreamResponse type') - - @classmethod - def skill(cls, skill: a2a_pb2.AgentSkill) -> types.AgentSkill: - return types.AgentSkill( - id=skill.id, - name=skill.name, - description=skill.description, - tags=list(skill.tags), - examples=list(skill.examples), - input_modes=list(skill.input_modes), - output_modes=list(skill.output_modes), - ) + Args: + msg: The Protobuf message to validate. - @classmethod - def role(cls, role: a2a_pb2.Role) -> types.Role: - match role: - case a2a_pb2.Role.ROLE_USER: - return types.Role.user - case a2a_pb2.Role.ROLE_AGENT: - return types.Role.agent - case _: - return types.Role.agent + Raises: + InvalidParamsError: If a required field is missing or empty. + """ + errors = _validate_proto_required_fields_internal(msg) + + if errors: + raise InvalidParamsError( + message='Validation failed', data={'errors': errors} + ) + + +def validation_errors_to_bad_request( + errors: list[ValidationDetail], +) -> error_details_pb2.BadRequest: + """Convert validation error details to a gRPC BadRequest proto.""" + bad_request = error_details_pb2.BadRequest() + for err in errors: + violation = bad_request.field_violations.add() + violation.field = err['field'] + violation.description = err['message'] + return bad_request + + +def bad_request_to_validation_errors( + bad_request: error_details_pb2.BadRequest, +) -> list[ValidationDetail]: + """Convert a gRPC BadRequest proto to validation error details.""" + return [ + ValidationDetail(field=v.field, message=v.description) + for v in bad_request.field_violations + ] diff --git a/src/a2a/utils/signing.py b/src/a2a/utils/signing.py index 6ea8c21b8..aa720d159 100644 --- a/src/a2a/utils/signing.py +++ b/src/a2a/utils/signing.py @@ -3,7 +3,7 @@ from collections.abc import Callable from typing import Any, TypedDict -from a2a.utils.helpers import canonicalize_agent_card +from google.protobuf.json_format import MessageToDict try: @@ -68,7 +68,7 @@ def create_agent_card_signer( def agent_card_signer(agent_card: AgentCard) -> AgentCard: """Signs agent card.""" - canonical_payload = canonicalize_agent_card(agent_card) + canonical_payload = _canonicalize_agent_card(agent_card) payload_dict = json.loads(canonical_payload) jws_string = jwt.encode( @@ -87,9 +87,7 @@ def agent_card_signer(agent_card: AgentCard) -> AgentCard: signature=signature, ) - agent_card.signatures = (agent_card.signatures or []) + [ - agent_card_signature - ] + agent_card.signatures.append(agent_card_signature) return agent_card return agent_card_signer @@ -130,7 +128,7 @@ def signature_verifier( jku = protected_header.get('jku') verification_key = key_provider(kid, jku) - canonical_payload = canonicalize_agent_card(agent_card) + canonical_payload = _canonicalize_agent_card(agent_card) encoded_payload = base64url_encode( canonical_payload.encode('utf-8') ).decode('utf-8') @@ -150,3 +148,35 @@ def signature_verifier( raise InvalidSignaturesError('No valid signature found') return signature_verifier + + +def _clean_empty(d: Any) -> Any: + """Recursively remove empty strings, lists and dicts from a dictionary.""" + if isinstance(d, dict): + cleaned_dict = { + k: cleaned_v + for k, v in d.items() + if (cleaned_v := _clean_empty(v)) is not None + } + return cleaned_dict or None + if isinstance(d, list): + cleaned_list = [ + cleaned_v for v in d if (cleaned_v := _clean_empty(v)) is not None + ] + return cleaned_list or None + if isinstance(d, str) and not d: + return None + return d + + +def _canonicalize_agent_card(agent_card: AgentCard) -> str: + """Canonicalizes the Agent Card JSON according to RFC 8785 (JCS).""" + card_dict = MessageToDict( + agent_card, + ) + # Remove signatures field if present + card_dict.pop('signatures', None) + + # Recursively remove empty values + cleaned_dict = _clean_empty(card_dict) + return json.dumps(cleaned_dict, separators=(',', ':'), sort_keys=True) diff --git a/src/a2a/utils/task.py b/src/a2a/utils/task.py index d8215cec0..4acf54e46 100644 --- a/src/a2a/utils/task.py +++ b/src/a2a/utils/task.py @@ -1,92 +1,121 @@ """Utility functions for creating A2A Task objects.""" -import uuid +import binascii -from a2a.types import Artifact, Message, Task, TaskState, TaskStatus, TextPart +from base64 import b64decode, b64encode +from typing import Literal, Protocol, runtime_checkable +from a2a.types.a2a_pb2 import Task +from a2a.utils.constants import MAX_LIST_TASKS_PAGE_SIZE +from a2a.utils.errors import InvalidParamsError -def new_task(request: Message) -> Task: - """Creates a new Task object from an initial user message. - Generates task and context IDs if not provided in the message. +@runtime_checkable +class HistoryLengthConfig(Protocol): + """Protocol for configuration arguments containing history_length field.""" + + history_length: int + + def HasField(self, field_name: Literal['history_length']) -> bool: # noqa: N802 -- Protobuf generated code + """Checks if a field is set. + + This method name matches the generated Protobuf code. + """ + ... + + +def validate_history_length(config: HistoryLengthConfig | None) -> None: + """Validates that history_length is non-negative.""" + if config and config.history_length < 0: + raise InvalidParamsError(message='history length must be non-negative') + + +def apply_history_length( + task: Task, config: HistoryLengthConfig | None +) -> Task: + """Applies history_length parameter on task and returns a new task object. Args: - request: The initial `Message` object from the user. + task: The original task object with complete history + config: Configuration object containing 'history_length' field and HasField method. Returns: - A new `Task` object initialized with 'submitted' status and the input message in history. + A new task object with limited history - Raises: - TypeError: If the message role is None. - ValueError: If the message parts are empty, if any part has empty content, or if the provided context_id is invalid. + See Also: + https://a2a-protocol.org/latest/specification/#324-history-length-semantics """ - if not request.role: - raise TypeError('Message role cannot be None') - if not request.parts: - raise ValueError('Message parts cannot be empty') - for part in request.parts: - if isinstance(part.root, TextPart) and not part.root.text: - raise ValueError('TextPart content cannot be empty') - - return Task( - status=TaskStatus(state=TaskState.submitted), - id=request.task_id or str(uuid.uuid4()), - context_id=request.context_id or str(uuid.uuid4()), - history=[request], - ) - - -def completed_task( - task_id: str, - context_id: str, - artifacts: list[Artifact], - history: list[Message] | None = None, -) -> Task: - """Creates a Task object in the 'completed' state. + if config is None or not config.HasField('history_length'): + return task - Useful for constructing a final Task representation when the agent - finishes and produces artifacts. + history_length = config.history_length - Args: - task_id: The ID of the task. - context_id: The context ID of the task. - artifacts: A list of `Artifact` objects produced by the task. - history: An optional list of `Message` objects representing the task history. + if history_length == 0: + if not task.history: + return task + task_copy = Task() + task_copy.CopyFrom(task) + task_copy.ClearField('history') + return task_copy - Returns: - A `Task` object with status set to 'completed'. + if history_length > 0 and task.history: + if len(task.history) <= history_length: + return task + + task_copy = Task() + task_copy.CopyFrom(task) + del task_copy.history[:-history_length] + return task_copy + + return task + + +def validate_page_size(page_size: int) -> None: + """Validates that page_size is in range [1, 100]. + + See Also: + https://a2a-protocol.org/latest/specification/#314-list-tasks """ - if not artifacts or not all(isinstance(a, Artifact) for a in artifacts): - raise ValueError( - 'artifacts must be a non-empty list of Artifact objects' + if page_size < 1: + raise InvalidParamsError(message='minimum page size is 1') + if page_size > MAX_LIST_TASKS_PAGE_SIZE: + raise InvalidParamsError( + message=f'maximum page size is {MAX_LIST_TASKS_PAGE_SIZE}' ) - if history is None: - history = [] - return Task( - status=TaskStatus(state=TaskState.completed), - id=task_id, - context_id=context_id, - artifacts=artifacts, - history=history, - ) +_ENCODING = 'utf-8' -def apply_history_length(task: Task, history_length: int | None) -> Task: - """Applies history_length parameter on task and returns a new task object. + +def encode_page_token(task_id: str) -> str: + """Encodes page token for tasks pagination. Args: - task: The original task object with complete history - history_length: History length configuration value + task_id: The ID of the task. Returns: - A new task object with limited history + The encoded page token. """ - # Apply historyLength parameter if specified - if history_length is not None and history_length > 0 and task.history: - # Limit history to the most recent N messages - limited_history = task.history[-history_length:] - # Create a new task instance with limited history - return task.model_copy(update={'history': limited_history}) + return b64encode(task_id.encode(_ENCODING)).decode(_ENCODING) - return task + +def decode_page_token(page_token: str) -> str: + """Decodes page token for tasks pagination. + + Args: + page_token: The encoded page token. + + Returns: + The decoded task ID. + """ + encoded_str = page_token + missing_padding = len(encoded_str) % 4 + if missing_padding: + encoded_str += '=' * (4 - missing_padding) + try: + decoded = b64decode(encoded_str.encode(_ENCODING)).decode(_ENCODING) + except (binascii.Error, UnicodeDecodeError) as e: + raise InvalidParamsError( + 'Token is not a valid base64-encoded cursor.' + ) from e + return decoded diff --git a/src/a2a/utils/telemetry.py b/src/a2a/utils/telemetry.py index fa8658bf7..3edf2fb23 100644 --- a/src/a2a/utils/telemetry.py +++ b/src/a2a/utils/telemetry.py @@ -76,7 +76,9 @@ def internal_method(self): if TYPE_CHECKING: - from opentelemetry.trace import SpanKind as SpanKindType + from opentelemetry.trace import ( + SpanKind as SpanKindType, + ) else: SpanKindType = object @@ -84,8 +86,12 @@ def internal_method(self): try: from opentelemetry import trace - from opentelemetry.trace import SpanKind as _SpanKind - from opentelemetry.trace import StatusCode + from opentelemetry.trace import ( + SpanKind as _SpanKind, + ) + from opentelemetry.trace import ( + StatusCode, + ) otel_installed = True diff --git a/src/a2a/utils/version_validator.py b/src/a2a/utils/version_validator.py new file mode 100644 index 000000000..4a776c27e --- /dev/null +++ b/src/a2a/utils/version_validator.py @@ -0,0 +1,130 @@ +"""General utility functions for the A2A Python SDK.""" + +import functools +import inspect +import logging + +from collections.abc import AsyncIterator, Callable +from typing import Any, TypeVar, cast + +from packaging.version import InvalidVersion, Version + +from a2a.server.context import ServerCallContext +from a2a.utils import constants +from a2a.utils.errors import VersionNotSupportedError + + +F = TypeVar('F', bound=Callable[..., Any]) + + +logger = logging.getLogger(__name__) + + +def validate_version(expected_version: str) -> Callable[[F], F]: + """Decorator that validates the A2A-Version header in the request context. + + The header name is defined by `constants.VERSION_HEADER` ('A2A-Version'). + If the header is missing or empty, it is interpreted as `constants.PROTOCOL_VERSION_0_3` ('0.3'). + If the version in the header does not match the `expected_version` (major and minor parts), + a `VersionNotSupportedError` is raised. Patch version is ignored. + + This decorator supports both async methods and async generator methods. It + expects a `ServerCallContext` to be present either in the arguments or + keyword arguments of the decorated method. + + Args: + expected_version: The A2A protocol version string expected by the method. + + Returns: + The decorated function. + + Raises: + VersionNotSupportedError: If the version in the request does not match `expected_version`. + """ + try: + expected_v = Version(expected_version) + except InvalidVersion: + # If the expected version is not a valid semver, we can't do major/minor comparison. + # This shouldn't happen with our constants. + expected_v = None + + def decorator(func: F) -> F: + def _get_actual_version( + args: tuple[Any, ...], kwargs: dict[str, Any] + ) -> str: + context = kwargs.get('context') + if context is None: + for arg in args: + if isinstance(arg, ServerCallContext): + context = arg + break + + if context is None: + # If no context is found, we can't validate the version. + # In a real scenario, this shouldn't happen for properly routed requests. + # We default to the expected version to allow test call to proceed. + return expected_version + + headers = context.state.get('headers', {}) + # Header names are usually case-insensitive in most frameworks, but dict lookup is case-sensitive. + # We check both standard and lowercase versions. + actual_version = headers.get( + constants.VERSION_HEADER + ) or headers.get(constants.VERSION_HEADER.lower()) + + if not actual_version: + return constants.PROTOCOL_VERSION_0_3 + + return str(actual_version) + + def _is_version_compatible(actual: str) -> bool: + if actual == expected_version: + return True + if not expected_v: + return False + try: + actual_v = Version(actual) + except InvalidVersion: + return False + else: + return actual_v.major == expected_v.major + + if inspect.isasyncgenfunction(inspect.unwrap(func)): + + @functools.wraps(func) + def async_gen_wrapper( + *args: Any, **kwargs: Any + ) -> AsyncIterator[Any]: + actual_version = _get_actual_version(args, kwargs) + if not _is_version_compatible(actual_version): + logger.warning( + "Version mismatch: actual='%s', expected='%s'", + actual_version, + expected_version, + ) + raise VersionNotSupportedError( + message=f"A2A version '{actual_version}' is not supported by this handler. " + f"Expected version '{expected_version}'." + ) + return func(*args, **kwargs) + + return cast('F', async_gen_wrapper) + + @functools.wraps(func) + async def async_wrapper(*args: Any, **kwargs: Any) -> Any: + actual_version = _get_actual_version(args, kwargs) + if not _is_version_compatible(actual_version): + logger.warning( + "Version mismatch: actual='%s', expected='%s'", + actual_version, + expected_version, + ) + raise VersionNotSupportedError( + message=f"A2A version '{actual_version}' is not supported by this handler. " + f"Expected version '{expected_version}'." + ) + return await func(*args, **kwargs) + + return cast('F', async_wrapper) + + return decorator diff --git a/tck/sut_agent.py b/tck/sut_agent.py index fd2afb5b1..0ca3a1450 100644 --- a/tck/sut_agent.py +++ b/tck/sut_agent.py @@ -5,30 +5,44 @@ from datetime import datetime, timezone +import grpc.aio import uvicorn +from starlette.applications import Starlette + +import a2a.compat.v0_3.a2a_v0_3_pb2_grpc as a2a_v0_3_grpc +import a2a.types.a2a_pb2_grpc as a2a_grpc + +from a2a.compat.v0_3.grpc_handler import CompatGrpcHandler from a2a.server.agent_execution.agent_executor import AgentExecutor from a2a.server.agent_execution.context import RequestContext -from a2a.server.apps import A2AStarletteApplication from a2a.server.events.event_queue import EventQueue -from a2a.server.request_handlers.default_request_handler import ( - DefaultRequestHandler, +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.request_handlers.grpc_handler import GrpcHandler +from a2a.server.routes import ( + create_agent_card_routes, + create_jsonrpc_routes, + create_rest_routes, ) from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore from a2a.server.tasks.task_store import TaskStore from a2a.types import ( AgentCapabilities, AgentCard, + AgentInterface, AgentProvider, + AgentSkill, Message, + Part, + Role, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) JSONRPC_URL = '/a2a/jsonrpc' +REST_URL = '/a2a/rest' logging.basicConfig(level=logging.INFO) logger = logging.getLogger('SUTAgent') @@ -39,13 +53,15 @@ class SUTAgentExecutor(AgentExecutor): def __init__(self) -> None: """Initializes the SUT agent executor.""" - self.running_tasks = set() + self.running_tasks: set[str] = set() async def cancel( self, context: RequestContext, event_queue: EventQueue ) -> None: """Cancels a task.""" api_task_id = context.task_id + if api_task_id is None: + return if api_task_id in self.running_tasks: self.running_tasks.remove(api_task_id) @@ -53,10 +69,9 @@ async def cancel( task_id=api_task_id, context_id=context.context_id or str(uuid.uuid4()), status=TaskStatus( - state=TaskState.canceled, - timestamp=datetime.now(timezone.utc).isoformat(), + state=TaskState.TASK_STATE_CANCELED, + timestamp=datetime.now(timezone.utc), ), - final=True, ) await event_queue.enqueue_event(status_update) @@ -66,6 +81,8 @@ async def execute( """Executes a task.""" user_message = context.message task_id = context.task_id + if user_message is None or task_id is None: + return context_id = context.context_id self.running_tasks.add(task_id) @@ -81,17 +98,16 @@ async def execute( task_id=task_id, context_id=context_id, status=TaskStatus( - state=TaskState.working, + state=TaskState.TASK_STATE_WORKING, message=Message( - role='agent', + role=Role.ROLE_AGENT, message_id=str(uuid.uuid4()), - parts=[TextPart(text='Processing your question')], + parts=[Part(text='Processing your question')], task_id=task_id, context_id=context_id, ), - timestamp=datetime.now(timezone.utc).isoformat(), + timestamp=datetime.now(timezone.utc), ), - final=False, ) await event_queue.enqueue_event(working_status) @@ -105,9 +121,9 @@ async def execute( logger.info('[SUTAgentExecutor] Response: %s', agent_reply_text) agent_message = Message( - role='agent', + role=Role.ROLE_AGENT, message_id=str(uuid.uuid4()), - parts=[TextPart(text=agent_reply_text)], + parts=[Part(text=agent_reply_text)], task_id=task_id, context_id=context_id, ) @@ -116,11 +132,10 @@ async def execute( task_id=task_id, context_id=context_id, status=TaskStatus( - state=TaskState.input_required, + state=TaskState.TASK_STATE_INPUT_REQUIRED, message=agent_message, - timestamp=datetime.now(timezone.utc).isoformat(), + timestamp=datetime.now(timezone.utc), ), - final=True, ) await event_queue.enqueue_event(final_update) @@ -129,58 +144,106 @@ def serve(task_store: TaskStore) -> None: """Sets up the A2A service and starts the HTTP server.""" http_port = int(os.environ.get('HTTP_PORT', '41241')) + grpc_port = int(os.environ.get('GRPC_PORT', '50051')) + agent_card = AgentCard( name='SUT Agent', description='An agent to be used as SUT against TCK tests.', - url=f'http://localhost:{http_port}{JSONRPC_URL}', + supported_interfaces=[ + AgentInterface( + url=f'http://localhost:{http_port}{JSONRPC_URL}', + protocol_binding='JSONRPC', + protocol_version='1.0.0', + ), + AgentInterface( + url=f'http://localhost:{http_port}{REST_URL}', + protocol_binding='REST', + protocol_version='1.0.0', + ), + AgentInterface( + url=f'http://localhost:{grpc_port}', + protocol_binding='GRPC', + protocol_version='1.0.0', + ), + ], provider=AgentProvider( organization='A2A Samples', url='https://example.com/a2a-samples', ), version='1.0.0', - protocol_version='0.3.0', capabilities=AgentCapabilities( streaming=True, push_notifications=False, - state_transition_history=True, ), default_input_modes=['text'], default_output_modes=['text', 'task-status'], skills=[ - { - 'id': 'sut_agent', - 'name': 'SUT Agent', - 'description': 'Simulate the general flow of a streaming agent.', - 'tags': ['sut'], - 'examples': ['hi', 'hello world', 'how are you', 'goodbye'], - 'input_modes': ['text'], - 'output_modes': ['text', 'task-status'], - } - ], - supports_authenticated_extended_card=False, - preferred_transport='JSONRPC', - additional_interfaces=[ - { - 'url': f'http://localhost:{http_port}{JSONRPC_URL}', - 'transport': 'JSONRPC', - }, + AgentSkill( + id='sut_agent', + name='SUT Agent', + description='Simulate the general flow of a streaming agent.', + tags=['sut'], + examples=['hi', 'hello world', 'how are you', 'goodbye'], + input_modes=['text'], + output_modes=['text', 'task-status'], + ) ], ) request_handler = DefaultRequestHandler( + agent_card=agent_card, agent_executor=SUTAgentExecutor(), task_store=task_store, ) - server = A2AStarletteApplication( + # JSONRPC + jsonrpc_routes = create_jsonrpc_routes( + request_handler=request_handler, + rpc_url=JSONRPC_URL, + ) + # Agent Card + agent_card_routes = create_agent_card_routes( agent_card=agent_card, - http_handler=request_handler, + ) + # REST + rest_routes = create_rest_routes( + request_handler=request_handler, + path_prefix=REST_URL, ) - app = server.build(rpc_url=JSONRPC_URL) + routes = [ + *jsonrpc_routes, + *agent_card_routes, + *rest_routes, + ] + main_app = Starlette(routes=routes) - logger.info('Starting HTTP server on port %s...', http_port) - uvicorn.run(app, host='127.0.0.1', port=http_port, log_level='info') + config = uvicorn.Config( + main_app, host='127.0.0.1', port=http_port, log_level='info' + ) + uvicorn_server = uvicorn.Server(config) + + # GRPC + grpc_server = grpc.aio.server() + grpc_server.add_insecure_port(f'[::]:{grpc_port}') + servicer = GrpcHandler(request_handler) + compat_servicer = CompatGrpcHandler(request_handler) + a2a_grpc.add_A2AServiceServicer_to_server(servicer, grpc_server) + a2a_v0_3_grpc.add_A2AServiceServicer_to_server(compat_servicer, grpc_server) + + logger.info( + 'Starting HTTP server on port %s and gRPC on port %s...', + http_port, + grpc_port, + ) + + loop = asyncio.get_event_loop() + loop.run_until_complete(grpc_server.start()) + loop.run_until_complete( + asyncio.gather( + uvicorn_server.serve(), grpc_server.wait_for_termination() + ) + ) def main() -> None: diff --git a/tests/README.md b/tests/README.md index 6c70551c7..f16379b19 100644 --- a/tests/README.md +++ b/tests/README.md @@ -53,7 +53,7 @@ ``` (Follow the onscreen instructions to export DSNs and run pytest manually). -In case of failures, you can clean up the cache: +In case of failures, you can clean up the cache: 1. `uv clean` 2. `rm -fR .pytest_cache .venv __pycache__` diff --git a/tests/__init__.py b/tests/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/client/test_auth_middleware.py b/tests/client/test_auth_interceptor.py similarity index 69% rename from tests/client/test_auth_middleware.py rename to tests/client/test_auth_interceptor.py index c41b45017..11d932090 100644 --- a/tests/client/test_auth_middleware.py +++ b/tests/client/test_auth_interceptor.py @@ -1,3 +1,4 @@ +# ruff: noqa: INP001, S106 import json from collections.abc import Callable @@ -8,67 +9,53 @@ import pytest import respx +from google.protobuf import json_format + from a2a.client import ( AuthInterceptor, Client, ClientCallContext, - ClientCallInterceptor, ClientConfig, ClientFactory, InMemoryContextCredentialStore, ) -from a2a.types import ( +from a2a.client.interceptors import BeforeArgs +from a2a.types.a2a_pb2 import ( APIKeySecurityScheme, AgentCapabilities, AgentCard, + AgentInterface, AuthorizationCodeOAuthFlow, HTTPAuthSecurityScheme, - In, Message, OAuth2SecurityScheme, OAuthFlows, OpenIdConnectSecurityScheme, Role, + SecurityRequirement, SecurityScheme, - SendMessageSuccessResponse, - TransportProtocol, + SendMessageRequest, + SendMessageResponse, + StringList, ) - - -class HeaderInterceptor(ClientCallInterceptor): - """A simple mock interceptor for testing basic middleware functionality.""" - - def __init__(self, header_name: str, header_value: str): - self.header_name = header_name - self.header_value = header_value - - async def intercept( - self, - method_name: str, - request_payload: dict[str, Any], - http_kwargs: dict[str, Any], - agent_card: AgentCard | None, - context: ClientCallContext | None, - ) -> tuple[dict[str, Any], dict[str, Any]]: - headers = http_kwargs.get('headers', {}) - headers[self.header_name] = self.header_value - http_kwargs['headers'] = headers - return request_payload, http_kwargs +from a2a.utils.constants import TransportProtocol def build_success_response(request: httpx.Request) -> httpx.Response: """Creates a valid JSON-RPC success response based on the request.""" + request_payload = json.loads(request.content) - response_payload = SendMessageSuccessResponse( - id=request_payload['id'], - jsonrpc='2.0', - result=Message( - kind='message', - message_id='message-id', - role=Role.agent, - parts=[], - ), - ).model_dump(mode='json') + message = Message( + message_id='message-id', + role=Role.ROLE_AGENT, + parts=[], + ) + response = SendMessageResponse(message=message) + response_payload = { + 'id': request_payload['id'], + 'jsonrpc': '2.0', + 'result': json_format.MessageToDict(response), + } return httpx.Response(200, json=response_payload) @@ -76,7 +63,7 @@ def build_message() -> Message: """Builds a minimal Message.""" return Message( message_id='msg1', - role=Role.user, + role=Role.ROLE_USER, parts=[], ) @@ -91,8 +78,9 @@ async def send_message( context = ClientCallContext( state={'sessionId': session_id} if session_id else {} ) + request = SendMessageRequest(message=build_message()) async for _ in client.send_message( - request=build_message(), + request=request, context=context, ): pass @@ -110,19 +98,18 @@ async def test_auth_interceptor_skips_when_no_agent_card( store: InMemoryContextCredentialStore, ) -> None: """Tests that the AuthInterceptor does not modify the request when no AgentCard is provided.""" - request_payload = {'foo': 'bar'} - http_kwargs = {'fizz': 'buzz'} auth_interceptor = AuthInterceptor(credential_service=store) - - new_payload, new_kwargs = await auth_interceptor.intercept( - method_name='message/send', - request_payload=request_payload, - http_kwargs=http_kwargs, - agent_card=None, - context=ClientCallContext(state={}), + request = SendMessageRequest(message=Message()) + context = ClientCallContext(state={}) + args = BeforeArgs( + input=request, + method='send_message', + agent_card=AgentCard(), + context=context, ) - assert new_payload == request_payload - assert new_kwargs == http_kwargs + + await auth_interceptor.before(args) + assert context.service_parameters is None @pytest.mark.asyncio @@ -162,34 +149,17 @@ async def test_in_memory_context_credential_store( assert await store.get_credentials(scheme_name, context) == new_credential -@pytest.mark.asyncio -@respx.mock -async def test_client_with_simple_interceptor() -> None: - """Ensures that a custom HeaderInterceptor correctly injects a static header into outbound HTTP requests from the A2AClient.""" - url = 'http://agent.com/rpc' - interceptor = HeaderInterceptor('X-Test-Header', 'Test-Value-123') - card = AgentCard( - url=url, - name='testbot', - description='test bot', - version='1.0', - default_input_modes=[], - default_output_modes=[], - skills=[], - capabilities=AgentCapabilities(), - preferred_transport=TransportProtocol.jsonrpc, - ) - - async with httpx.AsyncClient() as http_client: - config = ClientConfig( - httpx_client=http_client, - supported_transports=[TransportProtocol.jsonrpc], - ) - factory = ClientFactory(config) - client = factory.create(card, interceptors=[interceptor]) - - request = await send_message(client, url) - assert request.headers['x-test-header'] == 'Test-Value-123' +def wrap_security_scheme(scheme: Any) -> SecurityScheme: + """Wraps a security scheme in the correct SecurityScheme proto field.""" + if isinstance(scheme, APIKeySecurityScheme): + return SecurityScheme(api_key_security_scheme=scheme) + if isinstance(scheme, HTTPAuthSecurityScheme): + return SecurityScheme(http_auth_security_scheme=scheme) + if isinstance(scheme, OAuth2SecurityScheme): + return SecurityScheme(oauth2_security_scheme=scheme) + if isinstance(scheme, OpenIdConnectSecurityScheme): + return SecurityScheme(open_id_connect_security_scheme=scheme) + raise ValueError(f'Unknown security scheme type: {type(scheme)}') @dataclass @@ -218,9 +188,8 @@ class AuthTestCase: scheme_name='apikey', credential='secret-api-key', security_scheme=APIKeySecurityScheme( - type='apiKey', name='X-API-Key', - in_=In.header, + location='header', ), expected_header_key='x-api-key', expected_header_value_func=lambda c: c, @@ -233,12 +202,10 @@ class AuthTestCase: scheme_name='oauth2', credential='secret-oauth-access-token', security_scheme=OAuth2SecurityScheme( - type='oauth2', flows=OAuthFlows( authorization_code=AuthorizationCodeOAuthFlow( authorization_url='http://provider.com/auth', token_url='http://provider.com/token', - scopes={'read': 'Read scope'}, ) ), ), @@ -253,7 +220,6 @@ class AuthTestCase: scheme_name='oidc', credential='secret-oidc-id-token', security_scheme=OpenIdConnectSecurityScheme( - type='openIdConnect', open_id_connect_url='http://provider.com/.well-known/openid-configuration', ), expected_header_key='Authorization', @@ -274,6 +240,7 @@ class AuthTestCase: ) +@pytest.mark.skip(reason='Interceptors disabled by user request') @pytest.mark.asyncio @pytest.mark.parametrize( 'test_case', @@ -289,7 +256,11 @@ async def test_auth_interceptor_variants( ) auth_interceptor = AuthInterceptor(credential_service=store) agent_card = AgentCard( - url=test_case.url, + supported_interfaces=[ + AgentInterface( + url=test_case.url, protocol_binding=TransportProtocol.JSONRPC + ) + ], name=f'{test_case.scheme_name}bot', description=f'A bot that uses {test_case.scheme_name}', version='1.0', @@ -297,19 +268,20 @@ async def test_auth_interceptor_variants( default_output_modes=[], skills=[], capabilities=AgentCapabilities(), - security=[{test_case.scheme_name: []}], + security_requirements=[ + SecurityRequirement(schemes={test_case.scheme_name: StringList()}) + ], security_schemes={ - test_case.scheme_name: SecurityScheme( - root=test_case.security_scheme + test_case.scheme_name: wrap_security_scheme( + test_case.security_scheme ) }, - preferred_transport=TransportProtocol.jsonrpc, ) async with httpx.AsyncClient() as http_client: config = ClientConfig( httpx_client=http_client, - supported_transports=[TransportProtocol.jsonrpc], + supported_protocol_bindings=[TransportProtocol.JSONRPC], ) factory = ClientFactory(config) client = factory.create(agent_card, interceptors=[auth_interceptor]) @@ -329,13 +301,16 @@ async def test_auth_interceptor_skips_when_scheme_not_in_security_schemes( """Tests that AuthInterceptor skips a scheme if it's listed in security requirements but not defined in security_schemes.""" scheme_name = 'missing' session_id = 'session-id' - credential = 'dummy-token' - request_payload = {'foo': 'bar'} - http_kwargs = {'fizz': 'buzz'} + credential = 'test-token' await store.set_credentials(session_id, scheme_name, credential) auth_interceptor = AuthInterceptor(credential_service=store) agent_card = AgentCard( - url='http://agent.com/rpc', + supported_interfaces=[ + AgentInterface( + url='http://agent.com/rpc', + protocol_binding=TransportProtocol.JSONRPC, + ) + ], name='missingbot', description='A bot that uses missing scheme definition', version='1.0', @@ -343,16 +318,19 @@ async def test_auth_interceptor_skips_when_scheme_not_in_security_schemes( default_output_modes=[], skills=[], capabilities=AgentCapabilities(), - security=[{scheme_name: []}], + security_requirements=[ + SecurityRequirement(schemes={scheme_name: StringList()}) + ], security_schemes={}, ) - - new_payload, new_kwargs = await auth_interceptor.intercept( - method_name='message/send', - request_payload=request_payload, - http_kwargs=http_kwargs, + request = SendMessageRequest(message=Message()) + context = ClientCallContext(state={'sessionId': session_id}) + args = BeforeArgs( + input=request, + method='send_message', agent_card=agent_card, - context=ClientCallContext(state={'sessionId': session_id}), + context=context, ) - assert new_payload == request_payload - assert new_kwargs == http_kwargs + + await auth_interceptor.before(args) + assert context.service_parameters is None diff --git a/tests/client/test_base_client.py b/tests/client/test_base_client.py index 4fd6ff9c5..ed49469a7 100644 --- a/tests/client/test_base_client.py +++ b/tests/client/test_base_client.py @@ -5,17 +5,31 @@ from a2a.client.base_client import BaseClient from a2a.client.client import ClientConfig from a2a.client.transports.base import ClientTransport -from a2a.types import ( +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, + AgentInterface, + CancelTaskRequest, + TaskPushNotificationConfig, + DeleteTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, Message, - MessageSendConfiguration, Part, Role, + SendMessageConfiguration, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, Task, + TaskPushNotificationConfig, TaskState, TaskStatus, - TextPart, ) @@ -29,7 +43,9 @@ def sample_agent_card() -> AgentCard: return AgentCard( name='Test Agent', description='An agent for testing', - url='http://test.com', + supported_interfaces=[ + AgentInterface(url='http://test.com', protocol_binding='HTTP+JSON') + ], version='1.0', capabilities=AgentCapabilities(streaming=True), default_input_modes=['text/plain'], @@ -41,9 +57,9 @@ def sample_agent_card() -> AgentCard: @pytest.fixture def sample_message() -> Message: return Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg-1', - parts=[Part(root=TextPart(text='Hello'))], + parts=[Part(text='Hello')], ) @@ -56,194 +72,216 @@ def base_client( card=sample_agent_card, config=config, transport=mock_transport, - consumers=[], - middleware=[], + interceptors=[], ) -@pytest.mark.asyncio -async def test_transport_async_context_manager() -> None: - with ( - patch.object(ClientTransport, '__abstractmethods__', set()), - patch.object(ClientTransport, 'close', new_callable=AsyncMock), - ): - transport = ClientTransport() - async with transport as t: - assert t is transport - transport.close.assert_not_awaited() - transport.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_transport_async_context_manager_on_exception() -> None: - with ( - patch.object(ClientTransport, '__abstractmethods__', set()), - patch.object(ClientTransport, 'close', new_callable=AsyncMock), - ): - transport = ClientTransport() +class TestClientTransport: + @pytest.mark.asyncio + async def test_transport_async_context_manager(self) -> None: + with ( + patch.object(ClientTransport, '__abstractmethods__', set()), + patch.object(ClientTransport, 'close', new_callable=AsyncMock), + ): + transport = ClientTransport() + async with transport as t: + assert t is transport + transport.close.assert_not_awaited() + transport.close.assert_awaited_once() + + @pytest.mark.asyncio + async def test_transport_async_context_manager_on_exception(self) -> None: + with ( + patch.object(ClientTransport, '__abstractmethods__', set()), + patch.object(ClientTransport, 'close', new_callable=AsyncMock), + ): + transport = ClientTransport() + with pytest.raises(RuntimeError, match='boom'): + async with transport: + raise RuntimeError('boom') + transport.close.assert_awaited_once() + + @pytest.mark.asyncio + async def test_base_client_async_context_manager( + self, base_client: BaseClient, mock_transport: AsyncMock + ) -> None: + async with base_client as client: + assert client is base_client + mock_transport.close.assert_not_awaited() + mock_transport.close.assert_awaited_once() + + @pytest.mark.asyncio + async def test_base_client_async_context_manager_on_exception( + self, base_client: BaseClient, mock_transport: AsyncMock + ) -> None: with pytest.raises(RuntimeError, match='boom'): - async with transport: + async with base_client: raise RuntimeError('boom') - transport.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_base_client_async_context_manager( - base_client: BaseClient, mock_transport: AsyncMock -) -> None: - async with base_client as client: - assert client is base_client - mock_transport.close.assert_not_awaited() - mock_transport.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_base_client_async_context_manager_on_exception( - base_client: BaseClient, mock_transport: AsyncMock -) -> None: - with pytest.raises(RuntimeError, match='boom'): - async with base_client: - raise RuntimeError('boom') - mock_transport.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_send_message_streaming( - base_client: BaseClient, mock_transport: MagicMock, sample_message: Message -) -> None: - async def create_stream(*args, **kwargs): - yield Task( - id='task-123', - context_id='ctx-456', - status=TaskStatus(state=TaskState.completed), + mock_transport.close.assert_awaited_once() + + @pytest.mark.asyncio + async def test_send_message_streaming( + self, + base_client: BaseClient, + mock_transport: MagicMock, + sample_message: Message, + ) -> None: + async def create_stream(*args, **kwargs): + task = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + stream_response = StreamResponse() + stream_response.task.CopyFrom(task) + yield stream_response + + mock_transport.send_message_streaming.return_value = create_stream() + + meta = {'test': 1} + request = SendMessageRequest(message=sample_message, metadata=meta) + stream = base_client.send_message(request) + events = [event async for event in stream] + + mock_transport.send_message_streaming.assert_called_once() + assert ( + mock_transport.send_message_streaming.call_args[0][0].metadata + == meta ) - - mock_transport.send_message_streaming.return_value = create_stream() - - meta = {'test': 1} - stream = base_client.send_message(sample_message, request_metadata=meta) - events = [event async for event in stream] - - mock_transport.send_message_streaming.assert_called_once() - assert ( - mock_transport.send_message_streaming.call_args[0][0].metadata == meta - ) - assert not mock_transport.send_message.called - assert len(events) == 1 - assert events[0][0].id == 'task-123' - - -@pytest.mark.asyncio -async def test_send_message_non_streaming( - base_client: BaseClient, mock_transport: MagicMock, sample_message: Message -) -> None: - base_client._config.streaming = False - mock_transport.send_message.return_value = Task( - id='task-456', - context_id='ctx-789', - status=TaskStatus(state=TaskState.completed), - ) - - meta = {'test': 1} - stream = base_client.send_message(sample_message, request_metadata=meta) - events = [event async for event in stream] - - mock_transport.send_message.assert_called_once() - assert mock_transport.send_message.call_args[0][0].metadata == meta - assert not mock_transport.send_message_streaming.called - assert len(events) == 1 - assert events[0][0].id == 'task-456' - - -@pytest.mark.asyncio -async def test_send_message_non_streaming_agent_capability_false( - base_client: BaseClient, mock_transport: MagicMock, sample_message: Message -) -> None: - base_client._card.capabilities.streaming = False - mock_transport.send_message.return_value = Task( - id='task-789', - context_id='ctx-101', - status=TaskStatus(state=TaskState.completed), - ) - - events = [event async for event in base_client.send_message(sample_message)] - - mock_transport.send_message.assert_called_once() - assert not mock_transport.send_message_streaming.called - assert len(events) == 1 - assert events[0][0].id == 'task-789' - - -@pytest.mark.asyncio -async def test_send_message_callsite_config_overrides_non_streaming( - base_client: BaseClient, mock_transport: MagicMock, sample_message: Message -): - base_client._config.streaming = False - mock_transport.send_message.return_value = Task( - id='task-cfg-ns-1', - context_id='ctx-cfg-ns-1', - status=TaskStatus(state=TaskState.completed), - ) - - cfg = MessageSendConfiguration( - history_length=2, - blocking=False, - accepted_output_modes=['application/json'], - ) - events = [ - event - async for event in base_client.send_message( - sample_message, configuration=cfg + assert not mock_transport.send_message.called + assert len(events) == 1 + response = events[0] + assert response.task.id == 'task-123' + + @pytest.mark.asyncio + async def test_send_message_non_streaming( + self, + base_client: BaseClient, + mock_transport: MagicMock, + sample_message: Message, + ) -> None: + base_client._config.streaming = False + task = Task( + id='task-456', + context_id='ctx-789', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), ) - ] - - mock_transport.send_message.assert_called_once() - assert not mock_transport.send_message_streaming.called - assert len(events) == 1 - task, _ = events[0] - assert task.id == 'task-cfg-ns-1' - - params = mock_transport.send_message.call_args[0][0] - assert params.configuration.history_length == 2 - assert params.configuration.blocking is False - assert params.configuration.accepted_output_modes == ['application/json'] - - -@pytest.mark.asyncio -async def test_send_message_callsite_config_overrides_streaming( - base_client: BaseClient, mock_transport: MagicMock, sample_message: Message -): - base_client._config.streaming = True - base_client._card.capabilities.streaming = True - - async def create_stream(*args, **kwargs): - yield Task( - id='task-cfg-s-1', - context_id='ctx-cfg-s-1', - status=TaskStatus(state=TaskState.completed), + response = SendMessageResponse() + response.task.CopyFrom(task) + mock_transport.send_message.return_value = response + + meta = {'test': 1} + request = SendMessageRequest(message=sample_message, metadata=meta) + stream = base_client.send_message(request) + events = [event async for event in stream] + + mock_transport.send_message.assert_called_once() + assert mock_transport.send_message.call_args[0][0].metadata == meta + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + response = events[0] + assert response.task.id == 'task-456' + + @pytest.mark.asyncio + async def test_send_message_non_streaming_agent_capability_false( + self, + base_client: BaseClient, + mock_transport: MagicMock, + sample_message: Message, + ) -> None: + base_client._card.capabilities.streaming = False + task = Task( + id='task-789', + context_id='ctx-101', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), ) - - mock_transport.send_message_streaming.return_value = create_stream() - - cfg = MessageSendConfiguration( - history_length=0, - blocking=True, - accepted_output_modes=['text/plain'], - ) - events = [ - event - async for event in base_client.send_message( - sample_message, configuration=cfg + response = SendMessageResponse() + response.task.CopyFrom(task) + mock_transport.send_message.return_value = response + + request = SendMessageRequest(message=sample_message) + events = [event async for event in base_client.send_message(request)] + + mock_transport.send_message.assert_called_once() + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + response = events[0] + assert response.task.id == 'task-789' + + @pytest.mark.asyncio + async def test_send_message_callsite_config_overrides_non_streaming( + self, + base_client: BaseClient, + mock_transport: MagicMock, + sample_message: Message, + ): + base_client._config.streaming = False + task = Task( + id='task-cfg-ns-1', + context_id='ctx-cfg-ns-1', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + response = SendMessageResponse() + response.task.CopyFrom(task) + mock_transport.send_message.return_value = response + + cfg = SendMessageConfiguration( + history_length=2, + return_immediately=True, + accepted_output_modes=['application/json'], + ) + request = SendMessageRequest(message=sample_message, configuration=cfg) + events = [event async for event in base_client.send_message(request)] + + mock_transport.send_message.assert_called_once() + assert not mock_transport.send_message_streaming.called + assert len(events) == 1 + response = events[0] + assert response.task.id == 'task-cfg-ns-1' + + params = mock_transport.send_message.call_args[0][0] + assert params.configuration.history_length == 2 + assert params.configuration.return_immediately is True + assert params.configuration.accepted_output_modes == [ + 'application/json' + ] + + @pytest.mark.asyncio + async def test_send_message_callsite_config_overrides_streaming( + self, + base_client: BaseClient, + mock_transport: MagicMock, + sample_message: Message, + ): + base_client._config.streaming = True + base_client._card.capabilities.streaming = True + + async def create_stream(*args, **kwargs): + task = Task( + id='task-cfg-s-1', + context_id='ctx-cfg-s-1', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + stream_response = StreamResponse() + stream_response.task.CopyFrom(task) + yield stream_response + + mock_transport.send_message_streaming.return_value = create_stream() + + cfg = SendMessageConfiguration( + history_length=0, + accepted_output_modes=['text/plain'], ) - ] - - mock_transport.send_message_streaming.assert_called_once() - assert not mock_transport.send_message.called - assert len(events) == 1 - task, _ = events[0] - assert task.id == 'task-cfg-s-1' - - params = mock_transport.send_message_streaming.call_args[0][0] - assert params.configuration.history_length == 0 - assert params.configuration.blocking is True - assert params.configuration.accepted_output_modes == ['text/plain'] + request = SendMessageRequest(message=sample_message, configuration=cfg) + events = [event async for event in base_client.send_message(request)] + + mock_transport.send_message_streaming.assert_called_once() + assert not mock_transport.send_message.called + assert len(events) == 1 + response = events[0] + assert response.task.id == 'task-cfg-s-1' + + params = mock_transport.send_message_streaming.call_args[0][0] + assert params.configuration.history_length == 0 + assert params.configuration.return_immediately is False + assert params.configuration.accepted_output_modes == ['text/plain'] diff --git a/tests/client/test_base_client_interceptors.py b/tests/client/test_base_client_interceptors.py new file mode 100644 index 000000000..d7930062f --- /dev/null +++ b/tests/client/test_base_client_interceptors.py @@ -0,0 +1,240 @@ +# ruff: noqa: INP001 +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from a2a.client.base_client import BaseClient +from a2a.client.client import ClientConfig +from a2a.client.interceptors import ( + AfterArgs, + BeforeArgs, + ClientCallInterceptor, +) +from a2a.client.transports.base import ClientTransport +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Message, + StreamResponse, +) + + +@pytest.fixture +def mock_transport() -> AsyncMock: + return AsyncMock(spec=ClientTransport) + + +@pytest.fixture +def sample_agent_card() -> AgentCard: + return AgentCard( + name='Test Agent', + description='An agent for testing', + supported_interfaces=[ + AgentInterface(url='http://test.com', protocol_binding='HTTP+JSON') + ], + version='1.0', + capabilities=AgentCapabilities(streaming=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[], + ) + + +@pytest.fixture +def mock_interceptor() -> AsyncMock: + return AsyncMock(spec=ClientCallInterceptor) + + +@pytest.fixture +def base_client( + sample_agent_card: AgentCard, + mock_transport: AsyncMock, + mock_interceptor: AsyncMock, +) -> BaseClient: + config = ClientConfig(streaming=True) + return BaseClient( + card=sample_agent_card, + config=config, + transport=mock_transport, + interceptors=[mock_interceptor], + ) + + +class TestBaseClientInterceptors: + @pytest.mark.asyncio + async def test_execute_with_interceptors_normal_flow( + self, + base_client: BaseClient, + mock_interceptor: AsyncMock, + ): + input_data = MagicMock() + method = 'get_task' + context = MagicMock() + mock_transport_call = AsyncMock(return_value='transport_result') + + # Set up mock interceptor to just pass through + mock_interceptor.before.return_value = None + + result = await base_client._execute_with_interceptors( + input_data=input_data, + method=method, + context=context, + transport_call=mock_transport_call, + ) + + assert result == 'transport_result' + + # Verify before was called + mock_interceptor.before.assert_called_once() + before_args = mock_interceptor.before.call_args[0][0] + assert isinstance(before_args, BeforeArgs) + assert before_args.input == input_data + assert before_args.context == context + + # Verify transport call was made + mock_transport_call.assert_called_once_with(input_data, context) + + # Verify after was called + mock_interceptor.after.assert_called_once() + after_args = mock_interceptor.after.call_args[0][0] + assert isinstance(after_args, AfterArgs) + assert after_args.method == method + assert after_args.result == 'transport_result' + assert after_args.context == context + + @pytest.mark.asyncio + async def test_execute_with_interceptors_early_return( + self, + base_client: BaseClient, + mock_interceptor: AsyncMock, + ): + input_data = MagicMock() + method = 'get_task' + context = MagicMock() + mock_transport_call = AsyncMock() + + # Set up early return in before + early_return_result = 'early_result' + + async def mock_before_with_early_return(args: BeforeArgs): + args.early_return = early_return_result + + mock_interceptor.before.side_effect = mock_before_with_early_return + + result = await base_client._execute_with_interceptors( + input_data=input_data, + method=method, + context=context, + transport_call=mock_transport_call, + ) + + assert result == 'early_result' + + # Verify before was called + mock_interceptor.before.assert_called_once() + + # Verify transport call was NOT made + mock_transport_call.assert_not_called() + + # Verify after was called with early return value + mock_interceptor.after.assert_called_once() + after_args = mock_interceptor.after.call_args[0][0] + assert isinstance(after_args, AfterArgs) + assert after_args.result == 'early_result' + assert after_args.context == context + + @pytest.mark.asyncio + async def test_execute_stream_with_interceptors_normal_flow( + self, + base_client: BaseClient, + mock_interceptor: AsyncMock, + ): + input_data = MagicMock() + method = 'send_message_streaming' + context = MagicMock() + + async def mock_transport_call(*args, **kwargs): + yield StreamResponse(message=Message(message_id='1')) + + # Set up mock interceptor to just pass through + mock_interceptor.before.return_value = None + + events = [ + e + async for e in base_client._execute_stream_with_interceptors( + input_data=input_data, + method=method, + context=context, + transport_call=mock_transport_call, + ) + ] + + assert len(events) == 1 + + # Verify before was called + mock_interceptor.before.assert_called_once() + before_args = mock_interceptor.before.call_args[0][0] + assert isinstance(before_args, BeforeArgs) + assert before_args.input == input_data + assert before_args.context == context + + # Verify after was called + mock_interceptor.after.assert_called_once() + after_args = mock_interceptor.after.call_args[0][0] + assert isinstance(after_args, AfterArgs) + assert after_args.method == method + + @pytest.mark.asyncio + async def test_execute_stream_with_interceptors_early_return( + self, + base_client: BaseClient, + mock_interceptor: AsyncMock, + ): + input_data = MagicMock() + method = 'send_message_streaming' + context = MagicMock() + mock_transport_call = AsyncMock() + + # Set up early return in before + early_return_result = StreamResponse(message=Message(message_id='2')) + + async def mock_before_with_early_return(args: BeforeArgs): + args.early_return = early_return_result + return { + 'early_return': early_return_result, + 'executed': [mock_interceptor], + } + + mock_interceptor.before.side_effect = mock_before_with_early_return + + # Override BaseClient's _intercept_before to respect our early return setup + # as the test's mock interceptor replaces the actual list items + base_client._intercept_before = AsyncMock( # type: ignore + return_value={ + 'early_return': early_return_result, + 'executed': [mock_interceptor], + } + ) + + events = [ + e + async for e in base_client._execute_stream_with_interceptors( + input_data=input_data, + method=method, + context=context, + transport_call=mock_transport_call, + ) + ] + + assert len(events) == 1 + + # Verify transport call was NOT made + mock_transport_call.assert_not_called() + + # Verify after was called with early return value + mock_interceptor.after.assert_called_once() + after_args = mock_interceptor.after.call_args[0][0] + assert isinstance(after_args, AfterArgs) + assert after_args.method == method + assert after_args.context == context diff --git a/tests/client/test_card_resolver.py b/tests/client/test_card_resolver.py index 26f3f106d..ff60632ad 100644 --- a/tests/client/test_card_resolver.py +++ b/tests/client/test_card_resolver.py @@ -1,13 +1,35 @@ +import copy +import difflib import json import logging +from unittest.mock import AsyncMock, MagicMock, Mock -from unittest.mock import AsyncMock, MagicMock, Mock, patch - +from google.protobuf.json_format import MessageToDict import httpx import pytest -from a2a.client import A2ACardResolver, A2AClientHTTPError, A2AClientJSONError +from a2a.client import A2ACardResolver, AgentCardResolutionError +from a2a.client.card_resolver import parse_agent_card +from a2a.server.request_handlers.response_helpers import agent_card_to_dict from a2a.types import AgentCard +from a2a.types.a2a_pb2 import ( + APIKeySecurityScheme, + AgentCapabilities, + AgentCardSignature, + AgentInterface, + AgentProvider, + AgentSkill, + AuthorizationCodeOAuthFlow, + HTTPAuthSecurityScheme, + MutualTlsSecurityScheme, + OAuth2SecurityScheme, + OAuthFlows, + OpenIdConnectSecurityScheme, + Role, + SecurityRequirement, + SecurityScheme, + StringList, +) from a2a.utils import AGENT_CARD_WELL_KNOWN_PATH @@ -47,7 +69,12 @@ def valid_agent_card_data(): 'name': 'TestAgent', 'description': 'A test agent', 'version': '1.0.0', - 'url': 'https://example.com/a2a', + 'supported_interfaces': [ + { + 'url': 'https://example.com/a2a', + 'protocol_binding': 'HTTP+JSON', + } + ], 'capabilities': {}, 'default_input_modes': ['text/plain'], 'default_output_modes': ['text/plain'], @@ -115,17 +142,14 @@ async def test_get_agent_card_success_default_path( mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ) as mock_validate: - result = await resolver.get_agent_card() - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', - ) - mock_response.raise_for_status.assert_called_once() - mock_response.json.assert_called_once() - mock_validate.assert_called_once_with(valid_agent_card_data) - assert result is not None + result = await resolver.get_agent_card() + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) + mock_response.raise_for_status.assert_called_once() + mock_response.json.assert_called_once() + assert result is not None + assert isinstance(result, AgentCard) @pytest.mark.asyncio async def test_get_agent_card_success_custom_path( @@ -140,14 +164,11 @@ async def test_get_agent_card_success_custom_path( custom_path = 'custom/path/card' mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(relative_card_path=custom_path) + await resolver.get_agent_card(relative_card_path=custom_path) - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{custom_path}', - ) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{custom_path}', + ) @pytest.mark.asyncio async def test_get_agent_card_strips_leading_slash_from_relative_path( @@ -162,14 +183,11 @@ async def test_get_agent_card_strips_leading_slash_from_relative_path( custom_path = '/custom/path/card' mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(relative_card_path=custom_path) + await resolver.get_agent_card(relative_card_path=custom_path) - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{custom_path[1:]}', - ) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{custom_path[1:]}', + ) @pytest.mark.asyncio async def test_get_agent_card_with_http_kwargs( @@ -187,15 +205,12 @@ async def test_get_agent_card_with_http_kwargs( 'timeout': 30, 'headers': {'Authorization': 'Bearer token'}, } - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(http_kwargs=http_kwargs) - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', - timeout=30, - headers={'Authorization': 'Bearer token'}, - ) + await resolver.get_agent_card(http_kwargs=http_kwargs) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + timeout=30, + headers={'Authorization': 'Bearer token'}, + ) @pytest.mark.asyncio async def test_get_agent_card_root_path( @@ -209,11 +224,24 @@ async def test_get_agent_card_root_path( """Test fetching agent card from root path.""" mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(relative_card_path='/') - mock_httpx_client.get.assert_called_once_with(f'{base_url}/') + await resolver.get_agent_card(relative_card_path='/') + mock_httpx_client.get.assert_called_once_with(f'{base_url}') + + @pytest.mark.asyncio + async def test_get_agent_card_with_empty_resolver_agent_card_path( + self, + base_url, + resolver, + mock_httpx_client, + mock_response, + valid_agent_card_data, + ): + """Test fetching agent card when the resolver's agent_card_path is empty.""" + resolver.agent_card_path = '' + mock_response.json.return_value = valid_agent_card_data + mock_httpx_client.get.return_value = mock_response + await resolver.get_agent_card() + mock_httpx_client.get.assert_called_once_with(f'{base_url}') @pytest.mark.asyncio async def test_get_agent_card_http_status_error( @@ -228,10 +256,11 @@ async def test_get_agent_card_http_status_error( ) mock_httpx_client.get.return_value = mock_response - with pytest.raises(A2AClientHTTPError) as exc_info: + with pytest.raises(AgentCardResolutionError) as exc_info: await resolver.get_agent_card() assert exc_info.value.status_code == status_code + assert f'HTTP {status_code}' in str(exc_info.value) assert 'Failed to fetch agent card' in str(exc_info.value) @pytest.mark.asyncio @@ -243,7 +272,7 @@ async def test_get_agent_card_json_decode_error( 'Invalid JSON', '', 0 ) mock_httpx_client.get.return_value = mock_response - with pytest.raises(A2AClientJSONError) as exc_info: + with pytest.raises(AgentCardResolutionError) as exc_info: await resolver.get_agent_card() assert 'Failed to parse JSON' in str(exc_info.value) @@ -255,9 +284,8 @@ async def test_get_agent_card_request_error( mock_httpx_client.get.side_effect = httpx.RequestError( 'Connection timeout', request=Mock() ) - with pytest.raises(A2AClientHTTPError) as exc_info: + with pytest.raises(AgentCardResolutionError) as exc_info: await resolver.get_agent_card() - assert exc_info.value.status_code == 503 assert 'Network communication error' in str(exc_info.value) @pytest.mark.asyncio @@ -270,14 +298,14 @@ async def test_get_agent_card_validation_error( valid_agent_card_data, ): """Test A2AClientJSONError is raised on agent card validation error.""" - return_json = {'invalid': 'data'} + return_json = {'name': {'invalid': 'type'}} mock_response.json.return_value = return_json mock_httpx_client.get.return_value = mock_response - with pytest.raises(A2AClientJSONError) as exc_info: + with pytest.raises(AgentCardResolutionError) as exc_info: await resolver.get_agent_card() assert ( f'Failed to validate agent card structure from {base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}' - in exc_info.value.message + in str(exc_info.value) ) mock_httpx_client.get.assert_called_once_with( f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', @@ -295,12 +323,7 @@ async def test_get_agent_card_logs_success( # noqa: PLR0913 ): mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with ( - patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ), - caplog.at_level(logging.INFO), - ): + with caplog.at_level(logging.INFO): await resolver.get_agent_card() assert ( f'Successfully fetched agent card data from {base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}' @@ -320,13 +343,10 @@ async def test_get_agent_card_none_relative_path( mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(relative_card_path=None) - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', - ) + await resolver.get_agent_card(relative_card_path=None) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) @pytest.mark.asyncio async def test_get_agent_card_empty_string_relative_path( @@ -341,14 +361,11 @@ async def test_get_agent_card_empty_string_relative_path( mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - with patch.object( - AgentCard, 'model_validate', return_value=Mock(spec=AgentCard) - ): - await resolver.get_agent_card(relative_card_path='') + await resolver.get_agent_card(relative_card_path='') - mock_httpx_client.get.assert_called_once_with( - f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', - ) + mock_httpx_client.get.assert_called_once_with( + f'{base_url}/{AGENT_CARD_WELL_KNOWN_PATH[1:]}', + ) @pytest.mark.parametrize('status_code', [400, 401, 403, 500, 502]) @pytest.mark.asyncio @@ -362,9 +379,9 @@ async def test_get_agent_card_different_status_codes( f'Status {status_code}', request=Mock(), response=mock_response ) mock_httpx_client.get.return_value = mock_response - with pytest.raises(A2AClientHTTPError) as exc_info: + with pytest.raises(AgentCardResolutionError) as exc_info: await resolver.get_agent_card() - assert exc_info.value.status_code == status_code + assert f'HTTP {status_code}' in str(exc_info.value) @pytest.mark.asyncio async def test_get_agent_card_returns_agent_card_instance( @@ -373,14 +390,9 @@ async def test_get_agent_card_returns_agent_card_instance( """Test that get_agent_card returns an AgentCard instance.""" mock_response.json.return_value = valid_agent_card_data mock_httpx_client.get.return_value = mock_response - mock_agent_card = Mock(spec=AgentCard) - - with patch.object( - AgentCard, 'model_validate', return_value=mock_agent_card - ): - result = await resolver.get_agent_card() - assert result == mock_agent_card - mock_response.raise_for_status.assert_called_once() + result = await resolver.get_agent_card() + assert isinstance(result, AgentCard) + mock_response.raise_for_status.assert_called_once() @pytest.mark.asyncio async def test_get_agent_card_with_signature_verifier( @@ -398,3 +410,680 @@ async def test_get_agent_card_with_signature_verifier( ) mock_verifier.assert_called_once_with(agent_card) + + +class TestParseAgentCard: + """Tests for parse_agent_card function.""" + + @staticmethod + def _assert_agent_card_diff( + original_data: dict, serialized_data: dict + ) -> None: + """Helper to assert that the re-serialized 1.0.0 JSON payload contains all original 0.3.0 data (no dropped fields).""" + original_json_str = json.dumps(original_data, indent=2, sort_keys=True) + serialized_json_str = json.dumps( + serialized_data, indent=2, sort_keys=True + ) + + diff_lines = list( + difflib.unified_diff( + original_json_str.splitlines(), + serialized_json_str.splitlines(), + lineterm='', + ) + ) + + removed_lines = [] + for line in diff_lines: + if line.startswith('-') and not line.startswith('---'): + removed_lines.append(line) + + if removed_lines: + error_msg = ( + 'Re-serialization dropped fields from the original payload:\n' + + '\n'.join(removed_lines) + ) + raise AssertionError(error_msg) + + def test_parse_agent_card_legacy_support(self) -> None: + data = { + 'name': 'Legacy Agent', + 'description': 'Legacy Description', + 'version': '1.0', + 'supportsAuthenticatedExtendedCard': True, + } + card = parse_agent_card(data) + assert card.name == 'Legacy Agent' + assert card.capabilities.extended_agent_card is True + # Ensure it's popped from the dict + assert 'supportsAuthenticatedExtendedCard' not in data + + def test_parse_agent_card_new_support(self) -> None: + data = { + 'name': 'New Agent', + 'description': 'New Description', + 'version': '1.0', + 'capabilities': {'extendedAgentCard': True}, + } + card = parse_agent_card(data) + assert card.name == 'New Agent' + assert card.capabilities.extended_agent_card is True + + def test_parse_agent_card_no_support(self) -> None: + data = { + 'name': 'No Support Agent', + 'description': 'No Support Description', + 'version': '1.0', + 'capabilities': {'extendedAgentCard': False}, + } + card = parse_agent_card(data) + assert card.name == 'No Support Agent' + assert card.capabilities.extended_agent_card is False + + def test_parse_agent_card_both_legacy_and_new(self) -> None: + data = { + 'name': 'Mixed Agent', + 'description': 'Mixed Description', + 'version': '1.0', + 'supportsAuthenticatedExtendedCard': True, + 'capabilities': {'streaming': True}, + } + card = parse_agent_card(data) + assert card.name == 'Mixed Agent' + assert card.capabilities.streaming is True + assert card.capabilities.extended_agent_card is True + + def test_parse_typical_030_agent_card(self) -> None: + data = { + 'additionalInterfaces': [ + { + 'transport': 'GRPC', + 'url': 'http://agent.example.com/api/grpc', + } + ], + 'capabilities': {'streaming': True}, + 'defaultInputModes': ['text/plain'], + 'defaultOutputModes': ['application/json'], + 'description': 'A typical agent from 0.3.0', + 'name': 'Typical Agent 0.3', + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3.0', + 'security': [{'test_oauth': ['read', 'write']}], + 'securitySchemes': { + 'test_oauth': { + 'description': 'OAuth2 authentication', + 'flows': { + 'authorizationCode': { + 'authorizationUrl': 'http://auth.example.com', + 'scopes': { + 'read': 'Read access', + 'write': 'Write access', + }, + 'tokenUrl': 'http://token.example.com', + } + }, + 'type': 'oauth2', + } + }, + 'skills': [ + { + 'description': 'The first skill', + 'id': 'skill-1', + 'name': 'Skill 1', + 'security': [{'test_oauth': ['read']}], + 'tags': ['example'], + } + ], + 'supportsAuthenticatedExtendedCard': True, + 'url': 'http://agent.example.com/api', + 'version': '1.0', + } + original_data = copy.deepcopy(data) + card = parse_agent_card(data) + + expected_card = AgentCard( + name='Typical Agent 0.3', + description='A typical agent from 0.3.0', + version='1.0', + capabilities=AgentCapabilities( + extended_agent_card=True, streaming=True + ), + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + supported_interfaces=[ + AgentInterface( + url='http://agent.example.com/api', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://agent.example.com/api/grpc', + protocol_binding='GRPC', + protocol_version='0.3.0', + ), + ], + security_requirements=[ + SecurityRequirement( + schemes={'test_oauth': StringList(list=['read', 'write'])} + ) + ], + security_schemes={ + 'test_oauth': SecurityScheme( + oauth2_security_scheme=OAuth2SecurityScheme( + description='OAuth2 authentication', + flows=OAuthFlows( + authorization_code=AuthorizationCodeOAuthFlow( + authorization_url='http://auth.example.com', + token_url='http://token.example.com', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ), + ) + ) + }, + skills=[ + AgentSkill( + id='skill-1', + name='Skill 1', + description='The first skill', + tags=['example'], + security_requirements=[ + SecurityRequirement( + schemes={'test_oauth': StringList(list=['read'])} + ) + ], + ) + ], + ) + + assert card == expected_card + + # Serialize back to JSON and compare + serialized_data = agent_card_to_dict(card) + + self._assert_agent_card_diff(original_data, serialized_data) + assert 'preferredTransport' in serialized_data + + # Re-parse from the serialized payload and verify identical to original parsing + re_parsed_card = parse_agent_card(copy.deepcopy(serialized_data)) + assert re_parsed_card == card + + def test_parse_agent_card_security_scheme_without_in(self) -> None: + data = { + 'name': 'API Key Agent', + 'description': 'API Key without in param', + 'version': '1.0', + 'securitySchemes': { + 'test_api_key': {'type': 'apiKey', 'name': 'X-API-KEY'} + }, + } + card = parse_agent_card(data) + assert 'test_api_key' in card.security_schemes + assert ( + card.security_schemes['test_api_key'].api_key_security_scheme.name + == 'X-API-KEY' + ) + assert ( + card.security_schemes[ + 'test_api_key' + ].api_key_security_scheme.location + == '' + ) + + def test_parse_agent_card_security_scheme_unknown_type(self) -> None: + data = { + 'name': 'Unknown Scheme Agent', + 'description': 'Has unknown scheme type', + 'version': '1.0', + 'securitySchemes': { + 'test_unknown': { + 'type': 'someFutureType', + 'future_prop': 'value', + }, + 'test_missing_type': {'prop': 'value'}, + }, + } + card = parse_agent_card(data) + assert 'test_unknown' in card.security_schemes + assert not card.security_schemes['test_unknown'].WhichOneof('scheme') + + assert 'test_missing_type' in card.security_schemes + assert not card.security_schemes['test_missing_type'].WhichOneof( + 'scheme' + ) + + def test_parse_030_agent_card_route_planner(self) -> None: + data = { + 'protocolVersion': '0.3', + 'name': 'GeoSpatial Route Planner Agent', + 'description': 'Provides advanced route planning.', + 'url': 'https://georoute-agent.example.com/a2a/v1', + 'preferredTransport': 'JSONRPC', + 'additionalInterfaces': [ + { + 'url': 'https://georoute-agent.example.com/a2a/v1', + 'transport': 'JSONRPC', + }, + { + 'url': 'https://georoute-agent.example.com/a2a/grpc', + 'transport': 'GRPC', + }, + { + 'url': 'https://georoute-agent.example.com/a2a/json', + 'transport': 'HTTP+JSON', + }, + ], + 'provider': { + 'organization': 'Example Geo Services Inc.', + 'url': 'https://www.examplegeoservices.com', + }, + 'iconUrl': 'https://georoute-agent.example.com/icon.png', + 'version': '1.2.0', + 'documentationUrl': 'https://docs.examplegeoservices.com/georoute-agent/api', + 'supportsAuthenticatedExtendedCard': True, + 'capabilities': { + 'streaming': True, + 'pushNotifications': True, + 'stateTransitionHistory': False, + }, + 'securitySchemes': { + 'google': { + 'type': 'openIdConnect', + 'openIdConnectUrl': 'https://accounts.google.com/.well-known/openid-configuration', + } + }, + 'security': [{'google': ['openid', 'profile', 'email']}], + 'defaultInputModes': ['application/json', 'text/plain'], + 'defaultOutputModes': ['application/json', 'image/png'], + 'skills': [ + { + 'id': 'route-optimizer-traffic', + 'name': 'Traffic-Aware Route Optimizer', + 'description': 'Calculates the optimal driving route between two or more locations, taking into account real-time traffic conditions, road closures, and user preferences (e.g., avoid tolls, prefer highways).', + 'tags': [ + 'maps', + 'routing', + 'navigation', + 'directions', + 'traffic', + ], + 'examples': [ + "Plan a route from '1600 Amphitheatre Parkway, Mountain View, CA' to 'San Francisco International Airport' avoiding tolls.", + '{"origin": {"lat": 37.422, "lng": -122.084}, "destination": {"lat": 37.7749, "lng": -122.4194}, "preferences": ["avoid_ferries"]}', + ], + 'inputModes': ['application/json', 'text/plain'], + 'outputModes': [ + 'application/json', + 'application/vnd.geo+json', + 'text/html', + ], + 'security': [ + {'example': []}, + {'google': ['openid', 'profile', 'email']}, + ], + }, + { + 'id': 'custom-map-generator', + 'name': 'Personalized Map Generator', + 'description': 'Creates custom map images or interactive map views based on user-defined points of interest, routes, and style preferences. Can overlay data layers.', + 'tags': [ + 'maps', + 'customization', + 'visualization', + 'cartography', + ], + 'examples': [ + 'Generate a map of my upcoming road trip with all planned stops highlighted.', + 'Show me a map visualizing all coffee shops within a 1-mile radius of my current location.', + ], + 'inputModes': ['application/json'], + 'outputModes': [ + 'image/png', + 'image/jpeg', + 'application/json', + 'text/html', + ], + }, + ], + 'signatures': [ + { + 'protected': 'eyJhbGciOiJFUzI1NiIsInR5cCI6IkpPU0UiLCJraWQiOiJrZXktMSIsImprdSI6Imh0dHBzOi8vZXhhbXBsZS5jb20vYWdlbnQvandrcy5qc29uIn0', + 'signature': 'QFdkNLNszlGj3z3u0YQGt_T9LixY3qtdQpZmsTdDHDe3fXV9y9-B3m2-XgCpzuhiLt8E0tV6HXoZKHv4GtHgKQ', + } + ], + } + + original_data = copy.deepcopy(data) + card = parse_agent_card(data) + + expected_card = AgentCard( + name='GeoSpatial Route Planner Agent', + description='Provides advanced route planning.', + version='1.2.0', + documentation_url='https://docs.examplegeoservices.com/georoute-agent/api', + icon_url='https://georoute-agent.example.com/icon.png', + provider=AgentProvider( + organization='Example Geo Services Inc.', + url='https://www.examplegeoservices.com', + ), + capabilities=AgentCapabilities( + extended_agent_card=True, + streaming=True, + push_notifications=True, + ), + default_input_modes=['application/json', 'text/plain'], + default_output_modes=['application/json', 'image/png'], + supported_interfaces=[ + AgentInterface( + url='https://georoute-agent.example.com/a2a/v1', + protocol_binding='JSONRPC', + protocol_version='0.3', + ), + AgentInterface( + url='https://georoute-agent.example.com/a2a/v1', + protocol_binding='JSONRPC', + protocol_version='0.3', + ), + AgentInterface( + url='https://georoute-agent.example.com/a2a/grpc', + protocol_binding='GRPC', + protocol_version='0.3', + ), + AgentInterface( + url='https://georoute-agent.example.com/a2a/json', + protocol_binding='HTTP+JSON', + protocol_version='0.3', + ), + ], + security_requirements=[ + SecurityRequirement( + schemes={ + 'google': StringList( + list=['openid', 'profile', 'email'] + ) + } + ) + ], + security_schemes={ + 'google': SecurityScheme( + open_id_connect_security_scheme=OpenIdConnectSecurityScheme( + open_id_connect_url='https://accounts.google.com/.well-known/openid-configuration' + ) + ) + }, + skills=[ + AgentSkill( + id='route-optimizer-traffic', + name='Traffic-Aware Route Optimizer', + description='Calculates the optimal driving route between two or more locations, taking into account real-time traffic conditions, road closures, and user preferences (e.g., avoid tolls, prefer highways).', + tags=[ + 'maps', + 'routing', + 'navigation', + 'directions', + 'traffic', + ], + examples=[ + "Plan a route from '1600 Amphitheatre Parkway, Mountain View, CA' to 'San Francisco International Airport' avoiding tolls.", + '{"origin": {"lat": 37.422, "lng": -122.084}, "destination": {"lat": 37.7749, "lng": -122.4194}, "preferences": ["avoid_ferries"]}', + ], + input_modes=['application/json', 'text/plain'], + output_modes=[ + 'application/json', + 'application/vnd.geo+json', + 'text/html', + ], + security_requirements=[ + SecurityRequirement(schemes={'example': StringList()}), + SecurityRequirement( + schemes={ + 'google': StringList( + list=['openid', 'profile', 'email'] + ) + } + ), + ], + ), + AgentSkill( + id='custom-map-generator', + name='Personalized Map Generator', + description='Creates custom map images or interactive map views based on user-defined points of interest, routes, and style preferences. Can overlay data layers.', + tags=[ + 'maps', + 'customization', + 'visualization', + 'cartography', + ], + examples=[ + 'Generate a map of my upcoming road trip with all planned stops highlighted.', + 'Show me a map visualizing all coffee shops within a 1-mile radius of my current location.', + ], + input_modes=['application/json'], + output_modes=[ + 'image/png', + 'image/jpeg', + 'application/json', + 'text/html', + ], + ), + ], + signatures=[ + AgentCardSignature( + protected='eyJhbGciOiJFUzI1NiIsInR5cCI6IkpPU0UiLCJraWQiOiJrZXktMSIsImprdSI6Imh0dHBzOi8vZXhhbXBsZS5jb20vYWdlbnQvandrcy5qc29uIn0', + signature='QFdkNLNszlGj3z3u0YQGt_T9LixY3qtdQpZmsTdDHDe3fXV9y9-B3m2-XgCpzuhiLt8E0tV6HXoZKHv4GtHgKQ', + ) + ], + ) + + assert card == expected_card + serialized_data = agent_card_to_dict(card) + del original_data['capabilities']['stateTransitionHistory'] + self._assert_agent_card_diff(original_data, serialized_data) + re_parsed_card = parse_agent_card(copy.deepcopy(serialized_data)) + assert re_parsed_card == card + + def test_parse_complex_030_agent_card(self) -> None: + data = { + 'additionalInterfaces': [ + { + 'transport': 'GRPC', + 'url': 'http://complex.agent.example.com/grpc', + }, + { + 'transport': 'JSONRPC', + 'url': 'http://complex.agent.example.com/jsonrpc', + }, + ], + 'capabilities': {'pushNotifications': True, 'streaming': True}, + 'defaultInputModes': ['text/plain', 'application/json'], + 'defaultOutputModes': ['application/json', 'image/png'], + 'description': 'A very complex agent from 0.3.0', + 'name': 'Complex Agent 0.3', + 'preferredTransport': 'HTTP+JSON', + 'protocolVersion': '0.3.0', + 'security': [ + {'test_oauth': ['read', 'write'], 'test_api_key': []}, + {'test_http': []}, + {'test_oidc': ['openid', 'profile']}, + {'test_mtls': []}, + ], + 'securitySchemes': { + 'test_oauth': { + 'description': 'OAuth2 authentication', + 'flows': { + 'authorizationCode': { + 'authorizationUrl': 'http://auth.example.com', + 'scopes': { + 'read': 'Read access', + 'write': 'Write access', + }, + 'tokenUrl': 'http://token.example.com', + } + }, + 'type': 'oauth2', + }, + 'test_api_key': { + 'description': 'API Key auth', + 'in': 'header', + 'name': 'X-API-KEY', + 'type': 'apiKey', + }, + 'test_http': { + 'bearerFormat': 'JWT', + 'description': 'HTTP Basic auth', + 'scheme': 'basic', + 'type': 'http', + }, + 'test_oidc': { + 'description': 'OIDC Auth', + 'openIdConnectUrl': 'https://example.com/.well-known/openid-configuration', + 'type': 'openIdConnect', + }, + 'test_mtls': {'description': 'mTLS Auth', 'type': 'mutualTLS'}, + }, + 'skills': [ + { + 'description': 'The first complex skill', + 'id': 'skill-1', + 'inputModes': ['application/json'], + 'name': 'Complex Skill 1', + 'outputModes': ['application/json'], + 'security': [{'test_api_key': []}], + 'tags': ['example', 'complex'], + }, + { + 'description': 'The second complex skill', + 'id': 'skill-2', + 'name': 'Complex Skill 2', + 'security': [{'test_oidc': ['openid']}], + 'tags': ['example2'], + }, + ], + 'supportsAuthenticatedExtendedCard': True, + 'url': 'http://complex.agent.example.com/api', + 'version': '1.5.2', + } + original_data = copy.deepcopy(data) + card = parse_agent_card(data) + + expected_card = AgentCard( + name='Complex Agent 0.3', + description='A very complex agent from 0.3.0', + version='1.5.2', + capabilities=AgentCapabilities( + extended_agent_card=True, + streaming=True, + push_notifications=True, + ), + default_input_modes=['text/plain', 'application/json'], + default_output_modes=['application/json', 'image/png'], + supported_interfaces=[ + AgentInterface( + url='http://complex.agent.example.com/api', + protocol_binding='HTTP+JSON', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://complex.agent.example.com/grpc', + protocol_binding='GRPC', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://complex.agent.example.com/jsonrpc', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ), + ], + security_requirements=[ + SecurityRequirement( + schemes={ + 'test_oauth': StringList(list=['read', 'write']), + 'test_api_key': StringList(), + } + ), + SecurityRequirement(schemes={'test_http': StringList()}), + SecurityRequirement( + schemes={ + 'test_oidc': StringList(list=['openid', 'profile']) + } + ), + SecurityRequirement(schemes={'test_mtls': StringList()}), + ], + security_schemes={ + 'test_oauth': SecurityScheme( + oauth2_security_scheme=OAuth2SecurityScheme( + description='OAuth2 authentication', + flows=OAuthFlows( + authorization_code=AuthorizationCodeOAuthFlow( + authorization_url='http://auth.example.com', + token_url='http://token.example.com', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ), + ) + ), + 'test_api_key': SecurityScheme( + api_key_security_scheme=APIKeySecurityScheme( + description='API Key auth', + location='header', + name='X-API-KEY', + ) + ), + 'test_http': SecurityScheme( + http_auth_security_scheme=HTTPAuthSecurityScheme( + description='HTTP Basic auth', + scheme='basic', + bearer_format='JWT', + ) + ), + 'test_oidc': SecurityScheme( + open_id_connect_security_scheme=OpenIdConnectSecurityScheme( + description='OIDC Auth', + open_id_connect_url='https://example.com/.well-known/openid-configuration', + ) + ), + 'test_mtls': SecurityScheme( + mtls_security_scheme=MutualTlsSecurityScheme( + description='mTLS Auth' + ) + ), + }, + skills=[ + AgentSkill( + id='skill-1', + name='Complex Skill 1', + description='The first complex skill', + tags=['example', 'complex'], + input_modes=['application/json'], + output_modes=['application/json'], + security_requirements=[ + SecurityRequirement( + schemes={'test_api_key': StringList()} + ) + ], + ), + AgentSkill( + id='skill-2', + name='Complex Skill 2', + description='The second complex skill', + tags=['example2'], + security_requirements=[ + SecurityRequirement( + schemes={'test_oidc': StringList(list=['openid'])} + ) + ], + ), + ], + ) + + assert card == expected_card + serialized_data = agent_card_to_dict(card) + self._assert_agent_card_diff(original_data, serialized_data) + re_parsed_card = parse_agent_card(copy.deepcopy(serialized_data)) + assert re_parsed_card == card diff --git a/tests/client/test_client_factory.py b/tests/client/test_client_factory.py index c388974b1..b30d57d12 100644 --- a/tests/client/test_client_factory.py +++ b/tests/client/test_client_factory.py @@ -1,18 +1,24 @@ """Tests for the ClientFactory.""" from unittest.mock import AsyncMock, MagicMock, patch +import typing import httpx import pytest -from a2a.client import ClientConfig, ClientFactory -from a2a.client.transports import JsonRpcTransport, RestTransport -from a2a.types import ( +from a2a.client import ClientConfig, ClientFactory, create_client +from a2a.client.client_factory import TransportProducer +from a2a.client.transports import ( + JsonRpcTransport, + RestTransport, +) +from a2a.client.transports.tenant_decorator import TenantTransportDecorator +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, AgentInterface, - TransportProtocol, ) +from a2a.utils.constants import TransportProtocol @pytest.fixture @@ -21,13 +27,17 @@ def base_agent_card() -> AgentCard: return AgentCard( name='Test Agent', description='An agent for testing.', - url='http://primary-url.com', + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url='http://primary-url.com', + ) + ], version='1.0.0', capabilities=AgentCapabilities(), skills=[], default_input_modes=[], default_output_modes=[], - preferred_transport=TransportProtocol.jsonrpc, ) @@ -35,116 +45,121 @@ def test_client_factory_selects_preferred_transport(base_agent_card: AgentCard): """Verify that the factory selects the preferred transport by default.""" config = ClientConfig( httpx_client=httpx.AsyncClient(), - supported_transports=[ - TransportProtocol.jsonrpc, - TransportProtocol.http_json, + supported_protocol_bindings=[ + TransportProtocol.JSONRPC, + TransportProtocol.HTTP_JSON, ], - extensions=['https://example.com/test-ext/v0'], ) factory = ClientFactory(config) client = factory.create(base_agent_card) - assert isinstance(client._transport, JsonRpcTransport) - assert client._transport.url == 'http://primary-url.com' - assert ['https://example.com/test-ext/v0'] == client._transport.extensions + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] def test_client_factory_selects_secondary_transport_url( base_agent_card: AgentCard, ): """Verify that the factory selects the correct URL for a secondary transport.""" - base_agent_card.additional_interfaces = [ + base_agent_card.supported_interfaces.append( AgentInterface( - transport=TransportProtocol.http_json, + protocol_binding=TransportProtocol.HTTP_JSON, url='http://secondary-url.com', ) - ] + ) # Client prefers REST, which is available as a secondary transport config = ClientConfig( httpx_client=httpx.AsyncClient(), - supported_transports=[ - TransportProtocol.http_json, - TransportProtocol.jsonrpc, + supported_protocol_bindings=[ + TransportProtocol.HTTP_JSON, + TransportProtocol.JSONRPC, ], use_client_preference=True, - extensions=['https://example.com/test-ext/v0'], ) factory = ClientFactory(config) client = factory.create(base_agent_card) - assert isinstance(client._transport, RestTransport) - assert client._transport.url == 'http://secondary-url.com' - assert ['https://example.com/test-ext/v0'] == client._transport.extensions + assert isinstance(client._transport, RestTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://secondary-url.com' # type: ignore[attr-defined] def test_client_factory_server_preference(base_agent_card: AgentCard): """Verify that the factory respects server transport preference.""" - base_agent_card.preferred_transport = TransportProtocol.http_json - base_agent_card.additional_interfaces = [ + # Server lists REST first, which implies preference + base_agent_card.supported_interfaces.insert( + 0, + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url='http://primary-url.com', + ), + ) + base_agent_card.supported_interfaces.append( AgentInterface( - transport=TransportProtocol.jsonrpc, url='http://secondary-url.com' + protocol_binding=TransportProtocol.JSONRPC, + url='http://secondary-url.com', ) - ] + ) # Client supports both, but server prefers REST config = ClientConfig( httpx_client=httpx.AsyncClient(), - supported_transports=[ - TransportProtocol.jsonrpc, - TransportProtocol.http_json, + supported_protocol_bindings=[ + TransportProtocol.JSONRPC, + TransportProtocol.HTTP_JSON, ], ) factory = ClientFactory(config) client = factory.create(base_agent_card) - assert isinstance(client._transport, RestTransport) - assert client._transport.url == 'http://primary-url.com' + assert isinstance(client._transport, RestTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] def test_client_factory_no_compatible_transport(base_agent_card: AgentCard): """Verify that the factory raises an error if no compatible transport is found.""" config = ClientConfig( httpx_client=httpx.AsyncClient(), - supported_transports=[TransportProtocol.grpc], + supported_protocol_bindings=['UNKNOWN_PROTOCOL'], ) factory = ClientFactory(config) with pytest.raises(ValueError, match='no compatible transports found'): factory.create(base_agent_card) -@pytest.mark.asyncio -async def test_client_factory_connect_with_agent_card( +def test_client_factory_create_with_default_config( base_agent_card: AgentCard, ): - """Verify that connect works correctly when provided with an AgentCard.""" - client = await ClientFactory.connect(base_agent_card) - assert isinstance(client._transport, JsonRpcTransport) - assert client._transport.url == 'http://primary-url.com' + """Verify that create works correctly with a default ClientConfig.""" + factory = ClientFactory() + client = factory.create(base_agent_card) + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] @pytest.mark.asyncio -async def test_client_factory_connect_with_url(base_agent_card: AgentCard): - """Verify that connect works correctly when provided with a URL.""" +async def test_client_factory_create_from_url(base_agent_card: AgentCard): + """Verify that create_from_url resolves the card and creates a client.""" with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: mock_resolver.return_value.get_agent_card = AsyncMock( return_value=base_agent_card ) agent_url = 'http://example.com' - client = await ClientFactory.connect(agent_url) + factory = ClientFactory() + client = await factory.create_from_url(agent_url) mock_resolver.assert_called_once() assert mock_resolver.call_args[0][1] == agent_url mock_resolver.return_value.get_agent_card.assert_awaited_once() - assert isinstance(client._transport, JsonRpcTransport) - assert client._transport.url == 'http://primary-url.com' + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] @pytest.mark.asyncio -async def test_client_factory_connect_with_url_and_client_config( +async def test_client_factory_create_from_url_uses_factory_httpx_client( base_agent_card: AgentCard, ): - """Verify connect with a URL and a pre-configured httpx client.""" + """Verify create_from_url uses the factory's configured httpx client.""" with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: mock_resolver.return_value.get_agent_card = AsyncMock( return_value=base_agent_card @@ -154,35 +169,35 @@ async def test_client_factory_connect_with_url_and_client_config( mock_httpx_client = httpx.AsyncClient() config = ClientConfig(httpx_client=mock_httpx_client) - client = await ClientFactory.connect(agent_url, client_config=config) + factory = ClientFactory(config) + client = await factory.create_from_url(agent_url) mock_resolver.assert_called_once_with(mock_httpx_client, agent_url) mock_resolver.return_value.get_agent_card.assert_awaited_once() - assert isinstance(client._transport, JsonRpcTransport) - assert client._transport.url == 'http://primary-url.com' + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] @pytest.mark.asyncio -async def test_client_factory_connect_with_resolver_args( +async def test_client_factory_create_from_url_passes_resolver_args( base_agent_card: AgentCard, ): - """Verify connect passes resolver arguments correctly.""" + """Verify create_from_url passes resolver arguments correctly.""" with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: mock_resolver.return_value.get_agent_card = AsyncMock( return_value=base_agent_card ) agent_url = 'http://example.com' - relative_path = '/card' + relative_path = '/extendedAgentCard' http_kwargs = {'headers': {'X-Test': 'true'}} - # The resolver args are only passed if an httpx_client is provided in config config = ClientConfig(httpx_client=httpx.AsyncClient()) + factory = ClientFactory(config) - await ClientFactory.connect( + await factory.create_from_url( agent_url, - client_config=config, relative_card_path=relative_path, resolver_http_kwargs=http_kwargs, ) @@ -195,25 +210,29 @@ async def test_client_factory_connect_with_resolver_args( @pytest.mark.asyncio -async def test_client_factory_connect_resolver_args_without_client( +async def test_client_factory_create_from_url_with_default_config( base_agent_card: AgentCard, ): - """Verify resolver args are ignored if no httpx_client is provided.""" + """Verify create_from_url works with a default ClientConfig.""" with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: mock_resolver.return_value.get_agent_card = AsyncMock( return_value=base_agent_card ) agent_url = 'http://example.com' - relative_path = '/card' + relative_path = '/extendedAgentCard' http_kwargs = {'headers': {'X-Test': 'true'}} - await ClientFactory.connect( + factory = ClientFactory() + + await factory.create_from_url( agent_url, relative_card_path=relative_path, resolver_http_kwargs=http_kwargs, ) + # Factory always creates an httpx client, so resolver gets it + mock_resolver.assert_called_once() mock_resolver.return_value.get_agent_card.assert_awaited_once_with( relative_card_path=relative_path, http_kwargs=http_kwargs, @@ -221,48 +240,139 @@ async def test_client_factory_connect_resolver_args_without_client( ) -@pytest.mark.asyncio -async def test_client_factory_connect_with_extra_transports( +def test_client_factory_register_and_create_custom_transport( base_agent_card: AgentCard, ): - """Verify that connect can register and use extra transports.""" + """Verify that register() + create() uses custom transports.""" class CustomTransport: pass - def custom_transport_producer(*args, **kwargs): + def custom_transport_producer( + *args: typing.Any, **kwargs: typing.Any + ) -> CustomTransport: return CustomTransport() - base_agent_card.preferred_transport = 'custom' - base_agent_card.url = 'custom://foo' - - config = ClientConfig(supported_transports=['custom']) + base_agent_card.supported_interfaces.insert( + 0, + AgentInterface(protocol_binding='custom', url='custom://foo'), + ) - client = await ClientFactory.connect( - base_agent_card, - client_config=config, - extra_transports={'custom': custom_transport_producer}, + config = ClientConfig(supported_protocol_bindings=['custom']) + factory = ClientFactory(config) + factory.register( + 'custom', + typing.cast(TransportProducer, custom_transport_producer), ) - assert isinstance(client._transport, CustomTransport) + client = factory.create(base_agent_card) + assert isinstance(client._transport, CustomTransport) # type: ignore[attr-defined] @pytest.mark.asyncio -async def test_client_factory_connect_with_consumers_and_interceptors( +async def test_client_factory_create_from_url_uses_registered_transports( + base_agent_card: AgentCard, +): + """Verify that create_from_url() respects custom transports from register().""" + + class CustomTransport: + pass + + def custom_transport_producer( + *args: typing.Any, **kwargs: typing.Any + ) -> CustomTransport: + return CustomTransport() + + base_agent_card.supported_interfaces.insert( + 0, + AgentInterface(protocol_binding='custom', url='custom://foo'), + ) + + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + config = ClientConfig(supported_protocol_bindings=['custom']) + factory = ClientFactory(config) + factory.register( + 'custom', + typing.cast(TransportProducer, custom_transport_producer), + ) + + client = await factory.create_from_url('http://example.com') + assert isinstance(client._transport, CustomTransport) # type: ignore[attr-defined] + + +def test_client_factory_create_with_interceptors( base_agent_card: AgentCard, ): - """Verify consumers and interceptors are passed through correctly.""" - consumer1 = MagicMock() + """Verify interceptors are passed through correctly.""" interceptor1 = MagicMock() with patch('a2a.client.client_factory.BaseClient') as mock_base_client: - await ClientFactory.connect( + factory = ClientFactory() + factory.create( base_agent_card, - consumers=[consumer1], interceptors=[interceptor1], ) mock_base_client.assert_called_once() call_args = mock_base_client.call_args[0] - assert call_args[3] == [consumer1] - assert call_args[4] == [interceptor1] + assert call_args[3] == [interceptor1] + + +def test_client_factory_applies_tenant_decorator(base_agent_card: AgentCard): + """Verify that the factory applies TenantTransportDecorator when tenant is present.""" + base_agent_card.supported_interfaces[0].tenant = 'my-tenant' + config = ClientConfig( + httpx_client=httpx.AsyncClient(), + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) + factory = ClientFactory(config) + client = factory.create(base_agent_card) + + assert isinstance(client._transport, TenantTransportDecorator) # type: ignore[attr-defined] + assert client._transport._tenant == 'my-tenant' # type: ignore[attr-defined] + assert isinstance(client._transport._base, JsonRpcTransport) # type: ignore[attr-defined] + + +@pytest.mark.asyncio +async def test_create_client_with_agent_card(base_agent_card: AgentCard): + """Verify create_client works when given an AgentCard directly.""" + client = await create_client(base_agent_card) + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + assert client._transport.url == 'http://primary-url.com' # type: ignore[attr-defined] + + +@pytest.mark.asyncio +async def test_create_client_with_url(base_agent_card: AgentCard): + """Verify create_client resolves a URL and creates a client.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + client = await create_client('http://example.com') + + mock_resolver.assert_called_once() + assert mock_resolver.call_args[0][1] == 'http://example.com' + assert isinstance(client._transport, JsonRpcTransport) # type: ignore[attr-defined] + + +@pytest.mark.asyncio +async def test_create_client_with_url_and_config(base_agent_card: AgentCard): + """Verify create_client passes client_config to the factory.""" + with patch('a2a.client.client_factory.A2ACardResolver') as mock_resolver: + mock_resolver.return_value.get_agent_card = AsyncMock( + return_value=base_agent_card + ) + + mock_httpx_client = httpx.AsyncClient() + config = ClientConfig(httpx_client=mock_httpx_client) + + await create_client('http://example.com', client_config=config) + + mock_resolver.assert_called_once_with( + mock_httpx_client, 'http://example.com' + ) diff --git a/tests/client/test_client_factory_grpc.py b/tests/client/test_client_factory_grpc.py new file mode 100644 index 000000000..47423d0ab --- /dev/null +++ b/tests/client/test_client_factory_grpc.py @@ -0,0 +1,175 @@ +"""Tests for GRPC transport selection in ClientFactory.""" + +from unittest.mock import MagicMock, patch +import pytest + +from a2a.client import ClientConfig, ClientFactory +from a2a.types.a2a_pb2 import AgentCard, AgentInterface, AgentCapabilities +from a2a.utils.constants import TransportProtocol + + +@pytest.fixture +def grpc_agent_card() -> AgentCard: + """Provides an AgentCard with GRPC interfaces for tests.""" + return AgentCard( + supported_interfaces=[], + capabilities=AgentCapabilities(), + skills=[], + default_input_modes=[], + default_output_modes=[], + name='GRPC Agent', + version='1.0.0', + description='Test agent', + ) + + +def test_grpc_priority_1_0(grpc_agent_card): + """Verify that protocol version 1.0 has the highest priority and uses GrpcTransport.""" + grpc_agent_card.supported_interfaces.extend( + [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url03', + protocol_version='0.3', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url11', + protocol_version='1.1', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url10', + protocol_version='1.0', + ), + ] + ) + + config = ClientConfig( + supported_protocol_bindings=[TransportProtocol.GRPC], + grpc_channel_factory=MagicMock(), + ) + + # We patch GrpcTransport and CompatGrpcTransport in the client_factory module + with ( + patch('a2a.client.client_factory.GrpcTransport') as mock_grpc, + patch('a2a.client.client_factory.CompatGrpcTransport') as mock_compat, + ): + factory = ClientFactory(config) + factory.create(grpc_agent_card) + + # Priority 1: 1.0 -> GrpcTransport + mock_grpc.create.assert_called_once_with( + grpc_agent_card, 'url10', config + ) + mock_compat.create.assert_not_called() + + +def test_grpc_priority_gt_1_0(grpc_agent_card): + """Verify that protocol version > 1.0 uses GrpcTransport (first one found).""" + grpc_agent_card.supported_interfaces.extend( + [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url03', + protocol_version='0.3', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url11', + protocol_version='1.1', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url12', + protocol_version='1.2', + ), + ] + ) + + config = ClientConfig( + supported_protocol_bindings=[TransportProtocol.GRPC], + grpc_channel_factory=MagicMock(), + ) + + with ( + patch('a2a.client.client_factory.GrpcTransport') as mock_grpc, + patch('a2a.client.client_factory.CompatGrpcTransport') as mock_compat, + ): + factory = ClientFactory(config) + factory.create(grpc_agent_card) + + # Priority 2: > 1.0 -> GrpcTransport (first matching is 1.1) + mock_grpc.create.assert_called_once_with( + grpc_agent_card, 'url11', config + ) + mock_compat.create.assert_not_called() + + +def test_grpc_priority_lt_0_3_raises_value_error(grpc_agent_card): + """Verify that if the only available interface has version < 0.3, it raises a ValueError.""" + grpc_agent_card.supported_interfaces.extend( + [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url02', + protocol_version='0.2', + ), + ] + ) + + config = ClientConfig( + supported_protocol_bindings=[TransportProtocol.GRPC], + grpc_channel_factory=MagicMock(), + ) + + factory = ClientFactory(config) + with pytest.raises(ValueError, match='no compatible transports found'): + factory.create(grpc_agent_card) + + +def test_grpc_invalid_version_raises_value_error(grpc_agent_card): + """Verify that if only an invalid version is available, it raises a ValueError (it's ignored).""" + grpc_agent_card.supported_interfaces.extend( + [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url_invalid', + protocol_version='invalid_version_string', + ), + ] + ) + + config = ClientConfig( + supported_protocol_bindings=[TransportProtocol.GRPC], + grpc_channel_factory=MagicMock(), + ) + + factory = ClientFactory(config) + with pytest.raises(ValueError, match='no compatible transports found'): + factory.create(grpc_agent_card) + + +def test_grpc_unspecified_version_uses_grpc_transport(grpc_agent_card): + """Verify that if no version is specified, it defaults to GrpcTransport.""" + grpc_agent_card.supported_interfaces.extend( + [ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='url_no_version', + ), + ] + ) + + config = ClientConfig( + supported_protocol_bindings=[TransportProtocol.GRPC], + grpc_channel_factory=MagicMock(), + ) + + with patch('a2a.client.client_factory.GrpcTransport') as mock_grpc: + factory = ClientFactory(config) + factory.create(grpc_agent_card) + + mock_grpc.create.assert_called_once_with( + grpc_agent_card, 'url_no_version', config + ) diff --git a/tests/client/test_client_task_manager.py b/tests/client/test_client_task_manager.py deleted file mode 100644 index 63f98d8b9..000000000 --- a/tests/client/test_client_task_manager.py +++ /dev/null @@ -1,178 +0,0 @@ -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from a2a.client.client_task_manager import ClientTaskManager -from a2a.client.errors import ( - A2AClientInvalidArgsError, - A2AClientInvalidStateError, -) -from a2a.types import ( - Artifact, - Message, - Part, - Role, - Task, - TaskArtifactUpdateEvent, - TaskState, - TaskStatus, - TaskStatusUpdateEvent, - TextPart, -) - - -@pytest.fixture -def task_manager() -> ClientTaskManager: - return ClientTaskManager() - - -@pytest.fixture -def sample_task() -> Task: - return Task( - id='task123', - context_id='context456', - status=TaskStatus(state=TaskState.working), - history=[], - artifacts=[], - ) - - -@pytest.fixture -def sample_message() -> Message: - return Message( - message_id='msg1', - role=Role.user, - parts=[Part(root=TextPart(text='Hello'))], - ) - - -def test_get_task_no_task_id_returns_none( - task_manager: ClientTaskManager, -) -> None: - assert task_manager.get_task() is None - - -def test_get_task_or_raise_no_task_raises_error( - task_manager: ClientTaskManager, -) -> None: - with pytest.raises(A2AClientInvalidStateError, match='no current Task'): - task_manager.get_task_or_raise() - - -@pytest.mark.asyncio -async def test_save_task_event_with_task( - task_manager: ClientTaskManager, sample_task: Task -) -> None: - await task_manager.save_task_event(sample_task) - assert task_manager.get_task() == sample_task - assert task_manager._task_id == sample_task.id - assert task_manager._context_id == sample_task.context_id - - -@pytest.mark.asyncio -async def test_save_task_event_with_task_already_set_raises_error( - task_manager: ClientTaskManager, sample_task: Task -) -> None: - await task_manager.save_task_event(sample_task) - with pytest.raises( - A2AClientInvalidArgsError, - match='Task is already set, create new manager for new tasks.', - ): - await task_manager.save_task_event(sample_task) - - -@pytest.mark.asyncio -async def test_save_task_event_with_status_update( - task_manager: ClientTaskManager, sample_task: Task, sample_message: Message -) -> None: - await task_manager.save_task_event(sample_task) - status_update = TaskStatusUpdateEvent( - task_id=sample_task.id, - context_id=sample_task.context_id, - status=TaskStatus(state=TaskState.completed, message=sample_message), - final=True, - ) - updated_task = await task_manager.save_task_event(status_update) - assert updated_task.status.state == TaskState.completed - assert updated_task.history == [sample_message] - - -@pytest.mark.asyncio -async def test_save_task_event_with_artifact_update( - task_manager: ClientTaskManager, sample_task: Task -) -> None: - await task_manager.save_task_event(sample_task) - artifact = Artifact( - artifact_id='art1', parts=[Part(root=TextPart(text='artifact content'))] - ) - artifact_update = TaskArtifactUpdateEvent( - task_id=sample_task.id, - context_id=sample_task.context_id, - artifact=artifact, - ) - - with patch( - 'a2a.client.client_task_manager.append_artifact_to_task' - ) as mock_append: - updated_task = await task_manager.save_task_event(artifact_update) - mock_append.assert_called_once_with(updated_task, artifact_update) - - -@pytest.mark.asyncio -async def test_save_task_event_creates_task_if_not_exists( - task_manager: ClientTaskManager, -) -> None: - status_update = TaskStatusUpdateEvent( - task_id='new_task', - context_id='new_context', - status=TaskStatus(state=TaskState.working), - final=False, - ) - updated_task = await task_manager.save_task_event(status_update) - assert updated_task is not None - assert updated_task.id == 'new_task' - assert updated_task.status.state == TaskState.working - - -@pytest.mark.asyncio -async def test_process_with_task_event( - task_manager: ClientTaskManager, sample_task: Task -) -> None: - with patch.object( - task_manager, 'save_task_event', new_callable=AsyncMock - ) as mock_save: - await task_manager.process(sample_task) - mock_save.assert_called_once_with(sample_task) - - -@pytest.mark.asyncio -async def test_process_with_non_task_event( - task_manager: ClientTaskManager, -) -> None: - with patch.object( - task_manager, 'save_task_event', new_callable=Mock - ) as mock_save: - non_task_event = 'not a task event' - await task_manager.process(non_task_event) - mock_save.assert_not_called() - - -def test_update_with_message( - task_manager: ClientTaskManager, sample_task: Task, sample_message: Message -) -> None: - updated_task = task_manager.update_with_message(sample_message, sample_task) - assert updated_task.history == [sample_message] - - -def test_update_with_message_moves_status_message( - task_manager: ClientTaskManager, sample_task: Task, sample_message: Message -) -> None: - status_message = Message( - message_id='status_msg', - role=Role.agent, - parts=[Part(root=TextPart(text='Status'))], - ) - sample_task.status.message = status_message - updated_task = task_manager.update_with_message(sample_message, sample_task) - assert updated_task.history == [status_message, sample_message] - assert updated_task.status.message is None diff --git a/tests/client/test_errors.py b/tests/client/test_errors.py index c3b065f45..1ee7ab10a 100644 --- a/tests/client/test_errors.py +++ b/tests/client/test_errors.py @@ -1,15 +1,6 @@ -from typing import NoReturn - import pytest -from a2a.client import A2AClientError, A2AClientHTTPError, A2AClientJSONError -from a2a.client.errors import ( - A2AClientInvalidArgsError, - A2AClientInvalidStateError, - A2AClientJSONRPCError, - A2AClientTimeoutError, -) -from a2a.types import JSONRPCError, JSONRPCErrorResponse +from a2a.client import A2AClientError class TestA2AClientError: @@ -26,253 +17,9 @@ def test_inheritance(self) -> None: error = A2AClientError() assert isinstance(error, Exception) - -class TestA2AClientHTTPError: - """Test cases for A2AClientHTTPError class.""" - - def test_instantiation(self) -> None: - """Test that A2AClientHTTPError can be instantiated with status_code and message.""" - error = A2AClientHTTPError(404, 'Not Found') - assert isinstance(error, A2AClientError) - assert error.status_code == 404 - assert error.message == 'Not Found' - - def test_message_formatting(self) -> None: - """Test that the error message is formatted correctly.""" - error = A2AClientHTTPError(500, 'Internal Server Error') - assert str(error) == 'HTTP Error 500: Internal Server Error' - - def test_repr(self) -> None: - """Test that __repr__ shows structured attributes.""" - error = A2AClientHTTPError(404, 'Not Found') - assert ( - repr(error) - == "A2AClientHTTPError(status_code=404, message='Not Found')" - ) - - def test_inheritance(self) -> None: - """Test that A2AClientHTTPError inherits from A2AClientError.""" - error = A2AClientHTTPError(400, 'Bad Request') - assert isinstance(error, A2AClientError) - - def test_with_empty_message(self) -> None: - """Test behavior with an empty message.""" - error = A2AClientHTTPError(403, '') - assert error.status_code == 403 - assert error.message == '' - assert str(error) == 'HTTP Error 403: ' - - def test_with_various_status_codes(self) -> None: - """Test with different HTTP status codes.""" - test_cases = [ - (200, 'OK'), - (201, 'Created'), - (400, 'Bad Request'), - (401, 'Unauthorized'), - (403, 'Forbidden'), - (404, 'Not Found'), - (500, 'Internal Server Error'), - (503, 'Service Unavailable'), - ] - - for status_code, message in test_cases: - error = A2AClientHTTPError(status_code, message) - assert error.status_code == status_code - assert error.message == message - assert str(error) == f'HTTP Error {status_code}: {message}' - - -class TestA2AClientJSONError: - """Test cases for A2AClientJSONError class.""" - - def test_instantiation(self) -> None: - """Test that A2AClientJSONError can be instantiated with a message.""" - error = A2AClientJSONError('Invalid JSON format') - assert isinstance(error, A2AClientError) - assert error.message == 'Invalid JSON format' - - def test_message_formatting(self) -> None: - """Test that the error message is formatted correctly.""" - error = A2AClientJSONError('Missing required field') - assert str(error) == 'JSON Error: Missing required field' - - def test_repr(self) -> None: - """Test that __repr__ shows structured attributes.""" - error = A2AClientJSONError('Invalid JSON format') - assert ( - repr(error) == "A2AClientJSONError(message='Invalid JSON format')" - ) - - def test_inheritance(self) -> None: - """Test that A2AClientJSONError inherits from A2AClientError.""" - error = A2AClientJSONError('Parsing error') - assert isinstance(error, A2AClientError) - - def test_with_empty_message(self) -> None: - """Test behavior with an empty message.""" - error = A2AClientJSONError('') - assert error.message == '' - assert str(error) == 'JSON Error: ' - - def test_with_various_messages(self) -> None: - """Test with different error messages.""" - test_messages = [ - 'Malformed JSON', - 'Missing required fields', - 'Invalid data type', - 'Unexpected JSON structure', - 'Empty JSON object', - ] - - for message in test_messages: - error = A2AClientJSONError(message) - assert error.message == message - assert str(error) == f'JSON Error: {message}' - - -class TestA2AClientTimeoutErrorRepr: - """Test __repr__ for A2AClientTimeoutError.""" - - def test_repr(self) -> None: - """Test that __repr__ shows structured attributes.""" - error = A2AClientTimeoutError('Request timed out') - assert ( - repr(error) == "A2AClientTimeoutError(message='Request timed out')" - ) - - -class TestA2AClientInvalidArgsErrorRepr: - """Test __repr__ for A2AClientInvalidArgsError.""" - - def test_repr(self) -> None: - """Test that __repr__ shows structured attributes.""" - error = A2AClientInvalidArgsError('Missing required param') - assert ( - repr(error) - == "A2AClientInvalidArgsError(message='Missing required param')" - ) - - -class TestA2AClientInvalidStateErrorRepr: - """Test __repr__ for A2AClientInvalidStateError.""" - - def test_repr(self) -> None: - """Test that __repr__ shows structured attributes.""" - error = A2AClientInvalidStateError('Client not initialized') - assert ( - repr(error) - == "A2AClientInvalidStateError(message='Client not initialized')" - ) - - -class TestA2AClientJSONRPCErrorRepr: - """Test __repr__ for A2AClientJSONRPCError.""" - - def test_repr(self) -> None: - """Test that __repr__ shows the JSON-RPC error object.""" - response = JSONRPCErrorResponse( - id='test-1', - error=JSONRPCError(code=-32601, message='Method not found'), - ) - error = A2AClientJSONRPCError(response) - assert ( - repr(error) - == "A2AClientJSONRPCError(JSONRPCError(code=-32601, data=None, message='Method not found'))" - ) - - -class TestExceptionHierarchy: - """Test the exception hierarchy and relationships.""" - - def test_exception_hierarchy(self) -> None: - """Test that the exception hierarchy is correct.""" - assert issubclass(A2AClientError, Exception) - assert issubclass(A2AClientHTTPError, A2AClientError) - assert issubclass(A2AClientJSONError, A2AClientError) - - def test_catch_specific_exception(self) -> None: - """Test that specific exceptions can be caught.""" - try: - raise A2AClientHTTPError(404, 'Not Found') - except A2AClientHTTPError as e: - assert e.status_code == 404 - assert e.message == 'Not Found' - - def test_catch_base_exception(self) -> None: - """Test that derived exceptions can be caught as base exception.""" - exceptions = [ - A2AClientHTTPError(404, 'Not Found'), - A2AClientJSONError('Invalid JSON'), - ] - - for raised_error in exceptions: - try: - raise raised_error - except A2AClientError as e: - assert isinstance(e, A2AClientError) - - -class TestExceptionRaising: - """Test cases for raising and handling the exceptions.""" - - def test_raising_http_error(self) -> NoReturn: - """Test raising an HTTP error and checking its properties.""" - with pytest.raises(A2AClientHTTPError) as excinfo: - raise A2AClientHTTPError(429, 'Too Many Requests') - - error = excinfo.value - assert error.status_code == 429 - assert error.message == 'Too Many Requests' - assert str(error) == 'HTTP Error 429: Too Many Requests' - - def test_raising_json_error(self) -> NoReturn: - """Test raising a JSON error and checking its properties.""" - with pytest.raises(A2AClientJSONError) as excinfo: - raise A2AClientJSONError('Invalid format') - - error = excinfo.value - assert error.message == 'Invalid format' - assert str(error) == 'JSON Error: Invalid format' - - def test_raising_base_error(self) -> NoReturn: + def test_raising_base_error(self) -> None: """Test raising the base error.""" with pytest.raises(A2AClientError) as excinfo: raise A2AClientError('Generic client error') assert str(excinfo.value) == 'Generic client error' - - -# Additional parametrized tests for more comprehensive coverage - - -@pytest.mark.parametrize( - 'status_code,message,expected', - [ - (400, 'Bad Request', 'HTTP Error 400: Bad Request'), - (404, 'Not Found', 'HTTP Error 404: Not Found'), - (500, 'Server Error', 'HTTP Error 500: Server Error'), - ], -) -def test_http_error_parametrized( - status_code: int, message: str, expected: str -) -> None: - """Parametrized test for HTTP errors with different status codes.""" - error = A2AClientHTTPError(status_code, message) - assert error.status_code == status_code - assert error.message == message - assert str(error) == expected - - -@pytest.mark.parametrize( - 'message,expected', - [ - ('Missing field', 'JSON Error: Missing field'), - ('Invalid type', 'JSON Error: Invalid type'), - ('Parsing failed', 'JSON Error: Parsing failed'), - ], -) -def test_json_error_parametrized(message: str, expected: str) -> None: - """Parametrized test for JSON errors with different messages.""" - error = A2AClientJSONError(message) - assert error.message == message - assert str(error) == expected diff --git a/tests/client/test_legacy_client.py b/tests/client/test_legacy_client.py deleted file mode 100644 index 1bd9e4ae2..000000000 --- a/tests/client/test_legacy_client.py +++ /dev/null @@ -1,115 +0,0 @@ -"""Tests for the legacy client compatibility layer.""" - -from unittest.mock import AsyncMock, MagicMock - -import httpx -import pytest - -from a2a.client import A2AClient, A2AGrpcClient -from a2a.types import ( - AgentCapabilities, - AgentCard, - Message, - MessageSendParams, - Part, - Role, - SendMessageRequest, - Task, - TaskQueryParams, - TaskState, - TaskStatus, - TextPart, -) - - -@pytest.fixture -def mock_httpx_client() -> AsyncMock: - return AsyncMock(spec=httpx.AsyncClient) - - -@pytest.fixture -def mock_grpc_stub() -> AsyncMock: - stub = AsyncMock() - stub._channel = MagicMock() - return stub - - -@pytest.fixture -def jsonrpc_agent_card() -> AgentCard: - return AgentCard( - name='Test Agent', - description='A test agent', - url='http://test.agent.com/rpc', - version='1.0.0', - capabilities=AgentCapabilities(streaming=True), - skills=[], - default_input_modes=[], - default_output_modes=[], - preferred_transport='jsonrpc', - ) - - -@pytest.fixture -def grpc_agent_card() -> AgentCard: - return AgentCard( - name='Test Agent', - description='A test agent', - url='http://test.agent.com/rpc', - version='1.0.0', - capabilities=AgentCapabilities(streaming=True), - skills=[], - default_input_modes=[], - default_output_modes=[], - preferred_transport='grpc', - ) - - -@pytest.mark.asyncio -async def test_a2a_client_send_message( - mock_httpx_client: AsyncMock, jsonrpc_agent_card: AgentCard -): - client = A2AClient( - httpx_client=mock_httpx_client, agent_card=jsonrpc_agent_card - ) - - # Mock the underlying transport's send_message method - mock_response_task = Task( - id='task-123', - context_id='ctx-456', - status=TaskStatus(state=TaskState.completed), - ) - - client._transport.send_message = AsyncMock(return_value=mock_response_task) - - message = Message( - message_id='msg-123', - role=Role.user, - parts=[Part(root=TextPart(text='Hello'))], - ) - request = SendMessageRequest( - id='req-123', params=MessageSendParams(message=message) - ) - response = await client.send_message(request) - - assert response.root.result.id == 'task-123' - - -@pytest.mark.asyncio -async def test_a2a_grpc_client_get_task( - mock_grpc_stub: AsyncMock, grpc_agent_card: AgentCard -): - client = A2AGrpcClient(grpc_stub=mock_grpc_stub, agent_card=grpc_agent_card) - - mock_response_task = Task( - id='task-456', - context_id='ctx-789', - status=TaskStatus(state=TaskState.working), - ) - - client.get_task = AsyncMock(return_value=mock_response_task) - - params = TaskQueryParams(id='task-456') - response = await client.get_task(params) - - assert response.id == 'task-456' - client.get_task.assert_awaited_once_with(params) diff --git a/tests/client/test_service_parameters.py b/tests/client/test_service_parameters.py new file mode 100644 index 000000000..fbabd9719 --- /dev/null +++ b/tests/client/test_service_parameters.py @@ -0,0 +1,53 @@ +"""Tests for a2a.client.service_parameters module.""" + +from a2a.client.service_parameters import ( + ServiceParametersFactory, + with_a2a_extensions, +) +from a2a.extensions.common import HTTP_EXTENSION_HEADER + + +def test_with_a2a_extensions_merges_dedupes_and_sorts(): + """Repeated calls accumulate; duplicates collapse; output is sorted.""" + parameters = ServiceParametersFactory.create( + [ + with_a2a_extensions(['ext-c', 'ext-a']), + with_a2a_extensions(['ext-b', 'ext-a']), + ] + ) + + assert parameters[HTTP_EXTENSION_HEADER] == 'ext-a,ext-b,ext-c' + + +def test_with_a2a_extensions_merges_existing_header_value(): + """Pre-existing comma-separated header values are parsed and merged.""" + parameters = ServiceParametersFactory.create_from( + {HTTP_EXTENSION_HEADER: 'ext-a, ext-b'}, + [with_a2a_extensions(['ext-c'])], + ) + + assert parameters[HTTP_EXTENSION_HEADER] == 'ext-a,ext-b,ext-c' + + +def test_with_a2a_extensions_empty_is_noop(): + """An empty extensions list leaves the header untouched / absent.""" + parameters = ServiceParametersFactory.create( + [ + with_a2a_extensions(['ext-a']), + with_a2a_extensions([]), + ] + ) + + assert parameters[HTTP_EXTENSION_HEADER] == 'ext-a' + assert HTTP_EXTENSION_HEADER not in ServiceParametersFactory.create( + [with_a2a_extensions([])] + ) + + +def test_with_a2a_extensions_normalizes_input_strings(): + """Input strings are split on commas and stripped, like header values.""" + parameters = ServiceParametersFactory.create( + [with_a2a_extensions(['ext-a, ext-b', ' ext-c '])] + ) + + assert parameters[HTTP_EXTENSION_HEADER] == 'ext-a,ext-b,ext-c' diff --git a/tests/client/transports/__init__.py b/tests/client/transports/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/client/transports/test_grpc_client.py b/tests/client/transports/test_grpc_client.py index 0d491c2e0..95cca9189 100644 --- a/tests/client/transports/test_grpc_client.py +++ b/tests/client/transports/test_grpc_client.py @@ -3,44 +3,54 @@ import grpc import pytest +from google.protobuf import any_pb2 +from google.rpc import error_details_pb2, status_pb2 + +from a2a.client.client import ClientCallContext from a2a.client.transports.grpc import GrpcTransport from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.grpc import a2a_pb2, a2a_pb2_grpc -from a2a.types import ( +from a2a.utils.constants import VERSION_HEADER, PROTOCOL_VERSION_CURRENT +from a2a.utils.errors import A2A_ERROR_REASONS +from a2a.types import a2a_pb2 +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, + AgentInterface, Artifact, - GetTaskPushNotificationConfigParams, + AuthenticationInfo, + TaskPushNotificationConfig, + DeleteTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, Message, - MessageSendParams, Part, - PushNotificationAuthenticationInfo, - PushNotificationConfig, + TaskPushNotificationConfig, Role, + SendMessageRequest, Task, TaskArtifactUpdateEvent, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) -from a2a.utils import get_text_parts, proto_utils -from a2a.utils.errors import ServerError +from a2a.helpers.proto_helpers import get_text_parts @pytest.fixture def mock_grpc_stub() -> AsyncMock: """Provides a mock gRPC stub with methods mocked.""" - stub = AsyncMock(spec=a2a_pb2_grpc.A2AServiceStub) + stub = MagicMock() # Use MagicMock without spec to avoid auto-spec warnings stub.SendMessage = AsyncMock() stub.SendStreamingMessage = MagicMock() stub.GetTask = AsyncMock() + stub.ListTasks = AsyncMock() stub.CancelTask = AsyncMock() stub.CreateTaskPushNotificationConfig = AsyncMock() stub.GetTaskPushNotificationConfig = AsyncMock() + stub.ListTaskPushNotificationConfigs = AsyncMock() + stub.DeleteTaskPushNotificationConfig = AsyncMock() return stub @@ -50,7 +60,11 @@ def sample_agent_card() -> AgentCard: return AgentCard( name='gRPC Test Agent', description='Agent for testing gRPC client', - url='grpc://localhost:50051', + supported_interfaces=[ + AgentInterface( + url='grpc://localhost:50051', protocol_binding='GRPC' + ) + ], version='1.0', capabilities=AgentCapabilities(streaming=True, push_notifications=True), default_input_modes=['text/plain'], @@ -64,27 +78,23 @@ def grpc_transport( mock_grpc_stub: AsyncMock, sample_agent_card: AgentCard ) -> GrpcTransport: """Provides a GrpcTransport instance.""" - channel = AsyncMock() + channel = MagicMock() # Use MagicMock instead of AsyncMock transport = GrpcTransport( channel=channel, agent_card=sample_agent_card, - extensions=[ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ], ) transport.stub = mock_grpc_stub return transport @pytest.fixture -def sample_message_send_params() -> MessageSendParams: - """Provides a sample MessageSendParams object.""" - return MessageSendParams( +def sample_message_send_params() -> SendMessageRequest: + """Provides a sample SendMessageRequest object.""" + return SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg-1', - parts=[Part(root=TextPart(text='Hello'))], + parts=[Part(text='Hello')], ) ) @@ -95,7 +105,17 @@ def sample_task() -> Task: return Task( id='task-1', context_id='ctx-1', - status=TaskStatus(state=TaskState.completed), + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + + +@pytest.fixture +def sample_task_2() -> Task: + """Provides a sample Task object.""" + return Task( + id='task-2', + context_id='ctx-2', + status=TaskStatus(state=TaskState.TASK_STATE_FAILED), ) @@ -103,9 +123,9 @@ def sample_task() -> Task: def sample_message() -> Message: """Provides a sample Message object.""" return Message( - role=Role.agent, + role=Role.ROLE_AGENT, message_id='msg-response', - parts=[Part(root=TextPart(text='Hi there'))], + parts=[Part(text='Hi there')], ) @@ -116,7 +136,7 @@ def sample_artifact() -> Artifact: artifact_id='artifact-1', name='example.txt', description='An example artifact', - parts=[Part(root=TextPart(text='Hi there'))], + parts=[Part(text='Hi there')], metadata={}, extensions=[], ) @@ -128,8 +148,7 @@ def sample_task_status_update_event() -> TaskStatusUpdateEvent: return TaskStatusUpdateEvent( task_id='task-1', context_id='ctx-1', - status=TaskStatus(state=TaskState.working), - final=False, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), metadata={}, ) @@ -150,34 +169,22 @@ def sample_task_artifact_update_event( @pytest.fixture -def sample_authentication_info() -> PushNotificationAuthenticationInfo: +def sample_authentication_info() -> AuthenticationInfo: """Provides a sample AuthenticationInfo object.""" - return PushNotificationAuthenticationInfo( - schemes=['apikey', 'oauth2'], credentials='secret-token' - ) - - -@pytest.fixture -def sample_push_notification_config( - sample_authentication_info: PushNotificationAuthenticationInfo, -) -> PushNotificationConfig: - """Provides a sample PushNotificationConfig object.""" - return PushNotificationConfig( - id='config-1', - url='https://example.com/notify', - token='example-token', - authentication=sample_authentication_info, - ) + return AuthenticationInfo(scheme='apikey', credentials='secret-token') @pytest.fixture def sample_task_push_notification_config( - sample_push_notification_config: PushNotificationConfig, + sample_authentication_info: AuthenticationInfo, ) -> TaskPushNotificationConfig: """Provides a sample TaskPushNotificationConfig object.""" return TaskPushNotificationConfig( task_id='task-1', - push_notification_config=sample_push_notification_config, + id='config-1', + url='https://example.com/notify', + token='example-token', + authentication=sample_authentication_info, ) @@ -185,41 +192,112 @@ def sample_task_push_notification_config( async def test_send_message_task_response( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, - sample_message_send_params: MessageSendParams, + sample_message_send_params: SendMessageRequest, sample_task: Task, ) -> None: """Test send_message that returns a Task.""" mock_grpc_stub.SendMessage.return_value = a2a_pb2.SendMessageResponse( - task=proto_utils.ToProto.task(sample_task) + task=sample_task ) response = await grpc_transport.send_message( sample_message_send_params, - extensions=['https://example.com/test-ext/v3'], + context=ClientCallContext( + service_parameters={ + HTTP_EXTENSION_HEADER: 'https://example.com/test-ext/v3' + } + ), ) mock_grpc_stub.SendMessage.assert_awaited_once() _, kwargs = mock_grpc_stub.SendMessage.call_args assert kwargs['metadata'] == [ + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ( HTTP_EXTENSION_HEADER.lower(), 'https://example.com/test-ext/v3', - ) + ), ] - assert isinstance(response, Task) - assert response.id == sample_task.id + assert response.HasField('task') + assert response.task.id == sample_task.id + + +@pytest.mark.asyncio +async def test_send_message_with_timeout_context( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_message_send_params: SendMessageRequest, + sample_task: Task, +) -> None: + """Test send_message passes context timeout to grpc stub.""" + from a2a.client.client import ClientCallContext + + mock_grpc_stub.SendMessage.return_value = a2a_pb2.SendMessageResponse( + task=sample_task + ) + context = ClientCallContext(timeout=12.5) + + await grpc_transport.send_message( + sample_message_send_params, + context=context, + ) + + mock_grpc_stub.SendMessage.assert_awaited_once() + _, kwargs = mock_grpc_stub.SendMessage.call_args + assert 'timeout' in kwargs + assert kwargs['timeout'] == 12.5 + + +@pytest.mark.parametrize('error_cls', list(A2A_ERROR_REASONS.keys())) +@pytest.mark.asyncio +async def test_grpc_mapped_errors_rich( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_message_send_params: SendMessageRequest, + error_cls, +) -> None: + """Test handling of rich gRPC error responses with Status metadata.""" + + reason = A2A_ERROR_REASONS.get(error_cls, 'UNKNOWN_ERROR') + + error_info = error_details_pb2.ErrorInfo( + reason=reason, + domain='a2a-protocol.org', + ) + + error_details = f'{error_cls.__name__}: Mapped Error' + status = status_pb2.Status( + code=grpc.StatusCode.INTERNAL.value[0], message=error_details + ) + detail = any_pb2.Any() + detail.Pack(error_info) + status.details.append(detail) + + mock_grpc_stub.SendMessage.side_effect = grpc.aio.AioRpcError( + code=grpc.StatusCode.INTERNAL, + initial_metadata=grpc.aio.Metadata(), + trailing_metadata=grpc.aio.Metadata( + ('grpc-status-details-bin', status.SerializeToString()), + ), + details=error_details, + ) + + with pytest.raises(error_cls) as excinfo: + await grpc_transport.send_message(sample_message_send_params) + + assert str(excinfo.value) == error_details @pytest.mark.asyncio async def test_send_message_message_response( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, - sample_message_send_params: MessageSendParams, + sample_message_send_params: SendMessageRequest, sample_message: Message, ) -> None: """Test send_message that returns a Message.""" mock_grpc_stub.SendMessage.return_value = a2a_pb2.SendMessageResponse( - msg=proto_utils.ToProto.message(sample_message) + message=sample_message ) response = await grpc_transport.send_message(sample_message_send_params) @@ -227,14 +305,11 @@ async def test_send_message_message_response( mock_grpc_stub.SendMessage.assert_awaited_once() _, kwargs = mock_grpc_stub.SendMessage.call_args assert kwargs['metadata'] == [ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ] - assert isinstance(response, Message) - assert response.message_id == sample_message.message_id - assert get_text_parts(response.parts) == get_text_parts( + assert response.HasField('message') + assert response.message.message_id == sample_message.message_id + assert get_text_parts(response.message.parts) == get_text_parts( sample_message.parts ) @@ -243,7 +318,7 @@ async def test_send_message_message_response( async def test_send_message_streaming( # noqa: PLR0913 grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, - sample_message_send_params: MessageSendParams, + sample_message_send_params: SendMessageRequest, sample_message: Message, sample_task: Task, sample_task_status_update_event: TaskStatusUpdateEvent, @@ -253,21 +328,15 @@ async def test_send_message_streaming( # noqa: PLR0913 stream = MagicMock() stream.read = AsyncMock( side_effect=[ + a2a_pb2.StreamResponse(message=sample_message), + a2a_pb2.StreamResponse(task=sample_task), a2a_pb2.StreamResponse( - msg=proto_utils.ToProto.message(sample_message) + status_update=sample_task_status_update_event ), - a2a_pb2.StreamResponse(task=proto_utils.ToProto.task(sample_task)), a2a_pb2.StreamResponse( - status_update=proto_utils.ToProto.task_status_update_event( - sample_task_status_update_event - ) + artifact_update=sample_task_artifact_update_event ), - a2a_pb2.StreamResponse( - artifact_update=proto_utils.ToProto.task_artifact_update_event( - sample_task_artifact_update_event - ) - ), - grpc.aio.EOF, + grpc.aio.EOF, # type: ignore[attr-defined] ] ) mock_grpc_stub.SendStreamingMessage.return_value = stream @@ -282,19 +351,23 @@ async def test_send_message_streaming( # noqa: PLR0913 mock_grpc_stub.SendStreamingMessage.assert_called_once() _, kwargs = mock_grpc_stub.SendStreamingMessage.call_args assert kwargs['metadata'] == [ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ] - assert isinstance(responses[0], Message) - assert responses[0].message_id == sample_message.message_id - assert isinstance(responses[1], Task) - assert responses[1].id == sample_task.id - assert isinstance(responses[2], TaskStatusUpdateEvent) - assert responses[2].task_id == sample_task_status_update_event.task_id - assert isinstance(responses[3], TaskArtifactUpdateEvent) - assert responses[3].task_id == sample_task_artifact_update_event.task_id + # Responses are StreamResponse proto objects + assert responses[0].HasField('message') + assert responses[0].message.message_id == sample_message.message_id + assert responses[1].HasField('task') + assert responses[1].task.id == sample_task.id + assert responses[2].HasField('status_update') + assert ( + responses[2].status_update.task_id + == sample_task_status_update_event.task_id + ) + assert responses[3].HasField('artifact_update') + assert ( + responses[3].artifact_update.task_id + == sample_task_artifact_update_event.task_id + ) @pytest.mark.asyncio @@ -302,46 +375,68 @@ async def test_get_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task ) -> None: """Test retrieving a task.""" - mock_grpc_stub.GetTask.return_value = proto_utils.ToProto.task(sample_task) - params = TaskQueryParams(id=sample_task.id) + mock_grpc_stub.GetTask.return_value = sample_task + params = GetTaskRequest(id=f'{sample_task.id}') response = await grpc_transport.get_task(params) mock_grpc_stub.GetTask.assert_awaited_once_with( - a2a_pb2.GetTaskRequest( - name=f'tasks/{sample_task.id}', history_length=None - ), + a2a_pb2.GetTaskRequest(id=f'{sample_task.id}', history_length=None), metadata=[ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ], + timeout=None, ) assert response.id == sample_task.id +@pytest.mark.asyncio +async def test_list_tasks( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task: Task, + sample_task_2: Task, +): + """Test listing tasks.""" + mock_grpc_stub.ListTasks.return_value = a2a_pb2.ListTasksResponse( + tasks=[sample_task, sample_task_2], + total_size=2, + ) + params = a2a_pb2.ListTasksRequest() + + result = await grpc_transport.list_tasks(params) + + mock_grpc_stub.ListTasks.assert_awaited_once_with( + params, + metadata=[ + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), + ], + timeout=None, + ) + assert result.total_size == 2 + assert not result.next_page_token + assert [t.id for t in result.tasks] == [sample_task.id, sample_task_2.id] + + @pytest.mark.asyncio async def test_get_task_with_history( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task ) -> None: """Test retrieving a task with history.""" - mock_grpc_stub.GetTask.return_value = proto_utils.ToProto.task(sample_task) + mock_grpc_stub.GetTask.return_value = sample_task history_len = 10 - params = TaskQueryParams(id=sample_task.id, history_length=history_len) + params = GetTaskRequest(id=f'{sample_task.id}', history_length=history_len) await grpc_transport.get_task(params) mock_grpc_stub.GetTask.assert_awaited_once_with( a2a_pb2.GetTaskRequest( - name=f'tasks/{sample_task.id}', history_length=history_len + id=f'{sample_task.id}', history_length=history_len ), metadata=[ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ], + timeout=None, ) @@ -350,195 +445,242 @@ async def test_cancel_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task: Task ) -> None: """Test cancelling a task.""" - cancelled_task = sample_task.model_copy() - cancelled_task.status.state = TaskState.canceled - mock_grpc_stub.CancelTask.return_value = proto_utils.ToProto.task( - cancelled_task - ) - params = TaskIdParams(id=sample_task.id) - extensions = [ - 'https://example.com/test-ext/v3', - ] - response = await grpc_transport.cancel_task(params, extensions=extensions) + cancelled_task = Task( + id=sample_task.id, + context_id=sample_task.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_CANCELED), + ) + mock_grpc_stub.CancelTask.return_value = cancelled_task + extensions = 'https://example.com/test-ext/v3' + + request = a2a_pb2.CancelTaskRequest(id=f'{sample_task.id}') + response = await grpc_transport.cancel_task( + request, + context=ClientCallContext( + service_parameters={HTTP_EXTENSION_HEADER: extensions} + ), + ) mock_grpc_stub.CancelTask.assert_awaited_once_with( - a2a_pb2.CancelTaskRequest(name=f'tasks/{sample_task.id}'), + a2a_pb2.CancelTaskRequest(id=f'{sample_task.id}'), metadata=[ - (HTTP_EXTENSION_HEADER.lower(), 'https://example.com/test-ext/v3') + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), + (HTTP_EXTENSION_HEADER.lower(), 'https://example.com/test-ext/v3'), ], + timeout=None, ) - assert response.status.state == TaskState.canceled + assert response.status.state == TaskState.TASK_STATE_CANCELED @pytest.mark.asyncio -async def test_set_task_callback_with_valid_task( +async def test_create_task_push_notification_config_with_valid_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task_push_notification_config: TaskPushNotificationConfig, ) -> None: """Test setting a task push notification config with a valid task id.""" mock_grpc_stub.CreateTaskPushNotificationConfig.return_value = ( - proto_utils.ToProto.task_push_notification_config( - sample_task_push_notification_config - ) + sample_task_push_notification_config ) - response = await grpc_transport.set_task_callback( - sample_task_push_notification_config + # Create the request object expected by the transport + request = TaskPushNotificationConfig( + task_id='task-1', + url='https://example.com/notify', + ) + response = await grpc_transport.create_task_push_notification_config( + request ) mock_grpc_stub.CreateTaskPushNotificationConfig.assert_awaited_once_with( - a2a_pb2.CreateTaskPushNotificationConfigRequest( - parent=f'tasks/{sample_task_push_notification_config.task_id}', - config_id=sample_task_push_notification_config.push_notification_config.id, - config=proto_utils.ToProto.task_push_notification_config( - sample_task_push_notification_config - ), - ), + request, metadata=[ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ], + timeout=None, ) assert response.task_id == sample_task_push_notification_config.task_id @pytest.mark.asyncio -async def test_set_task_callback_with_invalid_task( +async def test_create_task_push_notification_config_with_invalid_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task_push_notification_config: TaskPushNotificationConfig, ) -> None: - """Test setting a task push notification config with an invalid task id.""" - mock_grpc_stub.CreateTaskPushNotificationConfig.return_value = a2a_pb2.TaskPushNotificationConfig( - name=( - f'invalid-path-to-tasks/{sample_task_push_notification_config.task_id}/' - f'pushNotificationConfigs/{sample_task_push_notification_config.push_notification_config.id}' - ), - push_notification_config=proto_utils.ToProto.push_notification_config( - sample_task_push_notification_config.push_notification_config - ), + """Test setting a task push notification config with an invalid task name format.""" + # Return a config with an invalid name format + mock_grpc_stub.CreateTaskPushNotificationConfig.return_value = ( + a2a_pb2.TaskPushNotificationConfig( + task_id='invalid-path-to-task-1', + id='config-1', + url='https://example.com/notify', + ) ) - with pytest.raises(ServerError) as exc_info: - await grpc_transport.set_task_callback( - sample_task_push_notification_config - ) - assert ( - 'Bad TaskPushNotificationConfig resource name' - in exc_info.value.error.message + request = TaskPushNotificationConfig( + task_id='task-1', + id='config-1', + url='https://example.com/notify', ) + # Note: The transport doesn't validate the response name format + # It just returns the response from the stub + response = await grpc_transport.create_task_push_notification_config( + request + ) + assert response.task_id == 'invalid-path-to-task-1' + @pytest.mark.asyncio -async def test_get_task_callback_with_valid_task( +async def test_get_task_push_notification_config_with_valid_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task_push_notification_config: TaskPushNotificationConfig, ) -> None: """Test retrieving a task push notification config with a valid task id.""" mock_grpc_stub.GetTaskPushNotificationConfig.return_value = ( - proto_utils.ToProto.task_push_notification_config( - sample_task_push_notification_config - ) - ) - params = GetTaskPushNotificationConfigParams( - id=sample_task_push_notification_config.task_id, - push_notification_config_id=sample_task_push_notification_config.push_notification_config.id, + sample_task_push_notification_config ) + config_id = sample_task_push_notification_config.id - response = await grpc_transport.get_task_callback(params) + response = await grpc_transport.get_task_push_notification_config( + GetTaskPushNotificationConfigRequest( + task_id='task-1', + id=config_id, + ) + ) mock_grpc_stub.GetTaskPushNotificationConfig.assert_awaited_once_with( a2a_pb2.GetTaskPushNotificationConfigRequest( - name=( - f'tasks/{params.id}/' - f'pushNotificationConfigs/{params.push_notification_config_id}' - ), + task_id='task-1', + id=config_id, ), metadata=[ - ( - HTTP_EXTENSION_HEADER.lower(), - 'https://example.com/test-ext/v1,https://example.com/test-ext/v2', - ) + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), ], + timeout=None, ) assert response.task_id == sample_task_push_notification_config.task_id @pytest.mark.asyncio -async def test_get_task_callback_with_invalid_task( +async def test_get_task_push_notification_config_with_invalid_task( grpc_transport: GrpcTransport, mock_grpc_stub: AsyncMock, sample_task_push_notification_config: TaskPushNotificationConfig, ) -> None: - """Test retrieving a task push notification config with an invalid task id.""" - mock_grpc_stub.GetTaskPushNotificationConfig.return_value = a2a_pb2.TaskPushNotificationConfig( - name=( - f'invalid-path-to-tasks/{sample_task_push_notification_config.task_id}/' - f'pushNotificationConfigs/{sample_task_push_notification_config.push_notification_config.id}' - ), - push_notification_config=proto_utils.ToProto.push_notification_config( - sample_task_push_notification_config.push_notification_config - ), + """Test retrieving a task push notification config with an invalid task name.""" + mock_grpc_stub.GetTaskPushNotificationConfig.return_value = ( + a2a_pb2.TaskPushNotificationConfig( + task_id='invalid-path-to-task-1', + id='config-1', + url='https://example.com/notify', + ) ) - params = GetTaskPushNotificationConfigParams( - id=sample_task_push_notification_config.task_id, - push_notification_config_id=sample_task_push_notification_config.push_notification_config.id, + + response = await grpc_transport.get_task_push_notification_config( + GetTaskPushNotificationConfigRequest( + task_id='task-1', + id='config-1', + ) ) + # The transport doesn't validate the response name format + assert response.task_id == 'invalid-path-to-task-1' - with pytest.raises(ServerError) as exc_info: - await grpc_transport.get_task_callback(params) - assert ( - 'Bad TaskPushNotificationConfig resource name' - in exc_info.value.error.message + +@pytest.mark.asyncio +async def test_list_task_push_notification_configs( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test retrieving task push notification configs.""" + mock_grpc_stub.ListTaskPushNotificationConfigs.return_value = ( + a2a_pb2.ListTaskPushNotificationConfigsResponse( + configs=[sample_task_push_notification_config] + ) + ) + + response = await grpc_transport.list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest(task_id='task-1') + ) + + mock_grpc_stub.ListTaskPushNotificationConfigs.assert_awaited_once_with( + a2a_pb2.ListTaskPushNotificationConfigsRequest(task_id='task-1'), + metadata=[ + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), + ], + timeout=None, + ) + assert len(response.configs) == 1 + assert response.configs[0].task_id == 'task-1' + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config( + grpc_transport: GrpcTransport, + mock_grpc_stub: AsyncMock, + sample_task_push_notification_config: TaskPushNotificationConfig, +) -> None: + """Test deleting task push notification config.""" + mock_grpc_stub.DeleteTaskPushNotificationConfig.return_value = None + + await grpc_transport.delete_task_push_notification_config( + DeleteTaskPushNotificationConfigRequest( + task_id='task-1', + id='config-1', + ) + ) + + mock_grpc_stub.DeleteTaskPushNotificationConfig.assert_awaited_once_with( + a2a_pb2.DeleteTaskPushNotificationConfigRequest( + task_id='task-1', + id='config-1', + ), + metadata=[ + (VERSION_HEADER.lower(), PROTOCOL_VERSION_CURRENT), + ], + timeout=None, ) @pytest.mark.parametrize( - 'initial_extensions, input_extensions, expected_metadata', + 'input_extensions, expected_metadata', [ ( None, - None, - None, - ), # Case 1: No initial, No input - ( - ['ext1'], - None, - [(HTTP_EXTENSION_HEADER.lower(), 'ext1')], - ), # Case 2: Initial, No input - ( - None, - ['ext2'], - [(HTTP_EXTENSION_HEADER.lower(), 'ext2')], - ), # Case 3: No initial, Input + [], + ), ( - ['ext1'], ['ext2'], - [(HTTP_EXTENSION_HEADER.lower(), 'ext2')], - ), # Case 4: Initial, Input (override) + [ + (HTTP_EXTENSION_HEADER.lower(), 'ext2'), + ], + ), ( - ['ext1'], ['ext2', 'ext3'], - [(HTTP_EXTENSION_HEADER.lower(), 'ext2,ext3')], - ), # Case 5: Initial, Multiple inputs (override) - ( - ['ext1', 'ext2'], - ['ext3'], - [(HTTP_EXTENSION_HEADER.lower(), 'ext3')], - ), # Case 6: Multiple initial, Single input (override) + [ + (HTTP_EXTENSION_HEADER.lower(), 'ext2,ext3'), + ], + ), ], ) def test_get_grpc_metadata( grpc_transport: GrpcTransport, - initial_extensions: list[str] | None, input_extensions: list[str] | None, expected_metadata: list[tuple[str, str]] | None, ) -> None: - """Tests _get_grpc_metadata for correct metadata generation and self.extensions update.""" - grpc_transport.extensions = initial_extensions - metadata = grpc_transport._get_grpc_metadata(input_extensions) - assert metadata == expected_metadata + """Tests _get_grpc_metadata for correct metadata generation.""" + context = None + if input_extensions: + context = ClientCallContext( + service_parameters={ + HTTP_EXTENSION_HEADER: ','.join(input_extensions) + } + ) + + metadata = grpc_transport._get_grpc_metadata(context) + # Filter out a2a-version as it's not being tested here directly and simplifies the assertion + filtered_metadata = [m for m in metadata if m[0] != VERSION_HEADER.lower()] + assert filtered_metadata == expected_metadata diff --git a/tests/client/transports/test_jsonrpc_client.py b/tests/client/transports/test_jsonrpc_client.py index 9725273ff..b005c2e05 100644 --- a/tests/client/transports/test_jsonrpc_client.py +++ b/tests/client/transports/test_jsonrpc_client.py @@ -1,118 +1,99 @@ +"""Tests for the JSON-RPC client transport.""" + import json -from collections.abc import AsyncGenerator -from typing import Any from unittest.mock import AsyncMock, MagicMock, patch +from uuid import uuid4 import httpx import pytest -import respx -from httpx_sse import EventSource, SSEError, ServerSentEvent +from google.protobuf import json_format +from httpx_sse import EventSource, SSEError -from a2a.client import ( - A2ACardResolver, - A2AClientHTTPError, - A2AClientJSONError, - A2AClientTimeoutError, - create_text_message_object, -) +from a2a.client.errors import A2AClientError from a2a.client.transports.jsonrpc import JsonRpcTransport -from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.types import ( +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, - AgentSkill, - InvalidParamsError, + AgentInterface, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, Message, - MessageSendParams, - PushNotificationConfig, - Role, - SendMessageSuccessResponse, + Part, + SendMessageConfiguration, + SendMessageRequest, + SendMessageResponse, Task, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, -) -from a2a.utils import AGENT_CARD_WELL_KNOWN_PATH - - -AGENT_CARD = AgentCard( - name='Hello World Agent', - description='Just a hello world agent', - url='http://localhost:9999/', - version='1.0.0', - default_input_modes=['text'], - default_output_modes=['text'], - capabilities=AgentCapabilities(), - skills=[ - AgentSkill( - id='hello_world', - name='Returns hello world', - description='just returns hello world', - tags=['hello world'], - examples=['hi', 'hello world'], - ) - ], + TaskState, ) +from a2a.utils.errors import JSON_RPC_ERROR_CODE_MAP -AGENT_CARD_EXTENDED = AGENT_CARD.model_copy( - update={ - 'name': 'Hello World Agent - Extended Edition', - 'skills': [ - *AGENT_CARD.skills, - AgentSkill( - id='extended_skill', - name='Super Greet', - description='A more enthusiastic greeting.', - tags=['extended'], - examples=['super hi'], - ), - ], - 'version': '1.0.1', - } -) -AGENT_CARD_SUPPORTS_EXTENDED = AGENT_CARD.model_copy( - update={'supports_authenticated_extended_card': True} -) -AGENT_CARD_NO_URL_SUPPORTS_EXTENDED = AGENT_CARD_SUPPORTS_EXTENDED.model_copy( - update={'url': ''} -) +@pytest.fixture +def mock_httpx_client(): + """Creates a mock httpx.AsyncClient.""" + client = AsyncMock(spec=httpx.AsyncClient) + client.headers = httpx.Headers() + client.timeout = httpx.Timeout(30.0) + return client -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'contextId': 'session-xyz', - 'status': {'state': 'working'}, - 'kind': 'task', -} -MINIMAL_CANCELLED_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'contextId': 'session-xyz', - 'status': {'state': 'canceled'}, - 'kind': 'task', -} +@pytest.fixture +def agent_card(): + """Creates a minimal AgentCard for testing.""" + return AgentCard( + name='Test Agent', + description='A test agent', + supported_interfaces=[ + AgentInterface( + url='http://test-agent.example.com', + protocol_binding='HTTP+JSON', + ) + ], + version='1.0.0', + capabilities=AgentCapabilities(), + ) @pytest.fixture -def mock_httpx_client() -> AsyncMock: - return AsyncMock(spec=httpx.AsyncClient) +def transport(mock_httpx_client, agent_card): + """Creates a JsonRpcTransport instance for testing.""" + return JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://test-agent.example.com', + ) @pytest.fixture -def mock_agent_card() -> MagicMock: - mock = MagicMock(spec=AgentCard, url='http://agent.example.com/api') - mock.supports_authenticated_extended_card = False - return mock +def transport_with_url(mock_httpx_client): + """Creates a JsonRpcTransport with just a URL.""" + return JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=AgentCard(name='Dummy'), + url='http://custom-url.example.com', + ) + + +def create_send_message_request(text='Hello'): + """Helper to create a SendMessageRequest with proper proto structure.""" + return SendMessageRequest( + message=Message( + role='ROLE_USER', + parts=[Part(text=text)], + message_id='msg-123', + ), + configuration=SendMessageConfiguration(), + ) -async def async_iterable_from_list( - items: list[ServerSentEvent], -) -> AsyncGenerator[ServerSentEvent, None]: - """Helper to create an async iterable from a list.""" - for item in items: - yield item +from a2a.extensions.common import HTTP_EXTENSION_HEADER def _assert_extensions_header(mock_kwargs: dict, expected_extensions: set[str]): @@ -123,869 +104,476 @@ def _assert_extensions_header(mock_kwargs: dict, expected_extensions: set[str]): assert actual_extensions == expected_extensions -class TestA2ACardResolver: - BASE_URL = 'http://example.com' - AGENT_CARD_PATH = AGENT_CARD_WELL_KNOWN_PATH - FULL_AGENT_CARD_URL = f'{BASE_URL}{AGENT_CARD_PATH}' - EXTENDED_AGENT_CARD_PATH = '/agent/authenticatedExtendedCard' +class TestJsonRpcTransportInit: + """Tests for JsonRpcTransport initialization.""" - @pytest.mark.asyncio - async def test_init_parameters_stored_correctly( - self, mock_httpx_client: AsyncMock - ): - base_url = 'http://example.com' - custom_path = '/custom/agent-card.json' - resolver = A2ACardResolver( + def test_init_with_agent_card(self, mock_httpx_client, agent_card): + """Test initialization with an agent card.""" + transport = JsonRpcTransport( httpx_client=mock_httpx_client, - base_url=base_url, - agent_card_path=custom_path, + agent_card=agent_card, + url='http://test-agent.example.com', ) - assert resolver.base_url == base_url - assert resolver.agent_card_path == custom_path.lstrip('/') - assert resolver.httpx_client == mock_httpx_client + assert transport.url == 'http://test-agent.example.com' + assert transport.agent_card == agent_card - resolver_default_path = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=base_url, - ) - assert ( - '/' + resolver_default_path.agent_card_path - == AGENT_CARD_WELL_KNOWN_PATH - ) - @pytest.mark.asyncio - async def test_init_strips_slashes(self, mock_httpx_client: AsyncMock): - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url='http://example.com/', - agent_card_path='/.well-known/agent-card.json/', - ) - assert resolver.base_url == 'http://example.com' - assert resolver.agent_card_path == '.well-known/agent-card.json/' +class TestSendMessage: + """Tests for the send_message method.""" @pytest.mark.asyncio - async def test_get_agent_card_success_public_only( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') - mock_httpx_client.get.return_value = mock_response + async def test_send_message_success(self, transport, mock_httpx_client): + """Test successful message sending.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { + 'jsonrpc': '2.0', + 'id': '1', + 'result': { + 'task': { + 'id': task_id, + 'contextId': 'ctx-123', + 'status': {'state': 'TASK_STATE_COMPLETED'}, + } + }, + } + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - agent_card = await resolver.get_agent_card(http_kwargs={'timeout': 10}) + request = create_send_message_request() + response = await transport.send_message(request) - mock_httpx_client.get.assert_called_once_with( - self.FULL_AGENT_CARD_URL, timeout=10 - ) - mock_response.raise_for_status.assert_called_once() - assert isinstance(agent_card, AgentCard) - assert agent_card == AGENT_CARD - assert mock_httpx_client.get.call_count == 1 + assert isinstance(response, SendMessageResponse) + mock_httpx_client.build_request.assert_called_once() + call_args = mock_httpx_client.build_request.call_args + assert call_args[0][1] == 'http://test-agent.example.com' + payload = call_args[1]['json'] + assert payload['method'] == 'SendMessage' + @pytest.mark.parametrize( + 'error_cls, error_code', JSON_RPC_ERROR_CODE_MAP.items() + ) @pytest.mark.asyncio - async def test_get_agent_card_success_with_specified_path_for_extended_card( - self, mock_httpx_client: AsyncMock + async def test_send_message_jsonrpc_error( + self, transport, mock_httpx_client, error_cls, error_code ): - extended_card_response = AsyncMock(spec=httpx.Response) - extended_card_response.status_code = 200 - extended_card_response.json.return_value = ( - AGENT_CARD_EXTENDED.model_dump(mode='json') - ) - mock_httpx_client.get.return_value = extended_card_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) - - auth_kwargs = {'headers': {'Authorization': 'Bearer test token'}} - agent_card_result = await resolver.get_agent_card( - relative_card_path=self.EXTENDED_AGENT_CARD_PATH, - http_kwargs=auth_kwargs, - ) - - expected_extended_url = ( - f'{self.BASE_URL}/{self.EXTENDED_AGENT_CARD_PATH.lstrip("/")}' - ) - mock_httpx_client.get.assert_called_once_with( - expected_extended_url, **auth_kwargs - ) - extended_card_response.raise_for_status.assert_called_once() - assert isinstance(agent_card_result, AgentCard) - assert agent_card_result == AGENT_CARD_EXTENDED - - @pytest.mark.asyncio - async def test_get_agent_card_validation_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 + """Test handling of JSON-RPC mapped error response.""" + mock_response = MagicMock() mock_response.json.return_value = { - 'invalid_field': 'value', - 'name': 'Test Agent', + 'jsonrpc': '2.0', + 'id': '1', + 'error': {'code': error_code, 'message': 'Mapped Error'}, + 'result': None, } - mock_httpx_client.get.return_value = mock_response + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, base_url=self.BASE_URL - ) - with pytest.raises(A2AClientJSONError) as exc_info: - await resolver.get_agent_card() + request = create_send_message_request() - assert ( - f'Failed to validate agent card structure from {self.FULL_AGENT_CARD_URL}' - in str(exc_info.value) - ) - assert 'invalid_field' in str(exc_info.value) - assert mock_httpx_client.get.call_count == 1 + # The transport raises the specific A2AError mapped from code + with pytest.raises(error_cls): + await transport.send_message(request) @pytest.mark.asyncio - async def test_get_agent_card_http_status_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = MagicMock(spec=httpx.Response) - mock_response.status_code = 404 - mock_response.text = 'Not Found' - http_status_error = httpx.HTTPStatusError( - 'Not Found', request=MagicMock(), response=mock_response - ) - mock_httpx_client.get.side_effect = http_status_error + async def test_send_message_timeout(self, transport, mock_httpx_client): + """Test handling of request timeout.""" + mock_httpx_client.send.side_effect = httpx.ReadTimeout('Timeout') - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) + request = create_send_message_request() - with pytest.raises(A2AClientHTTPError) as exc_info: - await resolver.get_agent_card() - - assert exc_info.value.status_code == 404 - assert ( - f'Failed to fetch agent card from {self.FULL_AGENT_CARD_URL}' - in str(exc_info.value) - ) - assert 'Not Found' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + with pytest.raises(A2AClientError, match='timed out'): + await transport.send_message(request) @pytest.mark.asyncio - async def test_get_agent_card_json_decode_error( - self, mock_httpx_client: AsyncMock - ): - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - json_error = json.JSONDecodeError('Expecting value', 'doc', 0) - mock_response.json.side_effect = json_error - mock_httpx_client.get.return_value = mock_response - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, + async def test_send_message_http_error(self, transport, mock_httpx_client): + """Test handling of HTTP errors.""" + mock_response = MagicMock() + mock_response.status_code = 500 + mock_httpx_client.send.side_effect = httpx.HTTPStatusError( + 'Server Error', request=MagicMock(), response=mock_response ) - with pytest.raises(A2AClientJSONError) as exc_info: - await resolver.get_agent_card() + request = create_send_message_request() - assert ( - f'Failed to parse JSON for agent card from {self.FULL_AGENT_CARD_URL}' - in str(exc_info.value) - ) - assert 'Expecting value' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + with pytest.raises(A2AClientError): + await transport.send_message(request) @pytest.mark.asyncio - async def test_get_agent_card_request_error( - self, mock_httpx_client: AsyncMock + async def test_send_message_json_decode_error( + self, transport, mock_httpx_client ): - request_error = httpx.RequestError('Network issue', request=MagicMock()) - mock_httpx_client.get.side_effect = request_error - - resolver = A2ACardResolver( - httpx_client=mock_httpx_client, - base_url=self.BASE_URL, - agent_card_path=self.AGENT_CARD_PATH, - ) + """Test handling of invalid JSON response.""" + mock_response = MagicMock() + mock_response.raise_for_status = MagicMock() + mock_response.json.side_effect = json.JSONDecodeError('msg', 'doc', 0) + mock_httpx_client.send.return_value = mock_response - with pytest.raises(A2AClientHTTPError) as exc_info: - await resolver.get_agent_card() - - assert exc_info.value.status_code == 503 - assert ( - f'Network communication error fetching agent card from {self.FULL_AGENT_CARD_URL}' - in str(exc_info.value) - ) - assert 'Network issue' in str(exc_info.value) - mock_httpx_client.get.assert_called_once_with(self.FULL_AGENT_CARD_URL) + request = create_send_message_request() + with pytest.raises(A2AClientError): + await transport.send_message(request) -class TestJsonRpcTransport: - AGENT_URL = 'http://agent.example.com/api' - - def test_init_with_agent_card( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + @pytest.mark.asyncio + async def test_send_message_with_timeout_context( + self, transport, mock_httpx_client ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - assert client.url == mock_agent_card.url - assert client.httpx_client == mock_httpx_client + """Test that send_message passes context timeout to build_request.""" + from a2a.client.client import ClientCallContext - def test_init_with_url(self, mock_httpx_client: AsyncMock): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, url=self.AGENT_URL - ) - assert client.url == self.AGENT_URL - assert client.httpx_client == mock_httpx_client + mock_response = MagicMock() + mock_response.json.return_value = { + 'jsonrpc': '2.0', + 'id': '1', + 'result': {}, + } + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - def test_init_with_agent_card_and_url_prioritizes_url( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, - agent_card=mock_agent_card, - url='http://otherurl.com', - ) - assert client.url == 'http://otherurl.com' + request = create_send_message_request() + context = ClientCallContext(timeout=15.0) - def test_init_raises_value_error_if_no_card_or_url( - self, mock_httpx_client: AsyncMock - ): - with pytest.raises(ValueError) as exc_info: - JsonRpcTransport(httpx_client=mock_httpx_client) - assert 'Must provide either agent_card or url' in str(exc_info.value) + await transport.send_message(request, context=context) - @pytest.mark.asyncio - async def test_send_message_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - success_response = create_text_message_object( - role=Role.agent, content='Hi there!' - ) - rpc_response = SendMessageSuccessResponse( - id='123', jsonrpc='2.0', result=success_response - ) - response = httpx.Response( - 200, json=rpc_response.model_dump(mode='json') - ) - response.request = httpx.Request('POST', 'http://agent.example.com/api') - mock_httpx_client.post.return_value = response + mock_httpx_client.build_request.assert_called_once() + _, kwargs = mock_httpx_client.build_request.call_args + assert 'timeout' in kwargs + assert kwargs['timeout'] == httpx.Timeout(15.0) - response = await client.send_message(request=params) - assert isinstance(response, Message) - assert response.model_dump() == success_response.model_dump() +class TestGetTask: + """Tests for the get_task method.""" @pytest.mark.asyncio - async def test_send_message_error_response( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - error_response = InvalidParamsError() - rpc_response = { - 'id': '123', + async def test_get_task_success(self, transport, mock_httpx_client): + """Test successful task retrieval.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { 'jsonrpc': '2.0', - 'error': error_response.model_dump(exclude_none=True), + 'id': '1', + 'result': { + 'id': task_id, + 'contextId': 'ctx-123', + 'status': {'state': 'TASK_STATE_COMPLETED'}, + }, } - mock_httpx_client.post.return_value.json.return_value = rpc_response - - with pytest.raises(Exception): - await client.send_message(request=params) - - @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') - async def test_send_message_streaming_success( - self, - mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - mock_stream_response_1 = SendMessageSuccessResponse( - id='stream_id_123', - jsonrpc='2.0', - result=create_text_message_object( - content='First part ', role=Role.agent - ), - ) - mock_stream_response_2 = SendMessageSuccessResponse( - id='stream_id_123', - jsonrpc='2.0', - result=create_text_message_object( - content='second part ', role=Role.agent - ), - ) - sse_event_1 = ServerSentEvent( - data=mock_stream_response_1.model_dump_json() - ) - sse_event_2 = ServerSentEvent( - data=mock_stream_response_2.model_dump_json() - ) - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.aiter_sse.return_value = async_iterable_from_list( - [sse_event_1, sse_event_2] - ) - mock_aconnect_sse.return_value.__aenter__.return_value = ( - mock_event_source - ) + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - results = [ - item async for item in client.send_message_streaming(request=params) - ] + # Proto uses 'name' field for task identifier in request + request = GetTaskRequest(id=f'{task_id}') + response = await transport.get_task(request) - assert len(results) == 2 - assert isinstance(results[0], Message) - assert ( - results[0].model_dump() - == mock_stream_response_1.result.model_dump() - ) - assert isinstance(results[1], Message) - assert ( - results[1].model_dump() - == mock_stream_response_2.result.model_dump() - ) + assert isinstance(response, Task) + assert response.id == task_id + mock_httpx_client.build_request.assert_called_once() + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['method'] == 'GetTask' - # Repro of https://github.com/a2aproject/a2a-python/issues/540 @pytest.mark.asyncio - @respx.mock - async def test_send_message_streaming_comment_success( - self, - mock_agent_card: MagicMock, - ): - async with httpx.AsyncClient() as client: - transport = JsonRpcTransport( - httpx_client=client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - mock_stream_response_1 = SendMessageSuccessResponse( - id='stream_id_123', - jsonrpc='2.0', - result=create_text_message_object( - content='First part', role=Role.agent - ), - ) - mock_stream_response_2 = SendMessageSuccessResponse( - id='stream_id_123', - jsonrpc='2.0', - result=create_text_message_object( - content='Second part', role=Role.agent - ), - ) + async def test_get_task_with_history(self, transport, mock_httpx_client): + """Test task retrieval with history_length parameter.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { + 'jsonrpc': '2.0', + 'id': '1', + 'result': { + 'id': task_id, + 'contextId': 'ctx-123', + 'status': {'state': 'TASK_STATE_COMPLETED'}, + }, + } + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - sse_content = ( - 'id: stream_id_1\n' - f'data: {mock_stream_response_1.model_dump_json()}\n\n' - ': keep-alive\n\n' - 'id: stream_id_2\n' - f'data: {mock_stream_response_2.model_dump_json()}\n\n' - ': keep-alive\n\n' - ) + request = GetTaskRequest(id=f'{task_id}', history_length=10) + response = await transport.get_task(request) - respx.post(mock_agent_card.url).mock( - return_value=httpx.Response( - 200, - headers={'Content-Type': 'text/event-stream'}, - content=sse_content, - ) - ) + assert isinstance(response, Task) + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['params']['historyLength'] == 10 - results = [ - item - async for item in transport.send_message_streaming( - request=params - ) - ] - assert len(results) == 2 - assert results[0] == mock_stream_response_1.result - assert results[1] == mock_stream_response_2.result +class TestCancelTask: + """Tests for the cancel_task method.""" @pytest.mark.asyncio - async def test_send_request_http_status_error( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - mock_response = MagicMock(spec=httpx.Response) - mock_response.status_code = 404 - mock_response.text = 'Not Found' - http_error = httpx.HTTPStatusError( - 'Not Found', request=MagicMock(), response=mock_response - ) - mock_httpx_client.post.side_effect = http_error - - with pytest.raises(A2AClientHTTPError) as exc_info: - await client._send_request({}, {}) + async def test_cancel_task_success(self, transport, mock_httpx_client): + """Test successful task cancellation.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { + 'jsonrpc': '2.0', + 'id': '1', + 'result': { + 'id': task_id, + 'contextId': 'ctx-123', + 'status': {'state': 5}, # TASK_STATE_CANCELED = 5 + }, + } + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - assert exc_info.value.status_code == 404 - assert 'Not Found' in str(exc_info.value) + request = CancelTaskRequest(id=f'{task_id}') + response = await transport.cancel_task(request) - @pytest.mark.asyncio - async def test_send_request_json_decode_error( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - json_error = json.JSONDecodeError('Expecting value', 'doc', 0) - mock_response.json.side_effect = json_error - mock_httpx_client.post.return_value = mock_response + assert isinstance(response, Task) + assert response.status.state == TaskState.TASK_STATE_CANCELED + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['method'] == 'CancelTask' - with pytest.raises(A2AClientJSONError) as exc_info: - await client._send_request({}, {}) - assert 'Expecting value' in str(exc_info.value) +class TestTaskCallback: + """Tests for the task callback methods.""" @pytest.mark.asyncio - async def test_send_request_httpx_request_error( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + async def test_get_task_push_notification_config_success( + self, transport, mock_httpx_client ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - request_error = httpx.RequestError('Network issue', request=MagicMock()) - mock_httpx_client.post.side_effect = request_error - - with pytest.raises(A2AClientHTTPError) as exc_info: - await client._send_request({}, {}) - - assert exc_info.value.status_code == 503 - assert 'Network communication error' in str(exc_info.value) - assert 'Network issue' in str(exc_info.value) + """Test successful task callback retrieval.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { + 'jsonrpc': '2.0', + 'id': '1', + 'result': { + 'task_id': f'{task_id}', + }, + } + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - @pytest.mark.asyncio - async def test_send_message_client_timeout( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - mock_httpx_client.post.side_effect = httpx.ReadTimeout( - 'Request timed out' + request = GetTaskPushNotificationConfigRequest( + task_id=f'{task_id}', + id='config-1', ) - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - - with pytest.raises(A2AClientTimeoutError) as exc_info: - await client.send_message(request=params) - - assert 'Client Request timed out' in str(exc_info.value) + response = await transport.get_task_push_notification_config(request) - @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') - async def test_send_message_streaming_timeout( - self, - mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.response = MagicMock(spec=httpx.Response) - mock_event_source.response.raise_for_status.return_value = None - mock_event_source.aiter_sse.side_effect = httpx.TimeoutException( - 'Read timed out' - ) - mock_aconnect_sse.return_value.__aenter__.return_value = ( - mock_event_source - ) - - with pytest.raises(A2AClientTimeoutError) as exc_info: - _ = [ - item - async for item in client.send_message_streaming(request=params) - ] - - assert 'Client Request timed out' in str(exc_info.value) + assert isinstance(response, TaskPushNotificationConfig) + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['method'] == 'GetTaskPushNotificationConfig' @pytest.mark.asyncio - async def test_get_task_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + async def test_list_task_push_notification_configs_success( + self, transport, mock_httpx_client ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = TaskQueryParams(id='task-abc') - rpc_response = { - 'id': '123', + """Test successful task multiple callbacks retrieval.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, + 'id': '1', + 'result': { + 'configs': [ + { + 'task_id': f'{task_id}', + 'id': 'config-1', + 'url': 'https://example.com', + } + ] + }, } - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_request: - mock_send_request.return_value = rpc_response - response = await client.get_task(request=params) + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - assert isinstance(response, Task) - assert ( - response.model_dump() - == Task.model_validate(MINIMAL_TASK).model_dump() + request = ListTaskPushNotificationConfigsRequest( + task_id=f'{task_id}', ) - mock_send_request.assert_called_once() - sent_payload = mock_send_request.call_args.args[0] - assert sent_payload['method'] == 'tasks/get' + response = await transport.list_task_push_notification_configs(request) + + assert len(response.configs) == 1 + assert response.configs[0].task_id == task_id + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['method'] == 'ListTaskPushNotificationConfigs' @pytest.mark.asyncio - async def test_cancel_task_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + async def test_delete_task_push_notification_config_success( + self, transport, mock_httpx_client ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = TaskIdParams(id='task-abc') - rpc_response = { - 'id': '123', + """Test successful task callback deletion.""" + task_id = str(uuid4()) + mock_response = MagicMock() + mock_response.json.return_value = { 'jsonrpc': '2.0', - 'result': MINIMAL_CANCELLED_TASK, + 'id': '1', + 'result': { + 'task_id': f'{task_id}', + }, } - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_request: - mock_send_request.return_value = rpc_response - response = await client.cancel_task(request=params) + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - assert isinstance(response, Task) - assert ( - response.model_dump() - == Task.model_validate(MINIMAL_CANCELLED_TASK).model_dump() + request = DeleteTaskPushNotificationConfigRequest( + task_id=f'{task_id}', + id='config-1', ) - mock_send_request.assert_called_once() - sent_payload = mock_send_request.call_args.args[0] - assert sent_payload['method'] == 'tasks/cancel' + response = await transport.delete_task_push_notification_config(request) - @pytest.mark.asyncio - async def test_set_task_callback_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = TaskPushNotificationConfig( - task_id='task-abc', - push_notification_config=PushNotificationConfig( - url='http://callback.com' - ), - ) - rpc_response = { - 'id': '123', - 'jsonrpc': '2.0', - 'result': params.model_dump(mode='json'), - } - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_request: - mock_send_request.return_value = rpc_response - response = await client.set_task_callback(request=params) + mock_httpx_client.build_request.assert_called_once() + assert response is None + call_args = mock_httpx_client.build_request.call_args + payload = call_args[1]['json'] + assert payload['method'] == 'DeleteTaskPushNotificationConfig' - assert isinstance(response, TaskPushNotificationConfig) - assert response.model_dump() == params.model_dump() - mock_send_request.assert_called_once() - sent_payload = mock_send_request.call_args.args[0] - assert sent_payload['method'] == 'tasks/pushNotificationConfig/set' + +class TestClose: + """Tests for the close method.""" @pytest.mark.asyncio - async def test_get_task_callback_success( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = TaskIdParams(id='task-abc') - expected_response = TaskPushNotificationConfig( - task_id='task-abc', - push_notification_config=PushNotificationConfig( - url='http://callback.com' - ), - ) - rpc_response = { - 'id': '123', - 'jsonrpc': '2.0', - 'result': expected_response.model_dump(mode='json'), - } - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_request: - mock_send_request.return_value = rpc_response - response = await client.get_task_callback(request=params) + async def test_close(self, transport, mock_httpx_client): + """Test that close properly closes the httpx client.""" + await transport.close() - assert isinstance(response, TaskPushNotificationConfig) - assert response.model_dump() == expected_response.model_dump() - mock_send_request.assert_called_once() - sent_payload = mock_send_request.call_args.args[0] - assert sent_payload['method'] == 'tasks/pushNotificationConfig/get' +class TestStreamingErrors: @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') + @patch('a2a.client.transports.http_helpers._SSEEventSource') async def test_send_message_streaming_sse_error( self, mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, + transport: JsonRpcTransport, ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.aiter_sse.side_effect = SSEError( - 'Simulated SSE error' + request = create_send_message_request() + mock_event_source = AsyncMock() + mock_event_source.response.raise_for_status = MagicMock() + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } + mock_event_source.aiter_sse = MagicMock( + side_effect=SSEError('Simulated SSE error') ) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) - with pytest.raises(A2AClientHTTPError): - _ = [ - item - async for item in client.send_message_streaming(request=params) - ] + with pytest.raises(A2AClientError): + async for _ in transport.send_message_streaming(request): + pass @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') - async def test_send_message_streaming_json_error( + @patch('a2a.client.transports.http_helpers._SSEEventSource') + async def test_send_message_streaming_request_error( self, mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, + transport: JsonRpcTransport, ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - sse_event = ServerSentEvent(data='{invalid json') - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.aiter_sse.return_value = async_iterable_from_list( - [sse_event] + request = create_send_message_request() + mock_event_source = AsyncMock() + mock_event_source.response.raise_for_status = MagicMock() + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } + mock_event_source.aiter_sse = MagicMock( + side_effect=httpx.RequestError( + 'Simulated request error', request=MagicMock() + ) ) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) - with pytest.raises(A2AClientJSONError): - _ = [ - item - async for item in client.send_message_streaming(request=params) - ] + with pytest.raises(A2AClientError): + async for _ in transport.send_message_streaming(request): + pass @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') - async def test_send_message_streaming_request_error( + @patch('a2a.client.transports.http_helpers._SSEEventSource') + async def test_send_message_streaming_timeout( self, mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, + transport: JsonRpcTransport, ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.aiter_sse.side_effect = httpx.RequestError( - 'Simulated request error', request=MagicMock() + request = create_send_message_request() + mock_event_source = AsyncMock() + mock_event_source.response.raise_for_status = MagicMock() + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } + mock_event_source.aiter_sse = MagicMock( + side_effect=httpx.TimeoutException('Timeout') ) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) - with pytest.raises(A2AClientHTTPError): - _ = [ - item - async for item in client.send_message_streaming(request=params) - ] + with pytest.raises(A2AClientError, match='timed out'): + async for _ in transport.send_message_streaming(request): + pass - @pytest.mark.asyncio - async def test_get_card_no_card_provided( - self, mock_httpx_client: AsyncMock - ): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, url=self.AGENT_URL - ) - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') - mock_httpx_client.get.return_value = mock_response - card = await client.get_card() +class TestInterceptors: + """Tests for interceptor functionality.""" - assert card == AGENT_CARD - mock_httpx_client.get.assert_called_once() + +class TestExtensions: + """Tests for extension header functionality.""" @pytest.mark.asyncio - async def test_get_card_with_extended_card_support( - self, mock_httpx_client: AsyncMock + async def test_extensions_added_to_request( + self, mock_httpx_client, agent_card ): - agent_card = AGENT_CARD.model_copy( - update={'supports_authenticated_extended_card': True} - ) - client = JsonRpcTransport( - httpx_client=mock_httpx_client, agent_card=agent_card + """Test that extensions are added to request headers.""" + transport = JsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://test-agent.example.com', ) - rpc_response = { - 'id': '123', + mock_response = MagicMock() + mock_response.json.return_value = { 'jsonrpc': '2.0', - 'result': AGENT_CARD_EXTENDED.model_dump(mode='json'), + 'id': '1', + 'result': { + 'task': { + 'id': 'task-123', + 'contextId': 'ctx-123', + 'status': {'state': 'TASK_STATE_COMPLETED'}, + } + }, } - with patch.object( - client, '_send_request', new_callable=AsyncMock - ) as mock_send_request: - mock_send_request.return_value = rpc_response - card = await client.get_card() + mock_response.raise_for_status = MagicMock() + mock_httpx_client.send.return_value = mock_response - assert card == AGENT_CARD_EXTENDED - mock_send_request.assert_called_once() - sent_payload = mock_send_request.call_args.args[0] - assert sent_payload['method'] == 'agent/getAuthenticatedExtendedCard' - - @pytest.mark.asyncio - async def test_close(self, mock_httpx_client: AsyncMock): - client = JsonRpcTransport( - httpx_client=mock_httpx_client, url=self.AGENT_URL - ) - await client.close() - mock_httpx_client.aclose.assert_called_once() - - -class TestJsonRpcTransportExtensions: - @pytest.mark.asyncio - async def test_send_message_with_default_extensions( - self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock - ): - """Test that send_message adds extension headers when extensions are provided.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] - client = JsonRpcTransport( - httpx_client=mock_httpx_client, - agent_card=mock_agent_card, - extensions=extensions, - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) - success_response = create_text_message_object( - role=Role.agent, content='Hi there!' - ) - rpc_response = SendMessageSuccessResponse( - id='123', jsonrpc='2.0', result=success_response - ) - # Mock the response from httpx_client.post - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - mock_response.json.return_value = rpc_response.model_dump(mode='json') - mock_httpx_client.post.return_value = mock_response - - await client.send_message(request=params) - - mock_httpx_client.post.assert_called_once() - _, mock_kwargs = mock_httpx_client.post.call_args - - _assert_extensions_header( - mock_kwargs, - { - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - }, - ) + request = create_send_message_request() - @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') - async def test_send_message_streaming_with_new_extensions( - self, - mock_aconnect_sse: AsyncMock, - mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, - ): - """Test X-A2A-Extensions header in send_message_streaming.""" - new_extensions = ['https://example.com/test-ext/v2'] - extensions = ['https://example.com/test-ext/v1'] - client = JsonRpcTransport( - httpx_client=mock_httpx_client, - agent_card=mock_agent_card, - extensions=extensions, - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) + from a2a.client.client import ClientCallContext - mock_event_source = AsyncMock(spec=EventSource) - mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) - mock_aconnect_sse.return_value.__aenter__.return_value = ( - mock_event_source + context = ClientCallContext( + service_parameters={'A2A-Extensions': 'https://example.com/ext1'} ) - async for _ in client.send_message_streaming( - request=params, extensions=new_extensions - ): - pass - - mock_aconnect_sse.assert_called_once() - _, kwargs = mock_aconnect_sse.call_args + await transport.send_message(request, context=context) - _assert_extensions_header( - kwargs, - { - 'https://example.com/test-ext/v2', - }, + # Verify request was made with extension headers + mock_httpx_client.build_request.assert_called_once() + call_args = mock_httpx_client.build_request.call_args + # Extensions should be in the kwargs + assert ( + call_args[1].get('headers', {}).get('A2A-Extensions') + == 'https://example.com/ext1' ) @pytest.mark.asyncio - @patch('a2a.client.transports.jsonrpc.aconnect_sse') + @patch('a2a.client.transports.http_helpers._SSEEventSource') async def test_send_message_streaming_server_error_propagates( self, mock_aconnect_sse: AsyncMock, mock_httpx_client: AsyncMock, - mock_agent_card: MagicMock, + agent_card: AgentCard, ): """Test that send_message_streaming propagates server errors (e.g., 403, 500) directly.""" client = JsonRpcTransport( httpx_client=mock_httpx_client, - agent_card=mock_agent_card, - ) - params = MessageSendParams( - message=create_text_message_object(content='Error stream') + agent_card=agent_card, + url='http://test-agent.example.com', ) + request = create_send_message_request(text='Error stream') mock_event_source = AsyncMock(spec=EventSource) mock_response = MagicMock(spec=httpx.Response) @@ -996,88 +584,104 @@ async def test_send_message_streaming_server_error_propagates( response=mock_response, ) mock_event_source.response = mock_response - mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + + async def empty_aiter(): + if False: + yield + + mock_event_source.aiter_sse = MagicMock(return_value=empty_aiter()) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) - with pytest.raises(A2AClientHTTPError) as exc_info: - async for _ in client.send_message_streaming(request=params): + with pytest.raises(A2AClientError) as exc_info: + async for _ in client.send_message_streaming(request=request): pass - assert exc_info.value.status_code == 403 + assert 'HTTP Error 403' in str(exc_info.value) mock_aconnect_sse.assert_called_once() - @pytest.mark.asyncio - async def test_get_card_no_card_provided_with_extensions( - self, mock_httpx_client: AsyncMock - ): - """Test get_card with extensions set in Client when no card is initially provided. - Tests that the extensions are added to the HTTP GET request.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] - client = JsonRpcTransport( - httpx_client=mock_httpx_client, - url=TestJsonRpcTransport.AGENT_URL, - extensions=extensions, - ) - mock_response = AsyncMock(spec=httpx.Response) - mock_response.status_code = 200 - mock_response.json.return_value = AGENT_CARD.model_dump(mode='json') - mock_httpx_client.get.return_value = mock_response - - await client.get_card() - - mock_httpx_client.get.assert_called_once() - _, mock_kwargs = mock_httpx_client.get.call_args - - _assert_extensions_header( - mock_kwargs, - { - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - }, - ) - @pytest.mark.asyncio async def test_get_card_with_extended_card_support_with_extensions( - self, mock_httpx_client: AsyncMock + self, mock_httpx_client: AsyncMock, agent_card: AgentCard ): - """Test get_card with extensions passed to get_card call when extended card support is enabled. + """Test get_extended_agent_card with extensions passed to call when extended card support is enabled. Tests that the extensions are added to the RPC request.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] - agent_card = AGENT_CARD.model_copy( - update={'supports_authenticated_extended_card': True} + extensions_header_val = ( + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2' ) + agent_card.capabilities.extended_agent_card = True + client = JsonRpcTransport( httpx_client=mock_httpx_client, agent_card=agent_card, - extensions=extensions, + url='http://test-agent.example.com', ) + extended_card = AgentCard() + extended_card.CopyFrom(agent_card) + extended_card.name = 'Extended' + + request = GetExtendedAgentCardRequest() rpc_response = { 'id': '123', 'jsonrpc': '2.0', - 'result': AGENT_CARD_EXTENDED.model_dump(mode='json'), + 'result': json_format.MessageToDict(extended_card), } + + from a2a.client.client import ClientCallContext + + context = ClientCallContext( + service_parameters={HTTP_EXTENSION_HEADER: extensions_header_val} + ) + with patch.object( client, '_send_request', new_callable=AsyncMock ) as mock_send_request: mock_send_request.return_value = rpc_response - await client.get_card(extensions=extensions) + await client.get_extended_agent_card(request, context=context) mock_send_request.assert_called_once() _, mock_kwargs = mock_send_request.call_args[0] - _assert_extensions_header( - mock_kwargs, - { - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - }, - ) + # _send_request receives context as second arg OR http_kwargs if mocked lower level? + # In implementation: await self._send_request(rpc_request.data, context) + # So mocks should see context. + # Wait, the test asserts _send_request call args. + assert mock_kwargs == context + + # But verify headers are IN context or processed later? + # send_request calls _get_http_args(context) + # The test originally verified: _assert_extensions_header(mock_kwargs, ...) + # But mock_kwargs here is the 2nd argument to _send_request which IS context. + # The original test mocked _send_request? + # Let's check original test. + # "with patch.object(client, '_send_request', ...)" + # "mock_send_request.assert_called_once()" + # "_, mock_kwargs = mock_send_request.call_args[0]" + # The args to _send_request are (self, payload, context). + # So mock_kwargs is CONTEXT. + # The original assertion _assert_extensions_header checked mock_kwargs.get('headers'). + # DOES context have headers/get method? No. + # So the original test was mocking _send_request but maybe assuming it was modifying kwargs or similar? + # No, _send_request signature is (payload, context). + # Ah, maybe I should check what _send_request DOES implicitly? + # Or maybe test was testing logic INSIDE _send_request but mocking it? That defeats the purpose. + # Ah, original test: `client = JsonRpcTransport(...)` + # `await client.get_extended_agent_card(request, extensions=extensions)` + # The client calls `await self._send_request(rpc_request.data, context)`. + # So calling `_send_request` mock. + # The original test verified `mock_kwargs`. + # Maybe the original `get_extended_agent_card` constructed `http_kwargs` and passed it? + # In original code (which I can't see but guess), maybe `get_extended_agent_card` computed extensions headers? + + # In current implementation (Step 480): + # get_extended_agent_card calls `await self._send_request(rpc_request.data, context)` + # It does NOT inspect extensions. + # So verifying `mock_kwargs` (which is context) is useless for headers unless context has them. + # But I'm creating context with headers in service_parameters. + # So I can verify context has expected service_parameters. + + assert mock_kwargs.service_parameters == { + HTTP_EXTENSION_HEADER: extensions_header_val + } diff --git a/tests/client/transports/test_rest_client.py b/tests/client/transports/test_rest_client.py index 8f2232fbd..1e9398181 100644 --- a/tests/client/transports/test_rest_client.py +++ b/tests/client/transports/test_rest_client.py @@ -3,23 +3,34 @@ import httpx import pytest -import respx -from google.protobuf.json_format import MessageToJson +from google.protobuf import json_format +from google.protobuf.timestamp_pb2 import Timestamp from httpx_sse import EventSource, ServerSentEvent -from a2a.client import create_text_message_object -from a2a.client.errors import A2AClientHTTPError, A2AClientTimeoutError +from a2a.helpers.proto_helpers import new_text_message +from a2a.client.client import ClientCallContext +from a2a.client.errors import A2AClientError from a2a.client.transports.rest import RestTransport from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.grpc import a2a_pb2 -from a2a.types import ( +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, - MessageSendParams, - Role, + AgentInterface, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + SendMessageRequest, + SubscribeToTaskRequest, + TaskPushNotificationConfig, + TaskState, ) -from a2a.utils import proto_utils +from a2a.utils.constants import TransportProtocol +from a2a.utils.errors import A2A_REST_ERROR_MAPPING @pytest.fixture @@ -30,7 +41,14 @@ def mock_httpx_client() -> AsyncMock: @pytest.fixture def mock_agent_card() -> MagicMock: mock = MagicMock(spec=AgentCard, url='http://agent.example.com/api') - mock.supports_authenticated_extended_card = False + mock.supported_interfaces = [ + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url='http://agent.example.com/api', + ) + ] + mock.capabilities = MagicMock() + mock.capabilities.extended_agent_card = False return mock @@ -52,7 +70,7 @@ def _assert_extensions_header(mock_kwargs: dict, expected_extensions: set[str]): class TestRestTransport: @pytest.mark.asyncio - @patch('a2a.client.transports.rest.aconnect_sse') + @patch('a2a.client.transports.http_helpers._SSEEventSource') async def test_send_message_streaming_timeout( self, mock_aconnect_sse: AsyncMock, @@ -60,13 +78,18 @@ async def test_send_message_streaming_timeout( mock_agent_card: MagicMock, ): client = RestTransport( - httpx_client=mock_httpx_client, agent_card=mock_agent_card + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') + params = SendMessageRequest( + message=new_text_message(text='Hello stream') ) mock_event_source = AsyncMock(spec=EventSource) mock_event_source.response = MagicMock(spec=httpx.Response) + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } mock_event_source.response.raise_for_status.return_value = None mock_event_source.aiter_sse.side_effect = httpx.TimeoutException( 'Read timed out' @@ -75,7 +98,7 @@ async def test_send_message_streaming_timeout( mock_event_source ) - with pytest.raises(A2AClientTimeoutError) as exc_info: + with pytest.raises(A2AClientError) as exc_info: _ = [ item async for item in client.send_message_streaming(request=params) @@ -83,6 +106,130 @@ async def test_send_message_streaming_timeout( assert 'Client Request timed out' in str(exc_info.value) + @pytest.mark.parametrize('error_cls', list(A2A_REST_ERROR_MAPPING.keys())) + @pytest.mark.asyncio + async def test_rest_mapped_errors( + self, + mock_httpx_client: AsyncMock, + mock_agent_card: MagicMock, + error_cls, + ): + """Test handling of mapped REST HTTP error responses.""" + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + params = SendMessageRequest(message=new_text_message(text='Hello')) + + mock_build_request = MagicMock( + return_value=AsyncMock(spec=httpx.Request) + ) + mock_httpx_client.build_request = mock_build_request + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 500 + + reason = A2A_REST_ERROR_MAPPING[error_cls][2] + + mock_response.json.return_value = { + 'error': { + 'code': 500, + 'status': 'UNKNOWN', + 'message': 'Mapped Error', + 'details': [ + { + '@type': 'type.googleapis.com/google.rpc.ErrorInfo', + 'reason': reason, + 'domain': 'a2a-protocol.org', + 'metadata': {}, + } + ], + } + } + + error = httpx.HTTPStatusError( + 'Server Error', + request=httpx.Request('POST', 'http://test.url'), + response=mock_response, + ) + + mock_httpx_client.send.side_effect = error + + with pytest.raises(error_cls): + await client.send_message(request=params) + + @pytest.mark.asyncio + async def test_send_message_with_timeout_context( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + """Test that send_message passes context timeout to build_request.""" + + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + params = SendMessageRequest(message=new_text_message(text='Hello')) + context = ClientCallContext(timeout=10.0) + + mock_build_request = MagicMock( + return_value=AsyncMock(spec=httpx.Request) + ) + mock_httpx_client.build_request = mock_build_request + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_httpx_client.send.return_value = mock_response + + await client.send_message(request=params, context=context) + + mock_build_request.assert_called_once() + _, kwargs = mock_build_request.call_args + assert 'timeout' in kwargs + assert kwargs['timeout'] == httpx.Timeout(10.0) + + @pytest.mark.asyncio + async def test_url_serialization( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + """Test that query parameters are correctly serialized to the URL.""" + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + + timestamp = Timestamp() + timestamp.FromJsonString('2024-03-09T16:00:00Z') + + request = ListTasksRequest( + tenant='my-tenant', + status=TaskState.TASK_STATE_WORKING, + include_artifacts=True, + status_timestamp_after=timestamp, + ) + + # Use real build_request to get actual URL serialization + mock_httpx_client.build_request.side_effect = ( + httpx.AsyncClient().build_request + ) + mock_httpx_client.send.return_value = AsyncMock( + spec=httpx.Response, status_code=200, json=lambda: {'tasks': []} + ) + + await client.list_tasks(request=request) + + mock_httpx_client.send.assert_called_once() + sent_request = mock_httpx_client.send.call_args[0][0] + + # Check decoded query parameters for spec compliance + params = sent_request.url.params + assert params['status'] == 'TASK_STATE_WORKING' + assert params['includeArtifacts'] == 'true' + assert params['statusTimestampAfter'] == '2024-03-09T16:00:00Z' + assert 'tenant' not in params + class TestRestTransportExtensions: @pytest.mark.asyncio @@ -90,18 +237,12 @@ async def test_send_message_with_default_extensions( self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock ): """Test that send_message adds extensions to headers.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] client = RestTransport( httpx_client=mock_httpx_client, - extensions=extensions, agent_card=mock_agent_card, + url='http://agent.example.com/api', ) - params = MessageSendParams( - message=create_text_message_object(content='Hello') - ) + params = SendMessageRequest(message=new_text_message(text='Hello')) # Mock the build_request method to capture its inputs mock_build_request = MagicMock( @@ -114,7 +255,12 @@ async def test_send_message_with_default_extensions( mock_response.status_code = 200 mock_httpx_client.send.return_value = mock_response - await client.send_message(request=params) + context = ClientCallContext( + service_parameters={ + 'A2A-Extensions': 'https://example.com/test-ext/v1,https://example.com/test-ext/v2' + } + ) + await client.send_message(request=params, context=context) mock_build_request.assert_called_once() _, kwargs = mock_build_request.call_args @@ -127,92 +273,42 @@ async def test_send_message_with_default_extensions( }, ) - # Repro of https://github.com/a2aproject/a2a-python/issues/540 - @pytest.mark.asyncio - @respx.mock - async def test_send_message_streaming_comment_success( - self, - mock_agent_card: MagicMock, - ): - """Test that SSE comments are ignored.""" - async with httpx.AsyncClient() as client: - transport = RestTransport( - httpx_client=client, agent_card=mock_agent_card - ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') - ) - - mock_stream_response_1 = a2a_pb2.StreamResponse( - msg=proto_utils.ToProto.message( - create_text_message_object( - content='First part', role=Role.agent - ) - ) - ) - mock_stream_response_2 = a2a_pb2.StreamResponse( - msg=proto_utils.ToProto.message( - create_text_message_object( - content='Second part', role=Role.agent - ) - ) - ) - - sse_content = ( - 'id: stream_id_1\n' - f'data: {MessageToJson(mock_stream_response_1, indent=None)}\n\n' - ': keep-alive\n\n' - 'id: stream_id_2\n' - f'data: {MessageToJson(mock_stream_response_2, indent=None)}\n\n' - ': keep-alive\n\n' - ) - - respx.post( - f'{mock_agent_card.url.rstrip("/")}/v1/message:stream' - ).mock( - return_value=httpx.Response( - 200, - headers={'Content-Type': 'text/event-stream'}, - content=sse_content, - ) - ) - - results = [] - async for item in transport.send_message_streaming(request=params): - results.append(item) - - assert len(results) == 2 - assert results[0].parts[0].root.text == 'First part' - assert results[1].parts[0].root.text == 'Second part' - @pytest.mark.asyncio - @patch('a2a.client.transports.rest.aconnect_sse') + @patch('a2a.client.transports.http_helpers._SSEEventSource') async def test_send_message_streaming_with_new_extensions( self, mock_aconnect_sse: AsyncMock, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock, ): - """Test X-A2A-Extensions header in send_message_streaming.""" - new_extensions = ['https://example.com/test-ext/v2'] - extensions = ['https://example.com/test-ext/v1'] + """Test A2A-Extensions header in send_message_streaming.""" client = RestTransport( httpx_client=mock_httpx_client, agent_card=mock_agent_card, - extensions=extensions, + url='http://agent.example.com/api', ) - params = MessageSendParams( - message=create_text_message_object(content='Hello stream') + params = SendMessageRequest( + message=new_text_message(text='Hello stream') ) mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.response = MagicMock(spec=httpx.Response) + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) + context = ClientCallContext( + service_parameters={ + 'A2A-Extensions': 'https://example.com/test-ext/v2' + } + ) + async for _ in client.send_message_streaming( - request=params, extensions=new_extensions + request=params, context=context ): pass @@ -227,7 +323,7 @@ async def test_send_message_streaming_with_new_extensions( ) @pytest.mark.asyncio - @patch('a2a.client.transports.rest.aconnect_sse') + @patch('a2a.client.transports.http_helpers._SSEEventSource') async def test_send_message_streaming_server_error_propagates( self, mock_aconnect_sse: AsyncMock, @@ -238,9 +334,10 @@ async def test_send_message_streaming_server_error_propagates( client = RestTransport( httpx_client=mock_httpx_client, agent_card=mock_agent_card, + url='http://agent.example.com/api', ) - params = MessageSendParams( - message=create_text_message_object(content='Error stream') + request = SendMessageRequest( + message=new_text_message(text='Error stream') ) mock_event_source = AsyncMock(spec=EventSource) @@ -251,109 +348,394 @@ async def test_send_message_streaming_server_error_propagates( request=httpx.Request('POST', 'http://test.url'), response=mock_response, ) + + async def empty_aiter(): + if False: + yield + mock_event_source.response = mock_response - mock_event_source.aiter_sse.return_value = async_iterable_from_list([]) + mock_event_source.aiter_sse = MagicMock(return_value=empty_aiter()) mock_aconnect_sse.return_value.__aenter__.return_value = ( mock_event_source ) - with pytest.raises(A2AClientHTTPError) as exc_info: - async for _ in client.send_message_streaming(request=params): + with pytest.raises(A2AClientError) as exc_info: + async for _ in client.send_message_streaming(request=request): pass - assert exc_info.value.status_code == 403 + assert 'HTTP Error 403' in str(exc_info.value) mock_aconnect_sse.assert_called_once() @pytest.mark.asyncio - async def test_get_card_no_card_provided_with_extensions( + async def test_get_card_with_extended_card_support_with_extensions( self, mock_httpx_client: AsyncMock ): - """Test get_card with extensions set in Client when no card is initially provided. - Tests that the extensions are added to the HTTP GET request.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] + """Test get_extended_agent_card with extensions passed to call when extended card support is enabled. + Tests that the extensions are added to the GET request.""" + extensions_str = ( + 'https://example.com/test-ext/v1,https://example.com/test-ext/v2' + ) + agent_card = AgentCard( + name='Test Agent', + description='Test Agent Description', + version='1.0.0', + capabilities=AgentCapabilities(extended_agent_card=True), + ) + interface = agent_card.supported_interfaces.add() + interface.protocol_binding = TransportProtocol.HTTP_JSON + interface.url = 'http://agent.example.com/api' + client = RestTransport( httpx_client=mock_httpx_client, + agent_card=agent_card, url='http://agent.example.com/api', - extensions=extensions, ) mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 - mock_response.json.return_value = { - 'name': 'Test Agent', - 'description': 'Test Agent Description', - 'url': 'http://agent.example.com/api', - 'version': '1.0.0', - 'default_input_modes': ['text'], - 'default_output_modes': ['text'], - 'capabilities': AgentCapabilities().model_dump(), - 'skills': [], - } - mock_httpx_client.get.return_value = mock_response + mock_response.json.return_value = json_format.MessageToDict( + agent_card + ) # Extended card same for mock + mock_httpx_client.send.return_value = mock_response - await client.get_card() + request = GetExtendedAgentCardRequest() - mock_httpx_client.get.assert_called_once() - _, mock_kwargs = mock_httpx_client.get.call_args + context = ClientCallContext( + service_parameters={HTTP_EXTENSION_HEADER: extensions_str} + ) - _assert_extensions_header( - mock_kwargs, - { - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - }, + with patch.object( + client, '_execute_request', new_callable=AsyncMock + ) as mock_execute_request: + mock_execute_request.return_value = json_format.MessageToDict( + agent_card + ) + await client.get_extended_agent_card(request, context=context) + + mock_execute_request.assert_called_once() + call_args = mock_execute_request.call_args + assert ( + call_args[1].get('context') == context or call_args[0][3] == context ) + _context = call_args[1].get('context') or call_args[0][3] + assert _context.service_parameters == { + HTTP_EXTENSION_HEADER: extensions_str + } + + +class TestTaskCallback: + """Tests for the task callback methods.""" + @pytest.mark.asyncio - async def test_get_card_with_extended_card_support_with_extensions( - self, mock_httpx_client: AsyncMock + async def test_list_task_push_notification_configs_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock ): - """Test get_card with extensions passed to get_card call when extended card support is enabled. - Tests that the extensions are added to the GET request.""" - extensions = [ - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - ] - agent_card = AgentCard( - name='Test Agent', - description='Test Agent Description', + """Test successful task multiple callbacks retrieval.""" + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, url='http://agent.example.com/api', - version='1.0.0', - default_input_modes=['text'], - default_output_modes=['text'], - capabilities=AgentCapabilities(), - skills=[], - supports_authenticated_extended_card=True, ) + task_id = 'task-1' + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + mock_response.json.return_value = { + 'configs': [ + { + 'taskId': task_id, + 'id': 'config-1', + 'url': 'https://example.com', + } + ] + } + mock_httpx_client.send.return_value = mock_response + + # Mock the build_request method to capture its inputs + mock_build_request = MagicMock( + return_value=AsyncMock(spec=httpx.Request) + ) + mock_httpx_client.build_request = mock_build_request + + request = ListTaskPushNotificationConfigsRequest( + task_id=task_id, + ) + response = await client.list_task_push_notification_configs(request) + + assert len(response.configs) == 1 + assert response.configs[0].task_id == task_id + + mock_build_request.assert_called_once() + call_args = mock_build_request.call_args + assert call_args[0][0] == 'GET' + assert f'/tasks/{task_id}/pushNotificationConfigs' in call_args[0][1] + + @pytest.mark.asyncio + async def test_delete_task_push_notification_config_success( + self, mock_httpx_client: AsyncMock, mock_agent_card: MagicMock + ): + """Test successful task callback deletion.""" client = RestTransport( httpx_client=mock_httpx_client, - agent_card=agent_card, + agent_card=mock_agent_card, + url='http://agent.example.com/api', ) - + task_id = 'task-1' mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 - mock_response.json.return_value = agent_card.model_dump(mode='json') + mock_response.json.return_value = {} mock_httpx_client.send.return_value = mock_response - with patch.object( - client, '_send_get_request', new_callable=AsyncMock - ) as mock_send_get_request: - mock_send_get_request.return_value = agent_card.model_dump( - mode='json' - ) - await client.get_card(extensions=extensions) + # Mock the build_request method to capture its inputs + mock_build_request = MagicMock( + return_value=AsyncMock(spec=httpx.Request) + ) + mock_httpx_client.build_request = mock_build_request - mock_send_get_request.assert_called_once() - _, _, mock_kwargs = mock_send_get_request.call_args[0] + request = DeleteTaskPushNotificationConfigRequest( + task_id=task_id, + id='config-1', + ) + await client.delete_task_push_notification_config(request) - _assert_extensions_header( - mock_kwargs, - { - 'https://example.com/test-ext/v1', - 'https://example.com/test-ext/v2', - }, + mock_build_request.assert_called_once() + call_args = mock_build_request.call_args + assert call_args[0][0] == 'DELETE' + assert ( + f'/tasks/{task_id}/pushNotificationConfigs/config-1' + in call_args[0][1] + ) + + +class TestRestTransportTenant: + """Tests for tenant path prepending in RestTransport.""" + + @pytest.mark.parametrize( + 'method_name, request_obj, expected_path', + [ + ( + 'send_message', + SendMessageRequest( + tenant='my-tenant', + message=new_text_message(text='hi'), + ), + '/my-tenant/message:send', + ), + ( + 'list_tasks', + ListTasksRequest(tenant='my-tenant'), + '/my-tenant/tasks', + ), + ( + 'get_task', + GetTaskRequest(tenant='my-tenant', id='task-123'), + '/my-tenant/tasks/task-123', + ), + ( + 'cancel_task', + CancelTaskRequest(tenant='my-tenant', id='task-123'), + '/my-tenant/tasks/task-123:cancel', + ), + ( + 'create_task_push_notification_config', + TaskPushNotificationConfig( + tenant='my-tenant', task_id='task-123' + ), + '/my-tenant/tasks/task-123/pushNotificationConfigs', + ), + ( + 'get_task_push_notification_config', + GetTaskPushNotificationConfigRequest( + tenant='my-tenant', task_id='task-123', id='cfg-1' + ), + '/my-tenant/tasks/task-123/pushNotificationConfigs/cfg-1', + ), + ( + 'list_task_push_notification_configs', + ListTaskPushNotificationConfigsRequest( + tenant='my-tenant', task_id='task-123' + ), + '/my-tenant/tasks/task-123/pushNotificationConfigs', + ), + ( + 'delete_task_push_notification_config', + DeleteTaskPushNotificationConfigRequest( + tenant='my-tenant', task_id='task-123', id='cfg-1' + ), + '/my-tenant/tasks/task-123/pushNotificationConfigs/cfg-1', + ), + ], + ) + @pytest.mark.asyncio + async def test_rest_methods_prepend_tenant( + self, + method_name, + request_obj, + expected_path, + mock_httpx_client, + mock_agent_card, + ): + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + + # 1. Get the method dynamically + method = getattr(client, method_name) + + # 2. Setup mocks + mock_httpx_client.build_request.return_value = MagicMock( + spec=httpx.Request + ) + mock_httpx_client.send.return_value = AsyncMock( + spec=httpx.Response, + status_code=200, + json=MagicMock(return_value={}), + ) + + # 3. Call the method + await method(request=request_obj) + + # 4. Verify the URL + args, _ = mock_httpx_client.build_request.call_args + assert args[1] == f'http://agent.example.com/api{expected_path}' + + @pytest.mark.asyncio + async def test_rest_get_extended_agent_card_prepend_tenant( + self, + mock_httpx_client, + mock_agent_card, + ): + mock_agent_card.capabilities.extended_agent_card = True + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + + request = GetExtendedAgentCardRequest(tenant='my-tenant') + + # 1. Setup mocks + mock_httpx_client.build_request.return_value = MagicMock( + spec=httpx.Request + ) + mock_httpx_client.send.return_value = AsyncMock( + spec=httpx.Response, + status_code=200, + json=MagicMock(return_value={}), + ) + + # 2. Call the method + await client.get_extended_agent_card(request=request) + + # 3. Verify the URL + args, _ = mock_httpx_client.build_request.call_args + assert ( + args[1] + == 'http://agent.example.com/api/my-tenant/extendedAgentCard' + ) + + @pytest.mark.asyncio + async def test_rest_get_task_prepend_empty_tenant( + self, + mock_httpx_client, + mock_agent_card, + ): + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', + ) + + request = GetTaskRequest(tenant='', id='task-123') + + # 1. Setup mocks + mock_httpx_client.build_request.return_value = MagicMock( + spec=httpx.Request + ) + mock_httpx_client.send.return_value = AsyncMock( + spec=httpx.Response, + status_code=200, + json=MagicMock(return_value={}), + ) + + # 2. Call the method + await client.get_task(request=request) + + # 3. Verify the URL + args, _ = mock_httpx_client.build_request.call_args + assert args[1] == 'http://agent.example.com/api/tasks/task-123' + + @pytest.mark.parametrize( + 'method_name, request_obj, expected_path', + [ + ( + 'subscribe', + SubscribeToTaskRequest(tenant='my-tenant', id='task-123'), + '/my-tenant/tasks/task-123:subscribe', + ), + ( + 'send_message_streaming', + SendMessageRequest( + tenant='my-tenant', + message=new_text_message(text='hi'), + ), + '/my-tenant/message:stream', + ), + ], + ) + @pytest.mark.asyncio + @patch('a2a.client.transports.http_helpers._SSEEventSource') + async def test_rest_streaming_methods_prepend_tenant( # noqa: PLR0913 + self, + mock_aconnect_sse, + method_name, + request_obj, + expected_path, + mock_httpx_client, + mock_agent_card, + ): + client = RestTransport( + httpx_client=mock_httpx_client, + agent_card=mock_agent_card, + url='http://agent.example.com/api', ) + + # 1. Get the method dynamically + method = getattr(client, method_name) + + # 2. Setup mocks + mock_event_source = AsyncMock(spec=EventSource) + mock_event_source.response = MagicMock(spec=httpx.Response) + mock_event_source.response.headers = { + 'content-type': 'text/event-stream' + } + mock_event_source.response.raise_for_status.return_value = None + + async def empty_aiter(): + if False: + yield + + mock_event_source.aiter_sse.return_value = empty_aiter() + mock_aconnect_sse.return_value.__aenter__.return_value = ( + mock_event_source + ) + + # 3. Call the method + async for _ in method(request=request_obj): + pass + + # 4. Verify the URL and method + mock_aconnect_sse.assert_called_once() + args, kwargs = mock_aconnect_sse.call_args + # method is 2nd positional argument + assert args[1] == 'POST' + if method_name == 'subscribe': + assert kwargs.get('json') is None + else: + assert kwargs.get('json') == json_format.MessageToDict(request_obj) + + # url is 3rd positional argument in aconnect_sse(client, method, url, ...) + assert args[2] == f'http://agent.example.com/api{expected_path}' diff --git a/tests/client/transports/test_tenant_decorator.py b/tests/client/transports/test_tenant_decorator.py new file mode 100644 index 000000000..b08406bad --- /dev/null +++ b/tests/client/transports/test_tenant_decorator.py @@ -0,0 +1,129 @@ +import pytest +from unittest.mock import AsyncMock, MagicMock + +from a2a.client.transports.base import ClientTransport +from a2a.client.transports.tenant_decorator import TenantTransportDecorator +from a2a.types.a2a_pb2 import ( + AgentCard, + CancelTaskRequest, + TaskPushNotificationConfig, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + Message, + Part, + SendMessageRequest, + StreamResponse, + SubscribeToTaskRequest, +) + + +@pytest.fixture +def mock_transport() -> AsyncMock: + return AsyncMock(spec=ClientTransport) + + +class TestTenantTransportDecorator: + @pytest.mark.asyncio + async def test_resolve_tenant_logic( + self, mock_transport: AsyncMock + ) -> None: + tenant_id = 'test-tenant' + decorator = TenantTransportDecorator(mock_transport, tenant_id) + + # Case 1: Tenant already set on request + assert decorator._resolve_tenant('existing-tenant') == 'existing-tenant' + + # Case 2: Tenant not set (empty string) + assert decorator._resolve_tenant('') == tenant_id + + @pytest.mark.asyncio + async def test_resolve_tenant_logic_empty_tenant( + self, mock_transport: AsyncMock + ) -> None: + decorator = TenantTransportDecorator(mock_transport, '') + + # Case 1: Tenant already set on request + assert decorator._resolve_tenant('existing-tenant') == 'existing-tenant' + + # Case 2: Tenant not set (empty string) + assert decorator._resolve_tenant('') == '' + + @pytest.mark.parametrize( + 'method_name, request_obj', + [ + ( + 'send_message', + SendMessageRequest(message=Message(parts=[Part(text='hello')])), + ), + ( + 'get_task', + GetTaskRequest(id='t1'), + ), + ( + 'list_tasks', + ListTasksRequest(), + ), + ( + 'cancel_task', + CancelTaskRequest(id='t1'), + ), + ( + 'create_task_push_notification_config', + TaskPushNotificationConfig(task_id='t1'), + ), + ( + 'get_task_push_notification_config', + GetTaskPushNotificationConfigRequest(task_id='t1', id='c1'), + ), + ( + 'list_task_push_notification_configs', + ListTaskPushNotificationConfigsRequest(task_id='t1'), + ), + ( + 'delete_task_push_notification_config', + DeleteTaskPushNotificationConfigRequest(task_id='t1', id='c1'), + ), + ('get_extended_agent_card', GetExtendedAgentCardRequest()), + ], + ) + @pytest.mark.asyncio + async def test_methods( + self, mock_transport: AsyncMock, method_name, request_obj + ) -> None: + """Test that tenant is set on the request for all methods.""" + tenant_id = 'test-tenant' + decorator = TenantTransportDecorator(mock_transport, tenant_id) + mock_method = getattr(mock_transport, method_name) + + await getattr(decorator, method_name)(request_obj) + + mock_method.assert_called_once() + assert mock_transport.mock_calls[0][0] == method_name + assert request_obj.tenant == tenant_id + + @pytest.mark.asyncio + async def test_streaming_methods(self, mock_transport: AsyncMock) -> None: + """Test that tenant is set on the request for streaming methods.""" + tenant_id = 'test-tenant' + decorator = TenantTransportDecorator(mock_transport, tenant_id) + + async def mock_stream(*args, **kwargs): + yield StreamResponse() + + # Test subscribe + mock_transport.subscribe.return_value = mock_stream() + request_sub = SubscribeToTaskRequest(id='t1') + async for _ in decorator.subscribe(request_sub): + pass + assert request_sub.tenant == tenant_id + + # Test send_message_streaming + mock_transport.send_message_streaming.return_value = mock_stream() + request_msg = SendMessageRequest() + async for _ in decorator.send_message_streaming(request_msg): + pass + assert request_msg.tenant == tenant_id diff --git a/tests/compat/__init__.py b/tests/compat/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/compat/v0_3/__init__.py b/tests/compat/v0_3/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/compat/v0_3/test_context_builders.py b/tests/compat/v0_3/test_context_builders.py new file mode 100644 index 000000000..1b711f52f --- /dev/null +++ b/tests/compat/v0_3/test_context_builders.py @@ -0,0 +1,159 @@ +from unittest.mock import AsyncMock, MagicMock + +import grpc + +from starlette.datastructures import Headers + +from a2a.compat.v0_3.context_builders import ( + V03GrpcServerCallContextBuilder, + V03ServerCallContextBuilder, +) +from a2a.compat.v0_3.extension_headers import LEGACY_HTTP_EXTENSION_HEADER +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.grpc_handler import ( + DefaultGrpcServerCallContextBuilder, +) +from a2a.server.routes.common import DefaultServerCallContextBuilder + + +def _make_mock_request(headers=None): + request = MagicMock() + request.scope = {} + request.headers = Headers(headers or {}) + return request + + +def _make_mock_grpc_context(metadata: list[tuple[str, str]]) -> AsyncMock: + context = AsyncMock(spec=grpc.aio.ServicerContext) + context.invocation_metadata.return_value = grpc.aio.Metadata(*metadata) + return context + + +class TestV03ServerCallContextBuilder: + def test_legacy_header_only(self): + request = _make_mock_request( + headers={LEGACY_HTTP_EXTENSION_HEADER: 'legacy-ext'} + ) + builder = V03ServerCallContextBuilder(DefaultServerCallContextBuilder()) + + ctx = builder.build(request) + + assert isinstance(ctx, ServerCallContext) + assert ctx.requested_extensions == {'legacy-ext'} + + def test_spec_header_only(self): + request = _make_mock_request( + headers={HTTP_EXTENSION_HEADER: 'spec-ext'} + ) + builder = V03ServerCallContextBuilder(DefaultServerCallContextBuilder()) + + ctx = builder.build(request) + + assert ctx.requested_extensions == {'spec-ext'} + + def test_both_headers_merged(self): + request = _make_mock_request( + headers={ + HTTP_EXTENSION_HEADER: 'spec-ext', + LEGACY_HTTP_EXTENSION_HEADER: 'legacy-ext', + } + ) + builder = V03ServerCallContextBuilder(DefaultServerCallContextBuilder()) + + ctx = builder.build(request) + + assert ctx.requested_extensions == {'spec-ext', 'legacy-ext'} + + def test_legacy_header_comma_separated(self): + request = _make_mock_request( + headers={LEGACY_HTTP_EXTENSION_HEADER: 'foo, bar'} + ) + builder = V03ServerCallContextBuilder(DefaultServerCallContextBuilder()) + + ctx = builder.build(request) + + assert ctx.requested_extensions == {'foo', 'bar'} + + def test_no_extensions(self): + request = _make_mock_request() + builder = V03ServerCallContextBuilder(DefaultServerCallContextBuilder()) + + ctx = builder.build(request) + + assert ctx.requested_extensions == set() + + +class TestV03GrpcServerCallContextBuilder: + def test_legacy_metadata_only(self): + context = _make_mock_grpc_context( + [(LEGACY_HTTP_EXTENSION_HEADER.lower(), 'legacy-ext')] + ) + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert isinstance(ctx, ServerCallContext) + assert ctx.requested_extensions == {'legacy-ext'} + + def test_spec_metadata_only(self): + context = _make_mock_grpc_context( + [(HTTP_EXTENSION_HEADER.lower(), 'spec-ext')] + ) + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert ctx.requested_extensions == {'spec-ext'} + + def test_both_metadata_merged(self): + context = _make_mock_grpc_context( + [ + (HTTP_EXTENSION_HEADER.lower(), 'spec-ext'), + (LEGACY_HTTP_EXTENSION_HEADER.lower(), 'legacy-ext'), + ] + ) + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert ctx.requested_extensions == {'spec-ext', 'legacy-ext'} + + def test_legacy_metadata_comma_separated(self): + context = _make_mock_grpc_context( + [(LEGACY_HTTP_EXTENSION_HEADER.lower(), 'foo, bar')] + ) + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert ctx.requested_extensions == {'foo', 'bar'} + + def test_no_extensions(self): + context = _make_mock_grpc_context([]) + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert ctx.requested_extensions == set() + + def test_no_metadata(self): + context = AsyncMock(spec=grpc.aio.ServicerContext) + context.invocation_metadata.return_value = None + builder = V03GrpcServerCallContextBuilder( + DefaultGrpcServerCallContextBuilder() + ) + + ctx = builder.build(context) + + assert ctx.requested_extensions == set() diff --git a/tests/compat/v0_3/test_conversions.py b/tests/compat/v0_3/test_conversions.py new file mode 100644 index 000000000..78a6d563b --- /dev/null +++ b/tests/compat/v0_3/test_conversions.py @@ -0,0 +1,2040 @@ +import base64 + +import pytest + +from google.protobuf.json_format import ParseDict +import json + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.conversions import ( + to_compat_agent_capabilities, + to_compat_agent_card, + to_compat_agent_card_signature, + to_compat_agent_extension, + to_compat_agent_interface, + to_compat_agent_provider, + to_compat_agent_skill, + to_compat_artifact, + to_compat_authentication_info, + to_compat_cancel_task_request, + to_compat_create_task_push_notification_config_request, + to_compat_delete_task_push_notification_config_request, + to_compat_get_extended_agent_card_request, + to_compat_get_task_push_notification_config_request, + to_compat_get_task_request, + to_compat_list_task_push_notification_config_request, + to_compat_list_task_push_notification_config_response, + to_compat_message, + to_compat_oauth_flows, + to_compat_part, + to_compat_push_notification_config, + to_compat_security_requirement, + to_compat_security_scheme, + to_compat_send_message_configuration, + to_compat_send_message_request, + to_compat_send_message_response, + to_compat_stream_response, + to_compat_subscribe_to_task_request, + to_compat_task, + to_compat_task_artifact_update_event, + to_compat_task_push_notification_config, + to_compat_task_status, + to_compat_task_status_update_event, + to_core_agent_capabilities, + to_core_agent_card, + to_core_agent_card_signature, + to_core_agent_extension, + to_core_agent_interface, + to_core_agent_provider, + to_core_agent_skill, + to_core_artifact, + to_core_authentication_info, + to_core_cancel_task_request, + to_core_create_task_push_notification_config_request, + to_core_delete_task_push_notification_config_request, + to_core_get_extended_agent_card_request, + to_core_get_task_push_notification_config_request, + to_core_get_task_request, + to_core_list_task_push_notification_config_request, + to_core_list_task_push_notification_config_response, + to_core_message, + to_core_oauth_flows, + to_core_part, + to_core_push_notification_config, + to_core_security_requirement, + to_core_security_scheme, + to_core_send_message_configuration, + to_core_send_message_request, + to_core_send_message_response, + to_core_stream_response, + to_core_subscribe_to_task_request, + to_core_task, + to_core_task_artifact_update_event, + to_core_task_push_notification_config, + to_core_task_status, + to_core_task_status_update_event, +) +from a2a.compat.v0_3.model_conversions import ( + core_to_compat_task_model, + compat_task_model_to_core, + core_to_compat_push_notification_config_model, + compat_push_notification_config_model_to_core, +) +from a2a.server.models import PushNotificationConfigModel, TaskModel +from cryptography.fernet import Fernet +from a2a.types import a2a_pb2 as pb2_v10 +from a2a.utils.errors import VersionNotSupportedError + + +def test_text_part_conversion(): + v03_part = types_v03.Part( + root=types_v03.TextPart(text='Hello, World!', metadata={'test': 'val'}) + ) + v10_expected = pb2_v10.Part(text='Hello, World!') + v10_expected.metadata.update({'test': 'val'}) + + v10_part = to_core_part(v03_part) + assert v10_part == v10_expected + + v03_restored = to_compat_part(v10_part) + assert v03_restored == v03_part + + +def test_data_part_conversion(): + data = {'key': 'val', 'nested': {'a': 1}} + v03_part = types_v03.Part(root=types_v03.DataPart(data=data)) + v10_expected = pb2_v10.Part() + ParseDict(data, v10_expected.data.struct_value) + + v10_part = to_core_part(v03_part) + assert v10_part == v10_expected + + v03_restored = to_compat_part(v10_part) + assert v03_restored == v03_part + + +def test_data_part_conversion_primitive(): + primitive_cases = [ + 'Primitive String', + 42, + 3.14, + True, + False, + ['a', 'b', 'c'], + [1, 2, 3], + None, + ] + + for val in primitive_cases: + v10_expected = pb2_v10.Part() + ParseDict(val, v10_expected.data) + + # Test v10 -> v03 + v03_part = to_compat_part(v10_expected) + assert isinstance(v03_part.root, types_v03.DataPart) + assert v03_part.root.data == {'value': val} + assert v03_part.root.metadata['data_part_compat'] is True + + # Test v03 -> v10 + v10_restored = to_core_part(v03_part) + assert v10_restored == v10_expected + + +def test_file_part_uri_conversion(): + v03_file = types_v03.FileWithUri( + uri='http://example.com/file', mime_type='text/plain', name='file.txt' + ) + v03_part = types_v03.Part(root=types_v03.FilePart(file=v03_file)) + v10_expected = pb2_v10.Part( + url='http://example.com/file', + media_type='text/plain', + filename='file.txt', + ) + + v10_part = to_core_part(v03_part) + assert v10_part == v10_expected + + v03_restored = to_compat_part(v10_part) + assert v03_restored == v03_part + + +def test_file_part_bytes_conversion(): + content = b'hello world' + b64 = base64.b64encode(content).decode('utf-8') + v03_file = types_v03.FileWithBytes( + bytes=b64, mime_type='application/octet-stream', name='file.bin' + ) + v03_part = types_v03.Part(root=types_v03.FilePart(file=v03_file)) + v10_expected = pb2_v10.Part( + raw=content, media_type='application/octet-stream', filename='file.bin' + ) + + v10_part = to_core_part(v03_part) + assert v10_part == v10_expected + + v03_restored = to_compat_part(v10_part) + assert v03_restored == v03_part + + +def test_message_conversion(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.user, + context_id='c1', + task_id='t1', + reference_task_ids=['rt1'], + metadata={'k': 'v'}, + extensions=['ext1'], + parts=[types_v03.Part(root=types_v03.TextPart(text='hi'))], + ) + v10_expected = pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_USER, + context_id='c1', + task_id='t1', + reference_task_ids=['rt1'], + extensions=['ext1'], + parts=[pb2_v10.Part(text='hi')], + ) + ParseDict({'k': 'v'}, v10_expected.metadata) + + v10_msg = to_core_message(v03_msg) + assert v10_msg == v10_expected + + v03_restored = to_compat_message(v10_msg) + assert v03_restored == v03_msg + + +def test_message_conversion_minimal(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.agent, + parts=[types_v03.Part(root=types_v03.TextPart(text='hi'))], + ) + v10_expected = pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_AGENT, + parts=[pb2_v10.Part(text='hi')], + ) + + v10_msg = to_core_message(v03_msg) + assert v10_msg == v10_expected + + v03_restored = to_compat_message(v10_msg) + # v03 expects None for missing fields, conversions.py handles this correctly + assert v03_restored == v03_msg + + +def test_task_status_conversion(): + now_v03 = '2023-01-01T12:00:00Z' + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.agent, + parts=[types_v03.Part(root=types_v03.TextPart(text='status'))], + ) + v03_status = types_v03.TaskStatus( + state=types_v03.TaskState.working, message=v03_msg, timestamp=now_v03 + ) + + v10_expected = pb2_v10.TaskStatus( + state=pb2_v10.TaskState.TASK_STATE_WORKING, + message=pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_AGENT, + parts=[pb2_v10.Part(text='status')], + ), + ) + v10_expected.timestamp.FromJsonString(now_v03) + + v10_status = to_core_task_status(v03_status) + assert v10_status == v10_expected + + v03_restored = to_compat_task_status(v10_status) + assert v03_restored == v03_status + + +def test_task_status_conversion_special_states(): + # input-required + s1 = types_v03.TaskStatus(state=types_v03.TaskState.input_required) + assert ( + to_core_task_status(s1).state + == pb2_v10.TaskState.TASK_STATE_INPUT_REQUIRED + ) + assert to_compat_task_status(to_core_task_status(s1)).state == s1.state + + # auth-required + s2 = types_v03.TaskStatus(state=types_v03.TaskState.auth_required) + assert ( + to_core_task_status(s2).state + == pb2_v10.TaskState.TASK_STATE_AUTH_REQUIRED + ) + assert to_compat_task_status(to_core_task_status(s2)).state == s2.state + + # unknown + s3 = types_v03.TaskStatus(state=types_v03.TaskState.unknown) + assert ( + to_core_task_status(s3).state + == pb2_v10.TaskState.TASK_STATE_UNSPECIFIED + ) + assert to_compat_task_status(to_core_task_status(s3)).state == s3.state + + +def test_task_conversion(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.user, + parts=[types_v03.Part(root=types_v03.TextPart(text='hi'))], + ) + v03_status = types_v03.TaskStatus(state=types_v03.TaskState.submitted) + v03_art = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='data'))], + ) + + v03_task = types_v03.Task( + id='t1', + context_id='c1', + status=v03_status, + history=[v03_msg], + artifacts=[v03_art], + metadata={'m': 'v'}, + ) + + v10_expected = pb2_v10.Task( + id='t1', + context_id='c1', + status=pb2_v10.TaskStatus(state=pb2_v10.TaskState.TASK_STATE_SUBMITTED), + history=[ + pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_USER, + parts=[pb2_v10.Part(text='hi')], + ) + ], + artifacts=[ + pb2_v10.Artifact( + artifact_id='a1', parts=[pb2_v10.Part(text='data')] + ) + ], + ) + ParseDict({'m': 'v'}, v10_expected.metadata) + + v10_task = to_core_task(v03_task) + assert v10_task == v10_expected + + v03_restored = to_compat_task(v10_task) + # v03 restored artifacts will have None for name/desc/etc + v03_expected_restored = types_v03.Task( + id='t1', + context_id='c1', + status=v03_status, + history=[v03_msg], + artifacts=[ + types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='data'))], + name=None, + description=None, + metadata=None, + extensions=None, + ) + ], + metadata={'m': 'v'}, + ) + assert v03_restored == v03_expected_restored + + +def test_task_conversion_minimal(): + # Test v10 to v03 minimal + v10_min = pb2_v10.Task(id='tm', context_id='cm') + v03_expected_restored = types_v03.Task( + id='tm', + context_id='cm', + status=types_v03.TaskStatus(state=types_v03.TaskState.unknown), + ) + v03_min_restored = to_compat_task(v10_min) + assert v03_min_restored == v03_expected_restored + + +def test_authentication_info_conversion(): + v03_auth = types_v03.PushNotificationAuthenticationInfo( + schemes=['Bearer'], credentials='token123' + ) + v10_expected = pb2_v10.AuthenticationInfo( + scheme='Bearer', credentials='token123' + ) + v10_auth = to_core_authentication_info(v03_auth) + assert v10_auth == v10_expected + + v03_restored = to_compat_authentication_info(v10_auth) + assert v03_restored == v03_auth + + +def test_authentication_info_conversion_minimal(): + v03_auth = types_v03.PushNotificationAuthenticationInfo(schemes=[]) + v10_expected = pb2_v10.AuthenticationInfo() + + v10_auth = to_core_authentication_info(v03_auth) + assert v10_auth == v10_expected + + v03_restored = to_compat_authentication_info(v10_auth) + v03_expected_restored = types_v03.PushNotificationAuthenticationInfo( + schemes=[], credentials=None + ) + assert v03_restored == v03_expected_restored + + +def test_push_notification_config_conversion(): + v03_auth = types_v03.PushNotificationAuthenticationInfo(schemes=['Basic']) + v03_config = types_v03.PushNotificationConfig( + id='c1', + url='http://test.com', + token='tok', # noqa: S106 + authentication=v03_auth, + ) + + v10_expected = pb2_v10.TaskPushNotificationConfig( + id='c1', + url='http://test.com', + token='tok', # noqa: S106 + authentication=pb2_v10.AuthenticationInfo(scheme='Basic'), + ) + + v10_config = to_core_push_notification_config(v03_config) + assert v10_config == v10_expected + + v03_restored = to_compat_push_notification_config(v10_config) + assert v03_restored == v03_config + + +def test_push_notification_config_conversion_minimal(): + v03_config = types_v03.PushNotificationConfig(url='http://test.com') + v10_expected = pb2_v10.TaskPushNotificationConfig(url='http://test.com') + + v10_config = to_core_push_notification_config(v03_config) + assert v10_config == v10_expected + + v03_restored = to_compat_push_notification_config(v10_config) + v03_expected_restored = types_v03.PushNotificationConfig( + url='http://test.com', id=None, token=None, authentication=None + ) + assert v03_restored == v03_expected_restored + + +def test_send_message_configuration_conversion(): + v03_auth = types_v03.PushNotificationAuthenticationInfo(schemes=['Basic']) + v03_push = types_v03.PushNotificationConfig( + url='http://test', authentication=v03_auth + ) + + v03_config = types_v03.MessageSendConfiguration( + accepted_output_modes=['text/plain', 'application/json'], + history_length=10, + blocking=True, + push_notification_config=v03_push, + ) + + v10_expected = pb2_v10.SendMessageConfiguration( + accepted_output_modes=['text/plain', 'application/json'], + history_length=10, + task_push_notification_config=pb2_v10.TaskPushNotificationConfig( + url='http://test', + authentication=pb2_v10.AuthenticationInfo(scheme='Basic'), + ), + ) + + v10_config = to_core_send_message_configuration(v03_config) + assert v10_config == v10_expected + + v03_restored = to_compat_send_message_configuration(v10_config) + assert v03_restored == v03_config + + +def test_send_message_configuration_conversion_minimal(): + v03_config = types_v03.MessageSendConfiguration() + v10_expected = pb2_v10.SendMessageConfiguration() + + v10_config = to_core_send_message_configuration(v03_config) + assert v10_config == v10_expected + v03_restored = to_compat_send_message_configuration(v10_config) + v03_expected_restored = types_v03.MessageSendConfiguration( + accepted_output_modes=None, + history_length=None, + blocking=True, + push_notification_config=None, + ) + assert v03_restored == v03_expected_restored + + +def test_artifact_conversion_full(): + v03_artifact = types_v03.Artifact( + artifact_id='a1', + name='Test Art', + description='A test artifact', + parts=[types_v03.Part(root=types_v03.TextPart(text='data'))], + metadata={'k': 'v'}, + extensions=['ext1'], + ) + + v10_expected = pb2_v10.Artifact( + artifact_id='a1', + name='Test Art', + description='A test artifact', + parts=[pb2_v10.Part(text='data')], + extensions=['ext1'], + ) + ParseDict({'k': 'v'}, v10_expected.metadata) + + v10_art = to_core_artifact(v03_artifact) + assert v10_art == v10_expected + + v03_restored = to_compat_artifact(v10_art) + assert v03_restored == v03_artifact + + +def test_artifact_conversion_minimal(): + v03_artifact = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='data'))], + ) + + v10_expected = pb2_v10.Artifact( + artifact_id='a1', parts=[pb2_v10.Part(text='data')] + ) + + v10_art = to_core_artifact(v03_artifact) + assert v10_art == v10_expected + + v03_restored = to_compat_artifact(v10_art) + v03_expected_restored = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='data'))], + name=None, + description=None, + metadata=None, + extensions=None, + ) + assert v03_restored == v03_expected_restored + + +def test_task_status_update_event_conversion(): + v03_status = types_v03.TaskStatus(state=types_v03.TaskState.completed) + v03_event = types_v03.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=v03_status, + metadata={'m': 'v'}, + final=True, + ) + + v10_expected = pb2_v10.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=pb2_v10.TaskStatus(state=pb2_v10.TaskState.TASK_STATE_COMPLETED), + ) + ParseDict({'m': 'v'}, v10_expected.metadata) + + v10_event = to_core_task_status_update_event(v03_event) + assert v10_event == v10_expected + + v03_restored = to_compat_task_status_update_event(v10_event) + v03_expected_restored = types_v03.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=v03_status, + metadata={'m': 'v'}, + final=True, # final is computed based on status.state + ) + assert v03_restored == v03_expected_restored + + +def test_task_status_update_event_conversion_terminal_states(): + # Test all terminal states result in final=True + terminal_states = [ + ( + pb2_v10.TaskState.TASK_STATE_COMPLETED, + types_v03.TaskState.completed, + ), + (pb2_v10.TaskState.TASK_STATE_CANCELED, types_v03.TaskState.canceled), + (pb2_v10.TaskState.TASK_STATE_FAILED, types_v03.TaskState.failed), + (pb2_v10.TaskState.TASK_STATE_REJECTED, types_v03.TaskState.rejected), + ] + + for core_st, compat_st in terminal_states: + v10_event = pb2_v10.TaskStatusUpdateEvent( + status=pb2_v10.TaskStatus(state=core_st) + ) + v03_restored = to_compat_task_status_update_event(v10_event) + assert v03_restored.final is True + assert v03_restored.status.state == compat_st + + # Test non-terminal states result in final=False + non_terminal_states = [ + ( + pb2_v10.TaskState.TASK_STATE_SUBMITTED, + types_v03.TaskState.submitted, + ), + (pb2_v10.TaskState.TASK_STATE_WORKING, types_v03.TaskState.working), + ( + pb2_v10.TaskState.TASK_STATE_INPUT_REQUIRED, + types_v03.TaskState.input_required, + ), + ( + pb2_v10.TaskState.TASK_STATE_AUTH_REQUIRED, + types_v03.TaskState.auth_required, + ), + ( + pb2_v10.TaskState.TASK_STATE_UNSPECIFIED, + types_v03.TaskState.unknown, + ), + ] + + for core_st, compat_st in non_terminal_states: + v10_event = pb2_v10.TaskStatusUpdateEvent( + status=pb2_v10.TaskStatus(state=core_st) + ) + v03_restored = to_compat_task_status_update_event(v10_event) + assert v03_restored.final is False + assert v03_restored.status.state == compat_st + + +def test_task_status_update_event_conversion_minimal(): + # v03 status is required but might be constructed empty internally + v10_event = pb2_v10.TaskStatusUpdateEvent(task_id='t1', context_id='c1') + v03_restored = to_compat_task_status_update_event(v10_event) + v03_expected = types_v03.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=types_v03.TaskStatus(state=types_v03.TaskState.unknown), + final=False, + ) + assert v03_restored == v03_expected + + +def test_task_artifact_update_event_conversion(): + v03_art = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='d'))], + ) + v03_event = types_v03.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=v03_art, + append=True, + last_chunk=False, + metadata={'k': 'v'}, + ) + + v10_expected = pb2_v10.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=pb2_v10.Artifact( + artifact_id='a1', parts=[pb2_v10.Part(text='d')] + ), + append=True, + last_chunk=False, + ) + ParseDict({'k': 'v'}, v10_expected.metadata) + + v10_event = to_core_task_artifact_update_event(v03_event) + assert v10_event == v10_expected + + v03_restored = to_compat_task_artifact_update_event(v10_event) + assert v03_restored == v03_event + + +def test_task_artifact_update_event_conversion_minimal(): + v03_art = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='d'))], + ) + v03_event = types_v03.TaskArtifactUpdateEvent( + task_id='t1', context_id='c1', artifact=v03_art + ) + + v10_expected = pb2_v10.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=pb2_v10.Artifact( + artifact_id='a1', parts=[pb2_v10.Part(text='d')] + ), + ) + + v10_event = to_core_task_artifact_update_event(v03_event) + assert v10_event == v10_expected + + v03_restored = to_compat_task_artifact_update_event(v10_event) + v03_expected_restored = types_v03.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=v03_art, + append=False, # primitive bools default to False + last_chunk=False, + metadata=None, + ) + assert v03_restored == v03_expected_restored + + +def test_security_requirement_conversion(): + v03_req = {'oauth': ['read', 'write'], 'apikey': []} + + v10_expected = pb2_v10.SecurityRequirement() + sl_oauth = pb2_v10.StringList() + sl_oauth.list.extend(['read', 'write']) + sl_apikey = pb2_v10.StringList() + v10_expected.schemes['oauth'].CopyFrom(sl_oauth) + v10_expected.schemes['apikey'].CopyFrom(sl_apikey) + + v10_req = to_core_security_requirement(v03_req) + assert v10_req == v10_expected + + v03_restored = to_compat_security_requirement(v10_req) + assert v03_restored == v03_req + + +def test_oauth_flows_conversion_auth_code(): + v03_flows = types_v03.OAuthFlows( + authorization_code=types_v03.AuthorizationCodeOAuthFlow( + authorization_url='http://auth', + token_url='http://token', # noqa: S106 + scopes={'a': 'b'}, + refresh_url='ref1', + ) + ) + v10_expected = pb2_v10.OAuthFlows( + authorization_code=pb2_v10.AuthorizationCodeOAuthFlow( + authorization_url='http://auth', + token_url='http://token', # noqa: S106 + scopes={'a': 'b'}, + refresh_url='ref1', + ) + ) + v10_flows = to_core_oauth_flows(v03_flows) + assert v10_flows == v10_expected + v03_restored = to_compat_oauth_flows(v10_flows) + assert v03_restored == v03_flows + + +def test_oauth_flows_conversion_client_credentials(): + v03_flows = types_v03.OAuthFlows( + client_credentials=types_v03.ClientCredentialsOAuthFlow( + token_url='http://token2', # noqa: S106 + scopes={'c': 'd'}, + refresh_url='ref2', + ) + ) + v10_expected = pb2_v10.OAuthFlows( + client_credentials=pb2_v10.ClientCredentialsOAuthFlow( + token_url='http://token2', # noqa: S106 + scopes={'c': 'd'}, + refresh_url='ref2', + ) + ) + v10_flows = to_core_oauth_flows(v03_flows) + assert v10_flows == v10_expected + v03_restored = to_compat_oauth_flows(v10_flows) + assert v03_restored == v03_flows + + +def test_oauth_flows_conversion_implicit(): + v03_flows = types_v03.OAuthFlows( + implicit=types_v03.ImplicitOAuthFlow( + authorization_url='http://auth2', + scopes={'e': 'f'}, + refresh_url='ref3', + ) + ) + v10_expected = pb2_v10.OAuthFlows( + implicit=pb2_v10.ImplicitOAuthFlow( + authorization_url='http://auth2', + scopes={'e': 'f'}, + refresh_url='ref3', + ) + ) + v10_flows = to_core_oauth_flows(v03_flows) + assert v10_flows == v10_expected + v03_restored = to_compat_oauth_flows(v10_flows) + assert v03_restored == v03_flows + + +def test_oauth_flows_conversion_password(): + v03_flows = types_v03.OAuthFlows( + password=types_v03.PasswordOAuthFlow( + token_url='http://token3', # noqa: S106 + scopes={'g': 'h'}, + refresh_url='ref4', + ) + ) + v10_expected = pb2_v10.OAuthFlows( + password=pb2_v10.PasswordOAuthFlow( + token_url='http://token3', # noqa: S106 + scopes={'g': 'h'}, + refresh_url='ref4', + ) + ) + v10_flows = to_core_oauth_flows(v03_flows) + assert v10_flows == v10_expected + v03_restored = to_compat_oauth_flows(v10_flows) + assert v03_restored == v03_flows + + +def test_security_scheme_apikey(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.APIKeySecurityScheme( + in_=types_v03.In.header, name='X-API-KEY', description='desc' + ) + ) + v10_expected = pb2_v10.SecurityScheme( + api_key_security_scheme=pb2_v10.APIKeySecurityScheme( + location='header', name='X-API-KEY', description='desc' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_http_auth(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.HTTPAuthSecurityScheme( + scheme='Bearer', bearer_format='JWT', description='desc' + ) + ) + v10_expected = pb2_v10.SecurityScheme( + http_auth_security_scheme=pb2_v10.HTTPAuthSecurityScheme( + scheme='Bearer', bearer_format='JWT', description='desc' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_oauth2(): + v03_flows = types_v03.OAuthFlows( + authorization_code=types_v03.AuthorizationCodeOAuthFlow( + authorization_url='u', + token_url='t', # noqa: S106 + scopes={}, + ) + ) + v03_scheme = types_v03.SecurityScheme( + root=types_v03.OAuth2SecurityScheme( + flows=v03_flows, oauth2_metadata_url='url', description='desc' + ) + ) + + v10_expected = pb2_v10.SecurityScheme( + oauth2_security_scheme=pb2_v10.OAuth2SecurityScheme( + flows=pb2_v10.OAuthFlows( + authorization_code=pb2_v10.AuthorizationCodeOAuthFlow( + authorization_url='u', + token_url='t', # noqa: S106 + ) + ), + oauth2_metadata_url='url', + description='desc', + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_oidc(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.OpenIdConnectSecurityScheme( + open_id_connect_url='url', description='desc' + ) + ) + v10_expected = pb2_v10.SecurityScheme( + open_id_connect_security_scheme=pb2_v10.OpenIdConnectSecurityScheme( + open_id_connect_url='url', description='desc' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_mtls(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.MutualTLSSecurityScheme(description='desc') + ) + v10_expected = pb2_v10.SecurityScheme( + mtls_security_scheme=pb2_v10.MutualTlsSecurityScheme(description='desc') + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_oauth_flows_conversion_minimal(): + v03_flows = types_v03.OAuthFlows( + authorization_code=types_v03.AuthorizationCodeOAuthFlow( + authorization_url='http://auth', + token_url='http://token', # noqa: S106 + scopes={'a': 'b'}, + ) # no refresh_url + ) + v10_expected = pb2_v10.OAuthFlows( + authorization_code=pb2_v10.AuthorizationCodeOAuthFlow( + authorization_url='http://auth', + token_url='http://token', # noqa: S106 + scopes={'a': 'b'}, + ) + ) + v10_flows = to_core_oauth_flows(v03_flows) + assert v10_flows == v10_expected + + v03_restored = to_compat_oauth_flows(v10_flows) + assert v03_restored == v03_flows + + +def test_security_scheme_minimal(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.APIKeySecurityScheme( + in_=types_v03.In.header, + name='X-API-KEY', # no description + ) + ) + v10_expected = pb2_v10.SecurityScheme( + api_key_security_scheme=pb2_v10.APIKeySecurityScheme( + location='header', name='X-API-KEY' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_http_auth_minimal(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.HTTPAuthSecurityScheme( + scheme='Bearer' # no bearer_format, no description + ) + ) + v10_expected = pb2_v10.SecurityScheme( + http_auth_security_scheme=pb2_v10.HTTPAuthSecurityScheme( + scheme='Bearer' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_oauth2_minimal(): + v03_flows = types_v03.OAuthFlows( + implicit=types_v03.ImplicitOAuthFlow(authorization_url='u', scopes={}) + ) + v03_scheme = types_v03.SecurityScheme( + root=types_v03.OAuth2SecurityScheme( + flows=v03_flows # no oauth2_metadata_url, no description + ) + ) + v10_expected = pb2_v10.SecurityScheme( + oauth2_security_scheme=pb2_v10.OAuth2SecurityScheme( + flows=pb2_v10.OAuthFlows( + implicit=pb2_v10.ImplicitOAuthFlow(authorization_url='u') + ) + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_oidc_minimal(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.OpenIdConnectSecurityScheme( + open_id_connect_url='url' # no description + ) + ) + v10_expected = pb2_v10.SecurityScheme( + open_id_connect_security_scheme=pb2_v10.OpenIdConnectSecurityScheme( + open_id_connect_url='url' + ) + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + + +def test_security_scheme_mtls_minimal(): + v03_scheme = types_v03.SecurityScheme( + root=types_v03.MutualTLSSecurityScheme() + ) + v10_expected = pb2_v10.SecurityScheme( + mtls_security_scheme=pb2_v10.MutualTlsSecurityScheme() + ) + v10_scheme = to_core_security_scheme(v03_scheme) + assert v10_scheme == v10_expected + v03_restored = to_compat_security_scheme(v10_scheme) + assert v03_restored == v03_scheme + v10_scheme = pb2_v10.SecurityScheme() + with pytest.raises(ValueError, match='Unknown security scheme type'): + to_compat_security_scheme(v10_scheme) + + +def test_agent_interface_conversion(): + v03_int = types_v03.AgentInterface(url='http', transport='JSONRPC') + v10_expected = pb2_v10.AgentInterface( + url='http', protocol_binding='JSONRPC', protocol_version='0.3' + ) + v10_int = to_core_agent_interface(v03_int) + assert v10_int == v10_expected + v03_restored = to_compat_agent_interface(v10_int) + assert v03_restored == v03_int + + +def test_agent_provider_conversion(): + v03_prov = types_v03.AgentProvider(url='u', organization='org') + v10_expected = pb2_v10.AgentProvider(url='u', organization='org') + v10_prov = to_core_agent_provider(v03_prov) + assert v10_prov == v10_expected + v03_restored = to_compat_agent_provider(v10_prov) + assert v03_restored == v03_prov + + +def test_agent_extension_conversion(): + v03_ext = types_v03.AgentExtension( + uri='u', description='d', required=True, params={'k': 'v'} + ) + v10_expected = pb2_v10.AgentExtension( + uri='u', description='d', required=True + ) + ParseDict({'k': 'v'}, v10_expected.params) + v10_ext = to_core_agent_extension(v03_ext) + assert v10_ext == v10_expected + v03_restored = to_compat_agent_extension(v10_ext) + assert v03_restored == v03_ext + + +def test_agent_capabilities_conversion(): + v03_ext = types_v03.AgentExtension(uri='u', required=False) + v03_cap = types_v03.AgentCapabilities( + streaming=True, + push_notifications=False, + extensions=[v03_ext], + state_transition_history=True, + ) + v10_expected = pb2_v10.AgentCapabilities( + streaming=True, + push_notifications=False, + extensions=[pb2_v10.AgentExtension(uri='u', required=False)], + ) + v10_cap = to_core_agent_capabilities(v03_cap) + assert v10_cap == v10_expected + v03_restored = to_compat_agent_capabilities(v10_cap) + v03_expected_restored = types_v03.AgentCapabilities( + streaming=True, + push_notifications=False, + extensions=[v03_ext], + state_transition_history=None, + ) + assert v03_restored == v03_expected_restored + + +def test_agent_skill_conversion(): + v03_skill = types_v03.AgentSkill( + id='s1', + name='n', + description='d', + tags=['t'], + examples=['e'], + input_modes=['i'], + output_modes=['o'], + security=[{'s': ['1']}], + ) + v10_expected = pb2_v10.AgentSkill( + id='s1', + name='n', + description='d', + tags=['t'], + examples=['e'], + input_modes=['i'], + output_modes=['o'], + ) + sl = pb2_v10.StringList() + sl.list.extend(['1']) + v10_expected.security_requirements.add().schemes['s'].CopyFrom(sl) + + v10_skill = to_core_agent_skill(v03_skill) + assert v10_skill == v10_expected + v03_restored = to_compat_agent_skill(v10_skill) + assert v03_restored == v03_skill + + +def test_agent_card_signature_conversion(): + v03_sig = types_v03.AgentCardSignature( + protected='p', signature='s', header={'h': 'v'} + ) + v10_expected = pb2_v10.AgentCardSignature(protected='p', signature='s') + ParseDict({'h': 'v'}, v10_expected.header) + v10_sig = to_core_agent_card_signature(v03_sig) + assert v10_sig == v10_expected + v03_restored = to_compat_agent_card_signature(v10_sig) + assert v03_restored == v03_sig + + +def test_agent_card_conversion(): + v03_int = types_v03.AgentInterface(url='u2', transport='HTTP') + v03_cap = types_v03.AgentCapabilities(streaming=True) + v03_skill = types_v03.AgentSkill( + id='s1', + name='sn', + description='sd', + tags=[], + input_modes=[], + output_modes=[], + ) + v03_prov = types_v03.AgentProvider(url='pu', organization='po') + + v03_card = types_v03.AgentCard( + name='n', + description='d', + version='v', + url='u1', + preferred_transport='JSONRPC', + protocol_version='0.3.0', + additional_interfaces=[v03_int], + provider=v03_prov, + documentation_url='du', + icon_url='iu', + capabilities=v03_cap, + supports_authenticated_extended_card=True, + security=[{'s': []}], + default_input_modes=['i'], + default_output_modes=['o'], + skills=[v03_skill], + ) + + v10_expected = pb2_v10.AgentCard( + name='n', + description='d', + version='v', + documentation_url='du', + icon_url='iu', + default_input_modes=['i'], + default_output_modes=['o'], + ) + v10_expected.supported_interfaces.extend( + [ + pb2_v10.AgentInterface( + url='u1', protocol_binding='JSONRPC', protocol_version='0.3.0' + ), + pb2_v10.AgentInterface( + url='u2', protocol_binding='HTTP', protocol_version='0.3' + ), + ] + ) + v10_expected.provider.CopyFrom( + pb2_v10.AgentProvider(url='pu', organization='po') + ) + v10_expected.capabilities.CopyFrom( + pb2_v10.AgentCapabilities(streaming=True, extended_agent_card=True) + ) + v10_expected.security_requirements.add().schemes['s'].CopyFrom( + pb2_v10.StringList() + ) + v10_expected.skills.add().CopyFrom( + pb2_v10.AgentSkill(id='s1', name='sn', description='sd') + ) + + v10_card = to_core_agent_card(v03_card) + assert v10_card == v10_expected + + v03_restored = to_compat_agent_card(v10_card) + # We must explicitly set capabilities.state_transition_history to None in our original to match the restored + v03_card.capabilities.state_transition_history = None + # AgentSkill empty lists are converted to None during restoration + v03_card.skills[0].input_modes = None + v03_card.skills[0].output_modes = None + v03_card.skills[0].security = None + v03_card.skills[0].examples = None + assert v03_restored == v03_card + + +def test_agent_card_conversion_minimal(): + v03_cap = types_v03.AgentCapabilities() + v03_card = types_v03.AgentCard( + name='n', + description='d', + version='v', + url='u1', + preferred_transport='JSONRPC', + protocol_version='0.3.0', + capabilities=v03_cap, + default_input_modes=[], + default_output_modes=[], + skills=[], + ) + v10_expected = pb2_v10.AgentCard( + name='n', + description='d', + version='v', + capabilities=pb2_v10.AgentCapabilities(), + ) + v10_expected.supported_interfaces.extend( + [ + pb2_v10.AgentInterface( + url='u1', protocol_binding='JSONRPC', protocol_version='0.3.0' + ) + ] + ) + v10_card = to_core_agent_card(v03_card) + assert v10_card == v10_expected + + v03_restored = to_compat_agent_card(v10_card) + v03_card.capabilities.state_transition_history = None + assert v03_restored == v03_card + + +def test_agent_skill_conversion_minimal(): + v03_skill = types_v03.AgentSkill( + id='s1', + name='n', + description='d', + tags=[], + input_modes=[], + output_modes=[], + ) + v10_expected = pb2_v10.AgentSkill(id='s1', name='n', description='d') + v10_skill = to_core_agent_skill(v03_skill) + assert v10_skill == v10_expected + v03_restored = to_compat_agent_skill(v10_skill) + + # Restore sets missing optional lists to None usually. We adjust expected here + v03_expected_restored = types_v03.AgentSkill( + id='s1', + name='n', + description='d', + tags=[], + examples=None, + input_modes=None, + output_modes=None, + security=None, + ) + assert v03_restored == v03_expected_restored + + +def test_agent_extension_conversion_minimal(): + v03_ext = types_v03.AgentExtension(uri='u', required=False) + v10_expected = pb2_v10.AgentExtension(uri='u', required=False) + v10_ext = to_core_agent_extension(v03_ext) + assert v10_ext == v10_expected + v03_restored = to_compat_agent_extension(v10_ext) + v03_expected_restored = types_v03.AgentExtension( + uri='u', description=None, required=False, params=None + ) + assert v03_restored == v03_expected_restored + + +def test_task_push_notification_config_conversion(): + v03_auth = types_v03.PushNotificationAuthenticationInfo(schemes=['Basic']) + v03_cfg = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig( + id='c1', + url='http://url', + token='tok', # noqa: S106 + authentication=v03_auth, + ), + ) + v10_expected = pb2_v10.TaskPushNotificationConfig( + task_id='t1', + id='c1', + url='http://url', + token='tok', # noqa: S106 + authentication=pb2_v10.AuthenticationInfo(scheme='Basic'), + ) + v10_cfg = to_core_task_push_notification_config(v03_cfg) + assert v10_cfg == v10_expected + v03_restored = to_compat_task_push_notification_config(v10_cfg) + + v03_expected_restored = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig( + id='c1', + url='http://url', + token='tok', # noqa: S106 + authentication=v03_auth, + ), + ) + assert v03_restored == v03_expected_restored + + +def test_task_push_notification_config_conversion_minimal(): + v03_cfg = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig( + url='http://url' + ), + ) + v10_expected = pb2_v10.TaskPushNotificationConfig( + task_id='t1', url='http://url' + ) + v10_cfg = to_core_task_push_notification_config(v03_cfg) + assert v10_cfg == v10_expected + v03_restored = to_compat_task_push_notification_config(v10_cfg) + v03_expected_restored = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig( + url='http://url' + ), + ) + assert v03_restored == v03_expected_restored + + +def test_send_message_request_conversion(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.user, + parts=[types_v03.Part(root=types_v03.TextPart(text='Hi'))], + ) + v03_cfg = types_v03.MessageSendConfiguration(history_length=5) + v03_req = types_v03.SendMessageRequest( + id='conv', + params=types_v03.MessageSendParams( + message=v03_msg, configuration=v03_cfg, metadata={'k': 'v'} + ), + ) + v10_expected = pb2_v10.SendMessageRequest( + message=pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_USER, + parts=[pb2_v10.Part(text='Hi')], + ), + configuration=pb2_v10.SendMessageConfiguration(history_length=5), + ) + ParseDict({'k': 'v'}, v10_expected.metadata) + + v10_req = to_core_send_message_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_send_message_request(v10_req, request_id='conv') + assert v03_restored.id == 'conv' + assert v03_restored.params.message.message_id == 'm1' + assert v03_restored.params.configuration.history_length == 5 + assert v03_restored.params.metadata == {'k': 'v'} + + +def test_get_task_request_conversion(): + v03_req = types_v03.GetTaskRequest( + id='conv', params=types_v03.TaskQueryParams(id='t1', history_length=10) + ) + v10_expected = pb2_v10.GetTaskRequest(id='t1', history_length=10) + v10_req = to_core_get_task_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_get_task_request(v10_req, request_id='conv') + assert v03_restored == v03_req + + +def test_get_task_request_conversion_minimal(): + v03_req = types_v03.GetTaskRequest( + id='conv', params=types_v03.TaskQueryParams(id='t1') + ) + v10_expected = pb2_v10.GetTaskRequest(id='t1') + v10_req = to_core_get_task_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_get_task_request(v10_req, request_id='conv') + assert v03_restored == v03_req + + +def test_cancel_task_request_conversion(): + v03_req = types_v03.CancelTaskRequest( + id='conv', + params=types_v03.TaskIdParams(id='t1', metadata={'reason': 'test'}), + ) + v10_expected = pb2_v10.CancelTaskRequest(id='t1') + ParseDict({'reason': 'test'}, v10_expected.metadata) + v10_req = to_core_cancel_task_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_cancel_task_request(v10_req, request_id='conv') + assert v03_restored == v03_req + + +def test_cancel_task_request_conversion_minimal(): + v03_req = types_v03.CancelTaskRequest( + id='conv', params=types_v03.TaskIdParams(id='t1') + ) + v10_expected = pb2_v10.CancelTaskRequest(id='t1') + v10_req = to_core_cancel_task_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_cancel_task_request(v10_req, request_id='conv') + assert v03_restored == v03_req + + +def test_create_task_push_notification_config_request_conversion(): + v03_cfg = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig(url='u'), + ) + v03_req = types_v03.SetTaskPushNotificationConfigRequest( + id='conv', params=v03_cfg + ) + v10_expected = pb2_v10.TaskPushNotificationConfig(task_id='t1', url='u') + v10_req = to_core_create_task_push_notification_config_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_create_task_push_notification_config_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_stream_response_conversion(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.user, + parts=[types_v03.Part(root=types_v03.TextPart(text='Hi'))], + ) + v03_res = types_v03.SendStreamingMessageSuccessResponse(result=v03_msg) + v10_expected = pb2_v10.StreamResponse( + message=pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_USER, + parts=[pb2_v10.Part(text='Hi')], + ) + ) + v10_res = to_core_stream_response(v03_res) + assert v10_res == v10_expected + + +def test_get_task_push_notification_config_request_conversion(): + v03_req = types_v03.GetTaskPushNotificationConfigRequest( + id='conv', params=types_v03.TaskIdParams(id='t1') + ) + v10_expected = pb2_v10.GetTaskPushNotificationConfigRequest(task_id='t1') + v10_req = to_core_get_task_push_notification_config_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_get_task_push_notification_config_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_delete_task_push_notification_config_request_conversion(): + v03_req = types_v03.DeleteTaskPushNotificationConfigRequest( + id='conv', + params=types_v03.DeleteTaskPushNotificationConfigParams( + id='t1', push_notification_config_id='p1' + ), + ) + v10_expected = pb2_v10.DeleteTaskPushNotificationConfigRequest( + task_id='t1', id='p1' + ) + v10_req = to_core_delete_task_push_notification_config_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_delete_task_push_notification_config_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_subscribe_to_task_request_conversion(): + v03_req = types_v03.TaskResubscriptionRequest( + id='conv', params=types_v03.TaskIdParams(id='t1') + ) + v10_expected = pb2_v10.SubscribeToTaskRequest(id='t1') + v10_req = to_core_subscribe_to_task_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_subscribe_to_task_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_list_task_push_notification_config_request_conversion(): + v03_req = types_v03.ListTaskPushNotificationConfigRequest( + id='conv', + params=types_v03.ListTaskPushNotificationConfigParams(id='t1'), + ) + v10_expected = pb2_v10.ListTaskPushNotificationConfigsRequest(task_id='t1') + v10_req = to_core_list_task_push_notification_config_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_list_task_push_notification_config_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_list_task_push_notification_config_response_conversion(): + v03_cfg = types_v03.TaskPushNotificationConfig( + task_id='t1', + push_notification_config=types_v03.PushNotificationConfig(url='u'), + ) + v03_res = types_v03.ListTaskPushNotificationConfigResponse( + root=types_v03.ListTaskPushNotificationConfigSuccessResponse( + id='conv', result=[v03_cfg] + ) + ) + v10_expected = pb2_v10.ListTaskPushNotificationConfigsResponse( + configs=[pb2_v10.TaskPushNotificationConfig(task_id='t1', url='u')] + ) + v10_res = to_core_list_task_push_notification_config_response(v03_res) + assert v10_res == v10_expected + v03_restored = to_compat_list_task_push_notification_config_response( + v10_res, request_id='conv' + ) + assert v03_restored == v03_res + + +def test_send_message_response_conversion(): + v03_task = types_v03.Task( + id='t1', + context_id='c1', + status=types_v03.TaskStatus(state=types_v03.TaskState.unknown), + ) + v03_res = types_v03.SendMessageResponse( + root=types_v03.SendMessageSuccessResponse(id='conv', result=v03_task) + ) + v10_expected = pb2_v10.SendMessageResponse( + task=pb2_v10.Task( + id='t1', + context_id='c1', + status=pb2_v10.TaskStatus( + state=pb2_v10.TaskState.TASK_STATE_UNSPECIFIED + ), + ) + ) + v10_res = to_core_send_message_response(v03_res) + assert v10_res == v10_expected + v03_restored = to_compat_send_message_response(v10_res, request_id='conv') + assert v03_restored == v03_res + + +def test_stream_response_conversion_with_id(): + v10_res = pb2_v10.StreamResponse( + message=pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_USER, + parts=[pb2_v10.Part(text='Hi')], + ) + ) + v03_res = to_compat_stream_response(v10_res, request_id='req123') + assert v03_res.id == 'req123' + assert v03_res.result.message_id == 'm1' + + +def test_get_extended_agent_card_request_conversion(): + v03_req = types_v03.GetAuthenticatedExtendedCardRequest(id='conv') + v10_expected = pb2_v10.GetExtendedAgentCardRequest() + v10_req = to_core_get_extended_agent_card_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_get_extended_agent_card_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_get_task_push_notification_config_request_conversion_full_params(): + v03_req = types_v03.GetTaskPushNotificationConfigRequest( + id='conv', + params=types_v03.GetTaskPushNotificationConfigParams( + id='t1', push_notification_config_id='p1' + ), + ) + v10_expected = pb2_v10.GetTaskPushNotificationConfigRequest( + task_id='t1', id='p1' + ) + v10_req = to_core_get_task_push_notification_config_request(v03_req) + assert v10_req == v10_expected + v03_restored = to_compat_get_task_push_notification_config_request( + v10_req, request_id='conv' + ) + assert v03_restored == v03_req + + +def test_send_message_response_conversion_message(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.agent, + parts=[types_v03.Part(root=types_v03.TextPart(text='Hi'))], + ) + v03_res = types_v03.SendMessageResponse( + root=types_v03.SendMessageSuccessResponse(id='conv', result=v03_msg) + ) + v10_expected = pb2_v10.SendMessageResponse( + message=pb2_v10.Message( + message_id='m1', + role=pb2_v10.Role.ROLE_AGENT, + parts=[pb2_v10.Part(text='Hi')], + ) + ) + v10_res = to_core_send_message_response(v03_res) + assert v10_res == v10_expected + v03_restored = to_compat_send_message_response(v10_res, request_id='conv') + assert v03_restored == v03_res + + +def test_stream_response_conversion_status_update(): + v03_status_event = types_v03.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=types_v03.TaskStatus(state=types_v03.TaskState.working), + final=False, + ) + v03_res = types_v03.SendStreamingMessageSuccessResponse( + id='conv', result=v03_status_event + ) + v10_expected = pb2_v10.StreamResponse( + status_update=pb2_v10.TaskStatusUpdateEvent( + task_id='t1', + context_id='c1', + status=pb2_v10.TaskStatus( + state=pb2_v10.TaskState.TASK_STATE_WORKING + ), + ) + ) + v10_res = to_core_stream_response(v03_res) + assert v10_res == v10_expected + v03_restored = to_compat_stream_response(v10_res, request_id='conv') + assert v03_restored == v03_res + + +def test_stream_response_conversion_artifact_update(): + v03_art = types_v03.Artifact( + artifact_id='a1', + parts=[types_v03.Part(root=types_v03.TextPart(text='d'))], + ) + v03_artifact_event = types_v03.TaskArtifactUpdateEvent( + task_id='t1', context_id='c1', artifact=v03_art + ) + v03_res = types_v03.SendStreamingMessageSuccessResponse( + id='conv', result=v03_artifact_event + ) + v10_expected = pb2_v10.StreamResponse( + artifact_update=pb2_v10.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=pb2_v10.Artifact( + artifact_id='a1', parts=[pb2_v10.Part(text='d')] + ), + ) + ) + v10_res = to_core_stream_response(v03_res) + assert v10_res == v10_expected + v03_restored = to_compat_stream_response(v10_res, request_id='conv') + # restored artifact update has default append=False, last_chunk=False + v03_expected = types_v03.SendStreamingMessageSuccessResponse( + id='conv', + result=types_v03.TaskArtifactUpdateEvent( + task_id='t1', + context_id='c1', + artifact=v03_art, + append=False, + last_chunk=False, + ), + ) + assert v03_restored == v03_expected + + +def test_oauth_flows_conversion_priority(): + # v03 allows multiple, v10 allows one (oneof) + v03_flows = types_v03.OAuthFlows( + authorization_code=types_v03.AuthorizationCodeOAuthFlow( + authorization_url='http://auth', + token_url='http://token', # noqa: S106 + scopes={'a': 'b'}, + ), + client_credentials=types_v03.ClientCredentialsOAuthFlow( + token_url='http://token2', # noqa: S106 + scopes={'c': 'd'}, + ), + ) + + core_flows = to_core_oauth_flows(v03_flows) + # The last one set wins in proto oneof. In conversions.py order is: + # authorization_code, client_credentials, implicit, password. + # So client_credentials should win over authorization_code. + assert core_flows.WhichOneof('flow') == 'client_credentials' + assert core_flows.client_credentials.token_url == 'http://token2' # noqa: S105 + + +def test_to_core_part_data_part_with_metadata_not_compat(): + v03_part = types_v03.Part( + root=types_v03.DataPart( + data={'foo': 'bar'}, metadata={'other_key': 'val'} + ) + ) + core_part = to_core_part(v03_part) + assert core_part.data.struct_value['foo'] == 'bar' + assert core_part.metadata['other_key'] == 'val' + + +def test_to_core_part_file_with_bytes_minimal(): + v03_part = types_v03.Part( + root=types_v03.FilePart( + file=types_v03.FileWithBytes(bytes='YmFzZTY0') + # missing mime_type and name + ) + ) + core_part = to_core_part(v03_part) + assert core_part.raw == b'base64' + assert not core_part.media_type + assert not core_part.filename + + +def test_to_core_part_file_with_uri_minimal(): + v03_part = types_v03.Part( + root=types_v03.FilePart( + file=types_v03.FileWithUri(uri='http://test') + # missing mime_type and name + ) + ) + core_part = to_core_part(v03_part) + assert core_part.url == 'http://test' + assert not core_part.media_type + assert not core_part.filename + + +def test_to_compat_part_unknown_content(): + core_part = pb2_v10.Part() + # It has no content set (WhichOneof returns None) + with pytest.raises(ValueError, match='Unknown part content type: None'): + to_compat_part(core_part) + + +def test_to_core_message_unspecified_role(): + v03_msg = types_v03.Message( + message_id='m1', + role=types_v03.Role.user, # Required by pydantic model, bypass to None for test + parts=[], + ) + v03_msg.role = None + core_msg = to_core_message(v03_msg) + assert core_msg.role == pb2_v10.Role.ROLE_UNSPECIFIED + + +def test_to_core_task_status_missing_state(): + v03_status = types_v03.TaskStatus.model_construct(state=None) + core_status = to_core_task_status(v03_status) + assert core_status.state == pb2_v10.TaskState.TASK_STATE_UNSPECIFIED + + +def test_to_core_task_status_update_event_missing_status(): + v03_event = types_v03.TaskStatusUpdateEvent.model_construct( + task_id='t1', context_id='c1', status=None, final=False + ) + core_event = to_core_task_status_update_event(v03_event) + assert not core_event.HasField('status') + + +def test_to_core_task_artifact_update_event_missing_artifact(): + v03_event = types_v03.TaskArtifactUpdateEvent.model_construct( + task_id='t1', context_id='c1', artifact=None + ) + core_event = to_core_task_artifact_update_event(v03_event) + assert not core_event.HasField('artifact') + + +def test_to_core_agent_card_with_security_and_signatures(): + v03_card = types_v03.AgentCard.model_construct( + name='test', + description='test', + version='1.0', + url='http://url', + capabilities=types_v03.AgentCapabilities(), + security_schemes={ + 'scheme1': types_v03.SecurityScheme( + root=types_v03.MutualTLSSecurityScheme.model_construct( + description='mtls' + ) + ) + }, + signatures=[ + types_v03.AgentCardSignature.model_construct( + protected='prot', signature='sig' + ) + ], + default_input_modes=[], + default_output_modes=[], + skills=[], + ) + core_card = to_core_agent_card(v03_card) + assert 'scheme1' in core_card.security_schemes + assert len(core_card.signatures) == 1 + assert core_card.signatures[0].signature == 'sig' + + +def test_to_core_send_message_request_no_configuration(): + v03_req = types_v03.SendMessageRequest.model_construct( + id=1, + params=types_v03.MessageSendParams.model_construct( + message=None, configuration=None, metadata=None + ), + ) + core_req = to_core_send_message_request(v03_req) + # Blocking by default (return_immediately=False) + assert core_req.configuration.return_immediately is False + assert not core_req.HasField('message') + + +def test_to_core_list_task_push_notification_config_response_error(): + v03_res = types_v03.ListTaskPushNotificationConfigResponse( + root=types_v03.JSONRPCErrorResponse( + id=1, error=types_v03.JSONRPCError(code=-32000, message='Error') + ) + ) + core_res = to_core_list_task_push_notification_config_response(v03_res) + assert len(core_res.configs) == 0 + + +def test_to_core_send_message_response_error(): + v03_res = types_v03.SendMessageResponse( + root=types_v03.JSONRPCErrorResponse( + id=1, error=types_v03.JSONRPCError(code=-32000, message='Error') + ) + ) + core_res = to_core_send_message_response(v03_res) + assert not core_res.HasField('message') + assert not core_res.HasField('task') + + +def test_stream_response_task_variant(): + v03_task = types_v03.Task( + id='t1', + context_id='c1', + status=types_v03.TaskStatus(state=types_v03.TaskState.working), + ) + v03_res = types_v03.SendStreamingMessageSuccessResponse( + id=1, result=v03_task + ) + core_res = to_core_stream_response(v03_res) + assert core_res.HasField('task') + assert core_res.task.id == 't1' + + v03_restored = to_compat_stream_response(core_res, request_id=1) + assert isinstance(v03_restored.result, types_v03.Task) + assert v03_restored.result.id == 't1' + + +def test_to_compat_stream_response_unknown(): + core_res = pb2_v10.StreamResponse() + with pytest.raises( + ValueError, match='Unknown stream response event type: None' + ): + to_compat_stream_response(core_res) + + +def test_to_core_part_file_part_with_metadata(): + v03_part = types_v03.Part( + root=types_v03.FilePart( + file=types_v03.FileWithBytes( + bytes='YmFzZTY0', mime_type='test/test', name='test.txt' + ), + metadata={'test': 'val'}, + ) + ) + core_part = to_core_part(v03_part) + assert core_part.metadata['test'] == 'val' + + +def test_to_core_part_file_part_invalid_file_type(): + v03_part = types_v03.Part.model_construct( + root=types_v03.FilePart.model_construct( + file=None, # Not FileWithBytes or FileWithUri + metadata=None, + ) + ) + core_part = to_core_part(v03_part) + # Should fall through to the end and return an empty part + assert not core_part.HasField('raw') + + +def test_to_core_task_missing_status(): + v03_task = types_v03.Task.model_construct( + id='t1', context_id='c1', status=None + ) + core_task = to_core_task(v03_task) + assert not core_task.HasField('status') + + +def test_to_core_security_scheme_unknown_type(): + v03_scheme = types_v03.SecurityScheme.model_construct(root=None) + core_scheme = to_core_security_scheme(v03_scheme) + # Returns an empty SecurityScheme + assert core_scheme.WhichOneof('scheme') is None + + +def test_to_core_agent_extension_minimal(): + v03_ext = types_v03.AgentExtension.model_construct( + uri='', description=None, required=None, params=None + ) + core_ext = to_core_agent_extension(v03_ext) + assert core_ext.uri == '' + + +def test_to_core_task_push_notification_config_missing_config(): + v03_config = types_v03.TaskPushNotificationConfig.model_construct( + task_id='t1', push_notification_config=None + ) + core_config = to_core_task_push_notification_config(v03_config) + assert not core_config.url + + +def test_to_core_create_task_push_notification_config_request_missing_config(): + v03_req = types_v03.SetTaskPushNotificationConfigRequest.model_construct( + id=1, + params=types_v03.TaskPushNotificationConfig.model_construct( + task_id='t1', push_notification_config=None + ), + ) + core_req = to_core_create_task_push_notification_config_request(v03_req) + assert not core_req.url + + +def test_to_core_list_task_push_notification_config_request_missing_id(): + v03_req = types_v03.ListTaskPushNotificationConfigRequest.model_construct( + id=1, + params=types_v03.ListTaskPushNotificationConfigParams.model_construct( + id='' + ), + ) + core_req = to_core_list_task_push_notification_config_request(v03_req) + assert core_req.task_id == '' + + +def test_to_core_stream_response_unknown_result(): + v03_res = types_v03.SendStreamingMessageSuccessResponse.model_construct( + id=1, result=None + ) + core_res = to_core_stream_response(v03_res) + assert core_res.WhichOneof('payload') is None + + +def test_to_core_part_unknown_part(): + # If the root of the part is somehow none of TextPart, DataPart, or FilePart, + # it should just return an empty core Part. + v03_part = types_v03.Part.model_construct(root=None) + core_part = to_core_part(v03_part) + assert not core_part.HasField('text') + assert not core_part.HasField('data') + assert not core_part.HasField('raw') + assert not core_part.HasField('url') + + +def test_task_db_conversion(): + v10_task = pb2_v10.Task( + id='task-123', + context_id='ctx-456', + status=pb2_v10.TaskStatus( + state=pb2_v10.TaskState.TASK_STATE_WORKING, + ), + metadata={'m1': 'v1'}, + ) + owner = 'owner-789' + + # Test Core -> Model + model = core_to_compat_task_model(v10_task, owner) + assert model.id == 'task-123' + assert model.context_id == 'ctx-456' + assert model.owner == owner + assert model.protocol_version == '0.3' + assert model.status['state'] == 'working' + assert model.task_metadata == {'m1': 'v1'} + + # Test Model -> Core + v10_restored = compat_task_model_to_core(model) + assert v10_restored.id == v10_task.id + assert v10_restored.context_id == v10_task.context_id + assert v10_restored.status.state == v10_task.status.state + assert v10_restored.metadata == v10_task.metadata + + +def test_push_notification_config_db_conversion(): + task_id = 'task-123' + v10_config = pb2_v10.TaskPushNotificationConfig( + id='pnc-1', + url='https://example.com/push', + token='secret-token', + ) + owner = 'owner-789' + + # Test Core -> Model (No encryption) + model = core_to_compat_push_notification_config_model( + task_id, v10_config, owner + ) + assert model.task_id == task_id + assert model.config_id == 'pnc-1' + assert model.owner == owner + assert model.protocol_version == '0.3' + + import json + + data = json.loads(model.config_data.decode('utf-8')) + assert data['url'] == 'https://example.com/push' + assert data['token'] == 'secret-token' + + # Test Model -> Core + v10_restored = compat_push_notification_config_model_to_core( + model.config_data.decode('utf-8'), task_id + ) + assert v10_restored.id == v10_config.id + assert v10_restored.url == v10_config.url + assert v10_restored.token == v10_config.token + + +def test_push_notification_config_persistence_conversion_with_encryption(): + task_id = 'task-123' + v10_config = pb2_v10.TaskPushNotificationConfig( + id='pnc-1', + url='https://example.com/push', + token='secret-token', + ) + owner = 'owner-789' + key = Fernet.generate_key() + fernet = Fernet(key) + + # Test Core -> Model (With encryption) + model = core_to_compat_push_notification_config_model( + task_id, v10_config, owner, fernet=fernet + ) + assert ( + model.config_data != v10_config.SerializeToString() + ) # Should be encrypted + + # Decrypt and verify + decrypted_data = fernet.decrypt(model.config_data) + + data = json.loads(decrypted_data.decode('utf-8')) + assert data['url'] == 'https://example.com/push' + assert data['token'] == 'secret-token' + + # Test Model -> Core + v10_restored = compat_push_notification_config_model_to_core( + decrypted_data.decode('utf-8'), task_id + ) + assert v10_restored.id == v10_config.id + assert v10_restored.url == v10_config.url + assert v10_restored.token == v10_config.token + + +def test_to_compat_agent_card_unsupported_version(): + card = pb2_v10.AgentCard( + name='Modern Agent', + description='Only supports 1.0', + version='1.0.0', + supported_interfaces=[ + pb2_v10.AgentInterface( + url='http://grpc.v10.com', + protocol_binding='GRPC', + protocol_version='1.0.0', + ), + ], + capabilities=pb2_v10.AgentCapabilities(), + ) + with pytest.raises( + VersionNotSupportedError, + match='AgentCard must have at least one interface with compatible protocol version.', + ): + to_compat_agent_card(card) diff --git a/tests/compat/v0_3/test_extension_headers.py b/tests/compat/v0_3/test_extension_headers.py new file mode 100644 index 000000000..d5abbdfcc --- /dev/null +++ b/tests/compat/v0_3/test_extension_headers.py @@ -0,0 +1,39 @@ +from a2a.compat.v0_3.extension_headers import ( + LEGACY_HTTP_EXTENSION_HEADER, + add_legacy_extension_header, +) +from a2a.extensions.common import HTTP_EXTENSION_HEADER + + +def test_legacy_header_constant_value(): + assert LEGACY_HTTP_EXTENSION_HEADER == 'X-A2A-Extensions' + + +def test_mirrors_spec_header_under_legacy_name(): + params = {HTTP_EXTENSION_HEADER: 'foo,bar'} + + add_legacy_extension_header(params) + + assert params == { + HTTP_EXTENSION_HEADER: 'foo,bar', + LEGACY_HTTP_EXTENSION_HEADER: 'foo,bar', + } + + +def test_no_op_when_spec_header_absent(): + params = {'Other': 'value'} + + add_legacy_extension_header(params) + + assert params == {'Other': 'value'} + + +def test_does_not_overwrite_existing_legacy_header(): + params = { + HTTP_EXTENSION_HEADER: 'spec', + LEGACY_HTTP_EXTENSION_HEADER: 'legacy-original', + } + + add_legacy_extension_header(params) + + assert params[LEGACY_HTTP_EXTENSION_HEADER] == 'legacy-original' diff --git a/tests/compat/v0_3/test_grpc_handler.py b/tests/compat/v0_3/test_grpc_handler.py new file mode 100644 index 000000000..fbd74f29f --- /dev/null +++ b/tests/compat/v0_3/test_grpc_handler.py @@ -0,0 +1,506 @@ +import grpc +import grpc.aio +import pytest +from unittest.mock import AsyncMock, MagicMock, ANY + +from a2a.compat.v0_3 import ( + a2a_v0_3_pb2, + grpc_handler as compat_grpc_handler, +) +from a2a.server.request_handlers import RequestHandler +from a2a.types import a2a_pb2 +from a2a.utils.errors import TaskNotFoundError, InvalidParamsError + + +@pytest.fixture +def mock_request_handler() -> AsyncMock: + return AsyncMock(spec=RequestHandler) + + +@pytest.fixture +def mock_grpc_context() -> AsyncMock: + context = AsyncMock(spec=grpc.aio.ServicerContext) + context.abort = AsyncMock() + context.set_trailing_metadata = MagicMock() + context.invocation_metadata = MagicMock(return_value=grpc.aio.Metadata()) + return context + + +@pytest.fixture +def sample_agent_card() -> a2a_pb2.AgentCard: + return a2a_pb2.AgentCard( + name='Test Agent', + description='A test agent', + version='1.0.0', + capabilities=a2a_pb2.AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), + supported_interfaces=[ + a2a_pb2.AgentInterface( + url='http://jsonrpc.v03.com', + protocol_binding='JSONRPC', + protocol_version='0.3', + ), + ], + ) + + +@pytest.fixture +def handler( + mock_request_handler: AsyncMock, sample_agent_card: a2a_pb2.AgentCard +) -> compat_grpc_handler.CompatGrpcHandler: + return compat_grpc_handler.CompatGrpcHandler( + request_handler=mock_request_handler, + ) + + +@pytest.mark.asyncio +async def test_send_message_success_task( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.SendMessageRequest( + request=a2a_v0_3_pb2.Message( + message_id='msg-1', role=a2a_v0_3_pb2.Role.ROLE_USER + ) + ) + mock_request_handler.on_message_send.return_value = a2a_pb2.Task( + id='task-1', context_id='ctx-1' + ) + + response = await handler.SendMessage(request, mock_grpc_context) + + expected_req = a2a_pb2.SendMessageRequest( + message=a2a_pb2.Message( + message_id='msg-1', role=a2a_pb2.Role.ROLE_USER + ), + configuration=a2a_pb2.SendMessageConfiguration( + history_length=0, return_immediately=True + ), + ) + mock_request_handler.on_message_send.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.SendMessageResponse( + task=a2a_v0_3_pb2.Task( + id='task-1', context_id='ctx-1', status=a2a_v0_3_pb2.TaskStatus() + ) + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_send_message_success_message( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.SendMessageRequest( + request=a2a_v0_3_pb2.Message( + message_id='msg-1', role=a2a_v0_3_pb2.Role.ROLE_USER + ) + ) + mock_request_handler.on_message_send.return_value = a2a_pb2.Message( + message_id='msg-2', role=a2a_pb2.Role.ROLE_AGENT + ) + + response = await handler.SendMessage(request, mock_grpc_context) + + expected_req = a2a_pb2.SendMessageRequest( + message=a2a_pb2.Message( + message_id='msg-1', role=a2a_pb2.Role.ROLE_USER + ), + configuration=a2a_pb2.SendMessageConfiguration( + history_length=0, return_immediately=True + ), + ) + mock_request_handler.on_message_send.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.SendMessageResponse( + msg=a2a_v0_3_pb2.Message( + message_id='msg-2', role=a2a_v0_3_pb2.Role.ROLE_AGENT + ) + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_send_streaming_message_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + async def mock_stream(*args, **kwargs): + yield a2a_pb2.Task(id='task-1', context_id='ctx-1') + yield a2a_pb2.Message(message_id='msg-2', role=a2a_pb2.Role.ROLE_AGENT) + yield a2a_pb2.TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=a2a_pb2.TaskStatus( + state=a2a_pb2.TaskState.TASK_STATE_WORKING + ), + ) + yield a2a_pb2.TaskArtifactUpdateEvent( + task_id='task-1', + context_id='ctx-1', + artifact=a2a_pb2.Artifact(artifact_id='art-1'), + ) + + mock_request_handler.on_message_send_stream.side_effect = mock_stream + request = a2a_v0_3_pb2.SendMessageRequest( + request=a2a_v0_3_pb2.Message( + message_id='msg-1', role=a2a_v0_3_pb2.Role.ROLE_USER + ) + ) + + responses = [] + async for res in handler.SendStreamingMessage(request, mock_grpc_context): + responses.append(res) + + expected_req = a2a_pb2.SendMessageRequest( + message=a2a_pb2.Message( + message_id='msg-1', role=a2a_pb2.Role.ROLE_USER + ), + configuration=a2a_pb2.SendMessageConfiguration( + history_length=0, return_immediately=True + ), + ) + mock_request_handler.on_message_send_stream.assert_called_once_with( + expected_req, ANY + ) + + expected_responses = [ + a2a_v0_3_pb2.StreamResponse( + task=a2a_v0_3_pb2.Task( + id='task-1', + context_id='ctx-1', + status=a2a_v0_3_pb2.TaskStatus(), + ) + ), + a2a_v0_3_pb2.StreamResponse( + msg=a2a_v0_3_pb2.Message( + message_id='msg-2', role=a2a_v0_3_pb2.Role.ROLE_AGENT + ) + ), + a2a_v0_3_pb2.StreamResponse( + status_update=a2a_v0_3_pb2.TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=a2a_v0_3_pb2.TaskStatus( + state=a2a_v0_3_pb2.TaskState.TASK_STATE_WORKING + ), + ) + ), + a2a_v0_3_pb2.StreamResponse( + artifact_update=a2a_v0_3_pb2.TaskArtifactUpdateEvent( + task_id='task-1', + context_id='ctx-1', + artifact=a2a_v0_3_pb2.Artifact(artifact_id='art-1'), + ) + ), + ] + assert responses == expected_responses + + +@pytest.mark.asyncio +async def test_get_task_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.GetTaskRequest(name='tasks/task-1') + mock_request_handler.on_get_task.return_value = a2a_pb2.Task( + id='task-1', context_id='ctx-1' + ) + + response = await handler.GetTask(request, mock_grpc_context) + + expected_req = a2a_pb2.GetTaskRequest(id='task-1') + mock_request_handler.on_get_task.assert_called_once_with(expected_req, ANY) + + expected_res = a2a_v0_3_pb2.Task( + id='task-1', context_id='ctx-1', status=a2a_v0_3_pb2.TaskStatus() + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_get_task_not_found( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.GetTaskRequest(name='tasks/task-1') + mock_request_handler.on_get_task.return_value = None + + await handler.GetTask(request, mock_grpc_context) + + expected_req = a2a_pb2.GetTaskRequest(id='task-1') + mock_request_handler.on_get_task.assert_called_once_with(expected_req, ANY) + mock_grpc_context.abort.assert_called() + assert mock_grpc_context.abort.call_args[0][0] == grpc.StatusCode.NOT_FOUND + + +@pytest.mark.asyncio +async def test_cancel_task_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.CancelTaskRequest(name='tasks/task-1') + mock_request_handler.on_cancel_task.return_value = a2a_pb2.Task( + id='task-1', context_id='ctx-1' + ) + + response = await handler.CancelTask(request, mock_grpc_context) + + expected_req = a2a_pb2.CancelTaskRequest(id='task-1') + mock_request_handler.on_cancel_task.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.Task( + id='task-1', context_id='ctx-1', status=a2a_v0_3_pb2.TaskStatus() + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_task_subscription_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + async def mock_stream(*args, **kwargs): + yield a2a_pb2.TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=a2a_pb2.TaskStatus( + state=a2a_pb2.TaskState.TASK_STATE_WORKING + ), + ) + + mock_request_handler.on_subscribe_to_task.side_effect = mock_stream + request = a2a_v0_3_pb2.TaskSubscriptionRequest(name='tasks/task-1') + + responses = [] + async for res in handler.TaskSubscription(request, mock_grpc_context): + responses.append(res) + + expected_req = a2a_pb2.SubscribeToTaskRequest(id='task-1') + mock_request_handler.on_subscribe_to_task.assert_called_once_with( + expected_req, ANY + ) + + expected_responses = [ + a2a_v0_3_pb2.StreamResponse( + status_update=a2a_v0_3_pb2.TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=a2a_v0_3_pb2.TaskStatus( + state=a2a_v0_3_pb2.TaskState.TASK_STATE_WORKING + ), + ) + ) + ] + assert responses == expected_responses + + +@pytest.mark.asyncio +async def test_create_push_config_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.CreateTaskPushNotificationConfigRequest( + parent='tasks/task-1', + config=a2a_v0_3_pb2.TaskPushNotificationConfig( + push_notification_config=a2a_v0_3_pb2.PushNotificationConfig( + url='http://example.com' + ) + ), + ) + mock_request_handler.on_create_task_push_notification_config.return_value = a2a_pb2.TaskPushNotificationConfig( + task_id='task-1', + url='http://example.com', + id='cfg-1', + ) + + response = await handler.CreateTaskPushNotificationConfig( + request, mock_grpc_context + ) + + expected_req = a2a_pb2.TaskPushNotificationConfig( + task_id='task-1', + url='http://example.com', + ) + mock_request_handler.on_create_task_push_notification_config.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.TaskPushNotificationConfig( + name='tasks/task-1/pushNotificationConfigs/cfg-1', + push_notification_config=a2a_v0_3_pb2.PushNotificationConfig( + url='http://example.com', id='cfg-1' + ), + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_get_push_config_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.GetTaskPushNotificationConfigRequest( + name='tasks/task-1/pushNotificationConfigs/cfg-1' + ) + mock_request_handler.on_get_task_push_notification_config.return_value = ( + a2a_pb2.TaskPushNotificationConfig( + task_id='task-1', + url='http://example.com', + id='cfg-1', + ) + ) + + response = await handler.GetTaskPushNotificationConfig( + request, mock_grpc_context + ) + + expected_req = a2a_pb2.GetTaskPushNotificationConfigRequest( + task_id='task-1', id='cfg-1' + ) + mock_request_handler.on_get_task_push_notification_config.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.TaskPushNotificationConfig( + name='tasks/task-1/pushNotificationConfigs/cfg-1', + push_notification_config=a2a_v0_3_pb2.PushNotificationConfig( + url='http://example.com', id='cfg-1' + ), + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_list_push_config_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.ListTaskPushNotificationConfigRequest( + parent='tasks/task-1' + ) + mock_request_handler.on_list_task_push_notification_configs.return_value = ( + a2a_pb2.ListTaskPushNotificationConfigsResponse( + configs=[ + a2a_pb2.TaskPushNotificationConfig( + task_id='task-1', url='http://example.com', id='cfg-1' + ) + ] + ) + ) + + response = await handler.ListTaskPushNotificationConfig( + request, mock_grpc_context + ) + + expected_req = a2a_pb2.ListTaskPushNotificationConfigsRequest( + task_id='task-1' + ) + mock_request_handler.on_list_task_push_notification_configs.assert_called_once_with( + expected_req, ANY + ) + + expected_res = a2a_v0_3_pb2.ListTaskPushNotificationConfigResponse( + configs=[ + a2a_v0_3_pb2.TaskPushNotificationConfig( + name='tasks/task-1/pushNotificationConfigs/cfg-1', + push_notification_config=a2a_v0_3_pb2.PushNotificationConfig( + url='http://example.com', id='cfg-1' + ), + ) + ] + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_get_agent_card_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + sample_agent_card: a2a_pb2.AgentCard, +) -> None: + request = a2a_v0_3_pb2.GetAgentCardRequest() + mock_request_handler.on_get_extended_agent_card.return_value = ( + sample_agent_card + ) + + response = await handler.GetAgentCard(request, mock_grpc_context) + + expected_res = a2a_v0_3_pb2.AgentCard( + name='Test Agent', + description='A test agent', + url='http://jsonrpc.v03.com', + version='1.0.0', + protocol_version='0.3', + supports_authenticated_extended_card=True, + preferred_transport='JSONRPC', + capabilities=a2a_v0_3_pb2.AgentCapabilities( + streaming=True, + push_notifications=True, + ), + ) + assert response == expected_res + + +@pytest.mark.asyncio +async def test_delete_push_config_success( + handler: compat_grpc_handler.CompatGrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + request = a2a_v0_3_pb2.DeleteTaskPushNotificationConfigRequest( + name='tasks/task-1/pushNotificationConfigs/cfg-1' + ) + mock_request_handler.on_delete_task_push_notification_config.return_value = None + + from google.protobuf import empty_pb2 + + response = await handler.DeleteTaskPushNotificationConfig( + request, mock_grpc_context + ) + + expected_req = a2a_pb2.DeleteTaskPushNotificationConfigRequest( + task_id='task-1', id='cfg-1' + ) + mock_request_handler.on_delete_task_push_notification_config.assert_called_once_with( + expected_req, ANY + ) + + assert isinstance(response, empty_pb2.Empty) + + +@pytest.mark.asyncio +async def test_extract_task_id_invalid( + handler: compat_grpc_handler.CompatGrpcHandler, +): + with pytest.raises(InvalidParamsError): + handler._extract_task_id('invalid-name') + + +@pytest.mark.asyncio +async def test_extract_task_and_config_id_invalid( + handler: compat_grpc_handler.CompatGrpcHandler, +): + with pytest.raises(InvalidParamsError): + handler._extract_task_and_config_id('invalid-name') diff --git a/tests/compat/v0_3/test_grpc_transport.py b/tests/compat/v0_3/test_grpc_transport.py new file mode 100644 index 000000000..402a57000 --- /dev/null +++ b/tests/compat/v0_3/test_grpc_transport.py @@ -0,0 +1,68 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from a2a.client.client import ClientCallContext +from a2a.client.optionals import Channel +from a2a.compat.v0_3 import a2a_v0_3_pb2 +from a2a.compat.v0_3.grpc_transport import CompatGrpcTransport +from a2a.types.a2a_pb2 import ( + Message, + Role, + SendMessageRequest, + SendMessageResponse, +) + + +@pytest.mark.asyncio +async def test_compat_grpc_transport_send_message_response_msg_parsing(): + mock_channel = AsyncMock(spec=Channel) + transport = CompatGrpcTransport(channel=mock_channel, agent_card=None) + + mock_stub = MagicMock() + + expected_resp = a2a_v0_3_pb2.SendMessageResponse( + msg=a2a_v0_3_pb2.Message( + message_id='msg-123', role=a2a_v0_3_pb2.Role.ROLE_AGENT + ) + ) + + mock_stub.SendMessage = AsyncMock(return_value=expected_resp) + transport.stub = mock_stub + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + + assert isinstance(response, SendMessageResponse) + assert response.HasField('message') + assert response.message.message_id == 'msg-123' + + +def test_compat_grpc_transport_mirrors_extension_metadata(): + """Compat gRPC client must also emit the legacy x-a2a-extensions metadata + so that v0.3 servers (which only know that name) understand the request.""" + transport = CompatGrpcTransport( + channel=AsyncMock(spec=Channel), agent_card=None + ) + context = ClientCallContext( + service_parameters={'A2A-Extensions': 'foo,bar'} + ) + + metadata = dict(transport._get_grpc_metadata(context)) + + assert metadata['a2a-extensions'] == 'foo,bar' + assert metadata['x-a2a-extensions'] == 'foo,bar' + + +def test_compat_grpc_transport_no_extension_metadata(): + transport = CompatGrpcTransport( + channel=AsyncMock(spec=Channel), agent_card=None + ) + + metadata = dict(transport._get_grpc_metadata(None)) + + assert 'a2a-extensions' not in metadata + assert 'x-a2a-extensions' not in metadata diff --git a/tests/compat/v0_3/test_jsonrpc_app_compat.py b/tests/compat/v0_3/test_jsonrpc_app_compat.py new file mode 100644 index 000000000..6658097dc --- /dev/null +++ b/tests/compat/v0_3/test_jsonrpc_app_compat.py @@ -0,0 +1,149 @@ +import logging + +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.testclient import TestClient + +from starlette.applications import Starlette +from a2a.server.routes import create_jsonrpc_routes +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import ( + AgentCard, + AgentCapabilities, + AgentInterface, + Message as Message10, + Part as Part10, + Role as Role10, + Task as Task10, + TaskStatus as TaskStatus10, + TaskState as TaskState10, +) + +from a2a.compat.v0_3 import a2a_v0_3_pb2 + + +logger = logging.getLogger(__name__) + + +@pytest.fixture +def mock_handler(): + handler = AsyncMock(spec=RequestHandler) + handler.on_message_send.return_value = Message10( + message_id='test', + role=Role10.ROLE_AGENT, + parts=[Part10(text='response message')], + ) + handler.on_get_task.return_value = Task10( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus10( + state=TaskState10.TASK_STATE_COMPLETED, + ), + ) + return handler + + +@pytest.fixture +def agent_card(): + card = AgentCard( + name='TestAgent', + description='Test Description', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=False, push_notifications=True, extended_agent_card=True + ), + ) + interface = card.supported_interfaces.add() + interface.url = 'http://mockurl.com' + interface.protocol_binding = 'jsonrpc' + interface.protocol_version = '0.3' + return card + + +@pytest.fixture +def test_app(mock_handler, agent_card): + mock_handler._agent_card = agent_card + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_handler, + enable_v0_3_compat=True, + rpc_url='/', + ) + return Starlette(routes=jsonrpc_routes) + + +@pytest.fixture +def client(test_app): + return TestClient(test_app) + + +def test_send_message_v03_compat( + client: TestClient, mock_handler: AsyncMock +) -> None: + request_payload = { + 'jsonrpc': '2.0', + 'id': '1', + 'method': 'message/send', + 'params': { + 'message': { + 'messageId': 'req', + 'role': 'user', + 'parts': [{'text': 'hello'}], + } + }, + } + + response = client.post('/', json=request_payload) + assert response.status_code == 200 + data = response.json() + + assert data['jsonrpc'] == '2.0' + assert data['id'] == '1' + assert 'result' in data + assert data['result']['messageId'] == 'test' + assert data['result']['parts'][0]['text'] == 'response message' + + +def test_get_task_v03_compat( + client: TestClient, mock_handler: AsyncMock +) -> None: + request_payload = { + 'jsonrpc': '2.0', + 'id': '2', + 'method': 'tasks/get', + 'params': {'id': 'test_task_id'}, + } + + response = client.post('/', json=request_payload) + assert response.status_code == 200 + data = response.json() + + assert data['jsonrpc'] == '2.0' + assert data['id'] == '2' + assert 'result' in data + assert data['result']['id'] == 'test_task_id' + assert data['result']['status']['state'] == 'completed' + + +def test_get_extended_agent_card_v03_compat( + client: TestClient, mock_handler: AsyncMock, agent_card: AgentCard +) -> None: + """Test that the v0.3 method name 'agent/getAuthenticatedExtendedCard' is correctly routed.""" + mock_handler.on_get_extended_agent_card.return_value = agent_card + request_payload = { + 'jsonrpc': '2.0', + 'id': '3', + 'method': 'agent/getAuthenticatedExtendedCard', + 'params': {}, + } + + response = client.post('/', json=request_payload) + assert response.status_code == 200 + data = response.json() + + assert data['jsonrpc'] == '2.0' + assert data['id'] == '3' + assert 'result' in data + # The result should be a v0.3 AgentCard + assert 'supportsAuthenticatedExtendedCard' in data['result'] diff --git a/tests/compat/v0_3/test_jsonrpc_transport.py b/tests/compat/v0_3/test_jsonrpc_transport.py new file mode 100644 index 000000000..70291f005 --- /dev/null +++ b/tests/compat/v0_3/test_jsonrpc_transport.py @@ -0,0 +1,567 @@ +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest + +from a2a.client.errors import A2AClientError +from a2a.compat.v0_3.jsonrpc_transport import CompatJsonRpcTransport +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + Message, + Role, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, + TaskState, +) +from a2a.utils.errors import InvalidParamsError + + +@pytest.fixture +def mock_httpx_client(): + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def agent_card(): + return AgentCard(capabilities=AgentCapabilities(extended_agent_card=True)) + + +@pytest.fixture +def transport(mock_httpx_client, agent_card): + return CompatJsonRpcTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://example.com', + ) + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_send_message_response_msg_parsing( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'messageId': 'msg-123', + 'role': 'agent', + 'parts': [{'text': 'Hello'}], + } + } + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + + expected_response = SendMessageResponse( + message=Message( + message_id='msg-123', + role=Role.ROLE_AGENT, + parts=[{'text': 'Hello'}], + ) + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_send_message_task(transport): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'id': 'task-123', + 'contextId': 'ctx-456', + 'status': { + 'state': 'working', + 'message': { + 'messageId': 'msg-123', + 'role': 'agent', + 'parts': [], + }, + }, + } + } + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + + expected_response = SendMessageResponse( + task=Task( + id='task-123', + context_id='ctx-456', + status={ + 'state': TaskState.TASK_STATE_WORKING, + 'message': {'message_id': 'msg-123', 'role': Role.ROLE_AGENT}, + }, + ) + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_get_task(transport): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'id': 'task-123', + 'contextId': 'ctx-456', + 'status': { + 'state': 'completed', + 'message': { + 'messageId': 'msg-789', + 'role': 'agent', + 'parts': [{'text': 'Done'}], + }, + }, + } + } + + transport._send_request = mock_send_request + + req = GetTaskRequest(id='task-123') + response = await transport.get_task(req) + + expected_response = Task( + id='task-123', + context_id='ctx-456', + status={ + 'state': TaskState.TASK_STATE_COMPLETED, + 'message': { + 'message_id': 'msg-789', + 'role': Role.ROLE_AGENT, + 'parts': [{'text': 'Done'}], + }, + }, + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_cancel_task(transport): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'id': 'task-123', + 'contextId': 'ctx-456', + 'status': { + 'state': 'canceled', + 'message': { + 'messageId': 'msg-789', + 'role': 'agent', + 'parts': [{'text': 'Cancelled'}], + }, + }, + } + } + + transport._send_request = mock_send_request + + req = CancelTaskRequest(id='task-123') + response = await transport.cancel_task(req) + + expected_response = Task( + id='task-123', + context_id='ctx-456', + status={ + 'state': TaskState.TASK_STATE_CANCELED, + 'message': { + 'message_id': 'msg-789', + 'role': Role.ROLE_AGENT, + 'parts': [{'text': 'Cancelled'}], + }, + }, + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_create_task_push_notification_config( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'taskId': 'task-123', + 'name': 'tasks/task-123/pushNotificationConfigs/push-123', + 'pushNotificationConfig': { + 'url': 'http://push', + 'id': 'push-123', + }, + } + } + + transport._send_request = mock_send_request + + req = TaskPushNotificationConfig( + task_id='task-123', id='push-123', url='http://push' + ) + response = await transport.create_task_push_notification_config(req) + + expected_response = TaskPushNotificationConfig( + id='push-123', task_id='task-123', url='http://push' + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_get_task_push_notification_config( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'taskId': 'task-123', + 'name': 'tasks/task-123/pushNotificationConfigs/push-123', + 'pushNotificationConfig': { + 'url': 'http://push', + 'id': 'push-123', + }, + } + } + + transport._send_request = mock_send_request + + req = GetTaskPushNotificationConfigRequest( + task_id='task-123', id='push-123' + ) + response = await transport.get_task_push_notification_config(req) + + expected_response = TaskPushNotificationConfig( + id='push-123', task_id='task-123', url='http://push' + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_list_task_push_notification_configs( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'result': [ + { + 'taskId': 'task-123', + 'name': 'tasks/task-123/pushNotificationConfigs/push-123', + 'pushNotificationConfig': { + 'url': 'http://push', + 'id': 'push-123', + }, + } + ] + } + + transport._send_request = mock_send_request + + req = ListTaskPushNotificationConfigsRequest(task_id='task-123') + response = await transport.list_task_push_notification_configs(req) + + expected_response = ListTaskPushNotificationConfigsResponse( + configs=[ + TaskPushNotificationConfig( + id='push-123', task_id='task-123', url='http://push' + ) + ] + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_delete_task_push_notification_config( + transport, +): + async def mock_send_request(*args, **kwargs): + return {'result': {}} + + transport._send_request = mock_send_request + + req = DeleteTaskPushNotificationConfigRequest( + task_id='task-123', id='push-123' + ) + assert await transport.delete_task_push_notification_config(req) is None + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_get_extended_agent_card(transport): + async def mock_send_request(*args, **kwargs): + return { + 'result': { + 'name': 'ExtendedAgent', + 'url': 'http://agent', + 'version': '1.0.0', + 'description': 'Description', + 'skills': [], + 'defaultInputModes': [], + 'defaultOutputModes': [], + 'capabilities': {}, + 'supportsAuthenticatedExtendedCard': True, + } + } + + transport._send_request = mock_send_request + + req = GetExtendedAgentCardRequest() + response = await transport.get_extended_agent_card(req) + + expected_response = AgentCard( + name='ExtendedAgent', + version='1.0.0', + description='Description', + capabilities=AgentCapabilities(extended_agent_card=True), + ) + expected_response.supported_interfaces.add( + url='http://agent', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_get_extended_agent_card_not_supported( + transport, +): + transport.agent_card.capabilities.extended_agent_card = False + + req = GetExtendedAgentCardRequest() + response = await transport.get_extended_agent_card(req) + + assert response == transport.agent_card + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_get_extended_agent_card_method_name( + transport, +): + """Verify the correct v0.3 method name 'agent/getAuthenticatedExtendedCard' is used.""" + captured_request: dict | None = None + + async def mock_send_request(data, *args, **kwargs): + nonlocal captured_request + captured_request = data + return { + 'result': { + 'name': 'ExtendedAgent', + 'url': 'http://agent', + 'version': '1.0.0', + 'description': 'Description', + 'skills': [], + 'defaultInputModes': [], + 'defaultOutputModes': [], + 'capabilities': {}, + 'supportsAuthenticatedExtendedCard': True, + } + } + + transport._send_request = mock_send_request + + req = GetExtendedAgentCardRequest() + await transport.get_extended_agent_card(req) + + assert captured_request is not None + assert captured_request['method'] == 'agent/getAuthenticatedExtendedCard' + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_close(transport, mock_httpx_client): + await transport.close() + mock_httpx_client.aclose.assert_called_once() + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_send_message_streaming(transport): + async def mock_send_stream_request(*args, **kwargs): + task = Task(id='task-123', context_id='ctx') + task.status.message.role = Role.ROLE_AGENT + yield StreamResponse(task=task) + yield StreamResponse( + message=Message(message_id='msg-123', role=Role.ROLE_AGENT) + ) + + transport._send_stream_request = mock_send_stream_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + events = [event async for event in transport.send_message_streaming(req)] + + assert len(events) == 2 + expected_task = Task(id='task-123', context_id='ctx') + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + assert events[1] == StreamResponse( + message=Message(message_id='msg-123', role=Role.ROLE_AGENT) + ) + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_subscribe(transport): + async def mock_send_stream_request(*args, **kwargs): + task = Task(id='task-123', context_id='ctx') + task.status.message.role = Role.ROLE_AGENT + yield StreamResponse(task=task) + + transport._send_stream_request = mock_send_stream_request + + req = SubscribeToTaskRequest(id='task-123') + events = [event async for event in transport.subscribe(req)] + + assert len(events) == 1 + expected_task = Task(id='task-123', context_id='ctx') + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + + +def test_compat_jsonrpc_transport_handle_http_error(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 400 + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(A2AClientError) as exc_info: + transport._handle_http_error(error) + + assert str(exc_info.value) == 'HTTP Error: 400' + + +def test_compat_jsonrpc_transport_create_jsonrpc_error(transport): + error_dict = {'code': -32602, 'message': 'Invalid parameters'} + + error = transport._create_jsonrpc_error(error_dict) + assert isinstance(error, InvalidParamsError) + assert str(error) == 'Invalid parameters' + + +def test_compat_jsonrpc_transport_create_jsonrpc_error_unknown(transport): + error_dict = {'code': -12345, 'message': 'Unknown Error'} + + error = transport._create_jsonrpc_error(error_dict) + assert isinstance(error, A2AClientError) + assert str(error) == 'Unknown Error' + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_list_tasks(transport): + with pytest.raises(NotImplementedError): + await transport.list_tasks(ListTasksRequest()) + + +@pytest.mark.asyncio +async def test_compat_jsonrpc_transport_send_message_empty(transport): + async def mock_send_request(*args, **kwargs): + return {'result': {}} + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + assert response == SendMessageResponse() + + +@pytest.mark.asyncio +@patch('a2a.compat.v0_3.jsonrpc_transport.send_http_stream_request') +async def test_compat_jsonrpc_transport_send_stream_request( + mock_send_http_stream_request, transport +): + async def mock_generator(*args, **kwargs): + yield b'{"result": {"id": "task-123", "contextId": "ctx-456", "kind": "task", "status": {"state": "working", "message": {"messageId": "msg-1", "role": "agent", "parts": []}}}}' + + mock_send_http_stream_request.return_value = mock_generator() + + events = [ + event + async for event in transport._send_stream_request({'some': 'data'}) + ] + + assert len(events) == 1 + expected_task = Task(id='task-123', context_id='ctx-456') + expected_task.status.state = TaskState.TASK_STATE_WORKING + expected_task.status.message.message_id = 'msg-1' + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + + mock_send_http_stream_request.assert_called_once_with( + transport.httpx_client, + 'POST', + 'http://example.com', + transport._handle_http_error, + json={'some': 'data'}, + headers={'a2a-version': '0.3'}, + ) + + +@pytest.mark.asyncio +@patch('a2a.compat.v0_3.jsonrpc_transport.send_http_request') +async def test_compat_jsonrpc_transport_send_request( + mock_send_http_request, transport +): + mock_send_http_request.return_value = {'result': {'ok': True}} + mock_request = httpx.Request('POST', 'http://example.com') + transport.httpx_client.build_request.return_value = mock_request + + res = await transport._send_request({'some': 'data'}) + assert res == {'result': {'ok': True}} + + transport.httpx_client.build_request.assert_called_once_with( + 'POST', + 'http://example.com', + json={'some': 'data'}, + headers={'a2a-version': '0.3'}, + ) + mock_send_http_request.assert_called_once_with( + transport.httpx_client, mock_request, transport._handle_http_error + ) + + +@pytest.mark.asyncio +@patch('a2a.compat.v0_3.jsonrpc_transport.send_http_request') +async def test_compat_jsonrpc_transport_mirrors_extension_header( + mock_send_http_request, transport +): + """Compat client must also emit the legacy X-A2A-Extensions header so + that v0.3 servers (which only know that name) understand the request.""" + from a2a.client.client import ClientCallContext + + mock_send_http_request.return_value = {'result': {'ok': True}} + transport.httpx_client.build_request.return_value = httpx.Request( + 'POST', 'http://example.com' + ) + + context = ClientCallContext( + service_parameters={'A2A-Extensions': 'foo,bar'} + ) + + await transport._send_request({'some': 'data'}, context=context) + + _, kwargs = transport.httpx_client.build_request.call_args + headers = kwargs['headers'] + assert headers['A2A-Extensions'] == 'foo,bar' + assert headers['X-A2A-Extensions'] == 'foo,bar' diff --git a/tests/compat/v0_3/test_proto_utils.py b/tests/compat/v0_3/test_proto_utils.py new file mode 100644 index 000000000..7d421a5f8 --- /dev/null +++ b/tests/compat/v0_3/test_proto_utils.py @@ -0,0 +1,732 @@ +""" +This file was migrated from the a2a-python SDK version 0.3. +It provides utilities for converting between legacy v0.3 Pydantic models and legacy v0.3 Protobuf definitions. +""" + +import base64 +from unittest import mock + +import pytest + +from a2a.compat.v0_3 import types +from a2a.compat.v0_3 import a2a_v0_3_pb2 as a2a_pb2 +from a2a.compat.v0_3 import proto_utils +from a2a.utils.errors import InvalidParamsError + + +# --- Test Data --- + + +@pytest.fixture +def sample_message() -> types.Message: + return types.Message( + message_id='msg-1', + context_id='ctx-1', + task_id='task-1', + role=types.Role.user, + parts=[ + types.Part(root=types.TextPart(text='Hello')), + types.Part( + root=types.FilePart( + file=types.FileWithUri( + uri='file:///test.txt', + name='test.txt', + mime_type='text/plain', + ), + ) + ), + types.Part(root=types.DataPart(data={'key': 'value'})), + ], + metadata={'source': 'test'}, + ) + + +@pytest.fixture +def sample_task(sample_message: types.Message) -> types.Task: + return types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus( + state=types.TaskState.working, message=sample_message + ), + history=[sample_message], + artifacts=[ + types.Artifact( + artifact_id='art-1', + parts=[ + types.Part(root=types.TextPart(text='Artifact content')) + ], + ) + ], + metadata={'source': 'test'}, + ) + + +@pytest.fixture +def sample_agent_card() -> types.AgentCard: + return types.AgentCard( + name='Test Agent', + description='A test agent', + url='http://localhost', + version='1.0.0', + capabilities=types.AgentCapabilities( + streaming=True, push_notifications=True + ), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + skills=[ + types.AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + ) + ], + provider=types.AgentProvider( + organization='Test Org', url='http://test.org' + ), + security=[{'oauth_scheme': ['read', 'write']}], + security_schemes={ + 'oauth_scheme': types.SecurityScheme( + root=types.OAuth2SecurityScheme( + flows=types.OAuthFlows( + client_credentials=types.ClientCredentialsOAuthFlow( + token_url='http://token.url', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ) + ) + ), + 'apiKey': types.SecurityScheme( + root=types.APIKeySecurityScheme( + name='X-API-KEY', in_=types.In.header + ) + ), + 'httpAuth': types.SecurityScheme( + root=types.HTTPAuthSecurityScheme(scheme='bearer') + ), + 'oidc': types.SecurityScheme( + root=types.OpenIdConnectSecurityScheme( + open_id_connect_url='http://oidc.url' + ) + ), + }, + signatures=[ + types.AgentCardSignature( + protected='protected_test', + signature='signature_test', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, + ), + ], + ) + + +# --- Test Cases --- + + +class TestToProto: + def test_part_unsupported_type(self): + """Test that ToProto.part raises ValueError for an unsupported Part type.""" + + class FakePartType: + kind = 'fake' + + # Create a mock Part object that has a .root attribute pointing to the fake type + mock_part = mock.MagicMock(spec=types.Part) + mock_part.root = FakePartType() + + with pytest.raises(ValueError, match='Unsupported part type'): + proto_utils.ToProto.part(mock_part) + + +class TestFromProto: + def test_part_unsupported_type(self): + """Test that FromProto.part raises ValueError for an unsupported part type in proto.""" + unsupported_proto_part = ( + a2a_pb2.Part() + ) # An empty part with no oneof field set + with pytest.raises(ValueError, match='Unsupported part type'): + proto_utils.FromProto.part(unsupported_proto_part) + + def test_task_query_params_invalid_name(self): + request = a2a_pb2.GetTaskRequest(name='invalid-name-format') + with pytest.raises(InvalidParamsError) as exc_info: + proto_utils.FromProto.task_query_params(request) + assert 'No task for' in str(exc_info.value) + + +class TestProtoUtils: + def test_roundtrip_message(self, sample_message: types.Message): + """Test conversion of Message to proto and back.""" + proto_msg = proto_utils.ToProto.message(sample_message) + assert isinstance(proto_msg, a2a_pb2.Message) + + # Test file part handling + assert proto_msg.content[1].file.file_with_uri == 'file:///test.txt' + assert proto_msg.content[1].file.mime_type == 'text/plain' + assert proto_msg.content[1].file.name == 'test.txt' + + roundtrip_msg = proto_utils.FromProto.message(proto_msg) + assert roundtrip_msg == sample_message + + def test_enum_conversions(self): + """Test conversions for all enum types.""" + assert ( + proto_utils.ToProto.role(types.Role.agent) + == a2a_pb2.Role.ROLE_AGENT + ) + assert ( + proto_utils.FromProto.role(a2a_pb2.Role.ROLE_USER) + == types.Role.user + ) + + for state in types.TaskState: + proto_state = proto_utils.ToProto.task_state(state) + assert proto_utils.FromProto.task_state(proto_state) == state + + # Test unknown state case + assert ( + proto_utils.FromProto.task_state( + a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + ) + == types.TaskState.unknown + ) + assert ( + proto_utils.ToProto.task_state(types.TaskState.unknown) + == a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED + ) + + def test_oauth_flows_conversion(self): + """Test conversion of different OAuth2 flows.""" + # Test password flow + password_flow = types.OAuthFlows( + password=types.PasswordOAuthFlow( + token_url='http://token.url', scopes={'read': 'Read'} + ) + ) + proto_password_flow = proto_utils.ToProto.oauth2_flows(password_flow) + assert proto_password_flow.HasField('password') + + # Test implicit flow + implicit_flow = types.OAuthFlows( + implicit=types.ImplicitOAuthFlow( + authorization_url='http://auth.url', scopes={'read': 'Read'} + ) + ) + proto_implicit_flow = proto_utils.ToProto.oauth2_flows(implicit_flow) + assert proto_implicit_flow.HasField('implicit') + + # Test authorization code flow + auth_code_flow = types.OAuthFlows( + authorization_code=types.AuthorizationCodeOAuthFlow( + authorization_url='http://auth.url', + token_url='http://token.url', + scopes={'read': 'read'}, + ) + ) + proto_auth_code_flow = proto_utils.ToProto.oauth2_flows(auth_code_flow) + assert proto_auth_code_flow.HasField('authorization_code') + + # Test invalid flow + with pytest.raises(ValueError): + proto_utils.ToProto.oauth2_flows(types.OAuthFlows()) + + # Test FromProto + roundtrip_password = proto_utils.FromProto.oauth2_flows( + proto_password_flow + ) + assert roundtrip_password.password is not None + + roundtrip_implicit = proto_utils.FromProto.oauth2_flows( + proto_implicit_flow + ) + assert roundtrip_implicit.implicit is not None + + def test_task_id_params_from_proto_invalid_name(self): + request = a2a_pb2.CancelTaskRequest(name='invalid-name-format') + with pytest.raises(InvalidParamsError) as exc_info: + proto_utils.FromProto.task_id_params(request) + assert 'No task for' in str(exc_info.value) + + def test_task_push_config_from_proto_invalid_parent(self): + request = a2a_pb2.TaskPushNotificationConfig(name='invalid-name-format') + with pytest.raises(InvalidParamsError) as exc_info: + proto_utils.FromProto.task_push_notification_config(request) + assert 'Bad TaskPushNotificationConfig resource name' in str( + exc_info.value + ) + + def test_none_handling(self): + """Test that None inputs are handled gracefully.""" + assert proto_utils.ToProto.message(None) is None + assert proto_utils.ToProto.metadata(None) is None + assert proto_utils.ToProto.provider(None) is None + assert proto_utils.ToProto.security(None) is None + assert proto_utils.ToProto.security_schemes(None) is None + + def test_metadata_conversion(self): + """Test metadata conversion with various data types.""" + metadata = { + 'null_value': None, + 'bool_value': True, + 'int_value': 42, + 'float_value': 3.14, + 'string_value': 'hello', + 'dict_value': {'nested': 'dict', 'count': 10}, + 'list_value': [1, 'two', 3.0, True, None], + 'tuple_value': (1, 2, 3), + 'complex_list': [ + {'name': 'item1', 'values': [1, 2, 3]}, + {'name': 'item2', 'values': [4, 5, 6]}, + ], + } + + # Convert to proto + proto_metadata = proto_utils.ToProto.metadata(metadata) + assert proto_metadata is not None + + # Convert back to Python + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Verify all values are preserved correctly + assert roundtrip_metadata['null_value'] is None + assert roundtrip_metadata['bool_value'] is True + assert roundtrip_metadata['int_value'] == 42 + assert roundtrip_metadata['float_value'] == 3.14 + assert roundtrip_metadata['string_value'] == 'hello' + assert roundtrip_metadata['dict_value']['nested'] == 'dict' + assert roundtrip_metadata['dict_value']['count'] == 10 + assert roundtrip_metadata['list_value'] == [1, 'two', 3.0, True, None] + assert roundtrip_metadata['tuple_value'] == [ + 1, + 2, + 3, + ] # tuples become lists + assert len(roundtrip_metadata['complex_list']) == 2 + assert roundtrip_metadata['complex_list'][0]['name'] == 'item1' + + def test_metadata_with_custom_objects(self): + """Test metadata conversion with custom objects using preprocessing utility.""" + + class CustomObject: + def __str__(self): + return 'custom_object_str' + + def __repr__(self): + return 'CustomObject()' + + metadata = { + 'custom_obj': CustomObject(), + 'list_with_custom': [1, CustomObject(), 'text'], + 'nested_custom': {'obj': CustomObject(), 'normal': 'value'}, + } + + # Use preprocessing utility to make it serializable + serializable_metadata = proto_utils.make_dict_serializable(metadata) + + # Convert to proto + proto_metadata = proto_utils.ToProto.metadata(serializable_metadata) + assert proto_metadata is not None + + # Convert back to Python + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Custom objects should be converted to strings + assert roundtrip_metadata['custom_obj'] == 'custom_object_str' + assert roundtrip_metadata['list_with_custom'] == [ + 1, + 'custom_object_str', + 'text', + ] + assert roundtrip_metadata['nested_custom']['obj'] == 'custom_object_str' + assert roundtrip_metadata['nested_custom']['normal'] == 'value' + + def test_metadata_edge_cases(self): + """Test metadata conversion with edge cases.""" + metadata = { + 'empty_dict': {}, + 'empty_list': [], + 'zero': 0, + 'false': False, + 'empty_string': '', + 'unicode_string': 'string test', + 'safe_number': 9007199254740991, # JavaScript MAX_SAFE_INTEGER + 'negative_number': -42, + 'float_precision': 0.123456789, + 'numeric_string': '12345', + } + + # Convert to proto and back + proto_metadata = proto_utils.ToProto.metadata(metadata) + roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) + + # Verify edge cases are handled correctly + assert roundtrip_metadata['empty_dict'] == {} + assert roundtrip_metadata['empty_list'] == [] + assert roundtrip_metadata['zero'] == 0 + assert roundtrip_metadata['false'] is False + assert roundtrip_metadata['empty_string'] == '' + assert roundtrip_metadata['unicode_string'] == 'string test' + assert roundtrip_metadata['safe_number'] == 9007199254740991 + assert roundtrip_metadata['negative_number'] == -42 + assert abs(roundtrip_metadata['float_precision'] - 0.123456789) < 1e-10 + assert roundtrip_metadata['numeric_string'] == '12345' + + def test_make_dict_serializable(self): + """Test the make_dict_serializable utility function.""" + + class CustomObject: + def __str__(self): + return 'custom_str' + + test_data = { + 'string': 'hello', + 'int': 42, + 'float': 3.14, + 'bool': True, + 'none': None, + 'custom': CustomObject(), + 'list': [1, 'two', CustomObject()], + 'tuple': (1, 2, CustomObject()), + 'nested': {'inner_custom': CustomObject(), 'inner_normal': 'value'}, + } + + result = proto_utils.make_dict_serializable(test_data) + + # Basic types should be unchanged + assert result['string'] == 'hello' + assert result['int'] == 42 + assert result['float'] == 3.14 + assert result['bool'] is True + assert result['none'] is None + + # Custom objects should be converted to strings + assert result['custom'] == 'custom_str' + assert result['list'] == [1, 'two', 'custom_str'] + assert result['tuple'] == [1, 2, 'custom_str'] # tuples become lists + assert result['nested']['inner_custom'] == 'custom_str' + assert result['nested']['inner_normal'] == 'value' + + def test_normalize_large_integers_to_strings(self): + """Test the normalize_large_integers_to_strings utility function.""" + + test_data = { + 'small_int': 42, + 'large_int': 9999999999999999999, # > 15 digits + 'negative_large': -9999999999999999999, + 'float': 3.14, + 'string': 'hello', + 'list': [123, 9999999999999999999, 'text'], + 'nested': {'inner_large': 9999999999999999999, 'inner_small': 100}, + } + + result = proto_utils.normalize_large_integers_to_strings(test_data) + + # Small integers should remain as integers + assert result['small_int'] == 42 + assert isinstance(result['small_int'], int) + + # Large integers should be converted to strings + assert result['large_int'] == '9999999999999999999' + assert isinstance(result['large_int'], str) + assert result['negative_large'] == '-9999999999999999999' + assert isinstance(result['negative_large'], str) + + # Other types should be unchanged + assert result['float'] == 3.14 + assert result['string'] == 'hello' + + # Lists should be processed recursively + assert result['list'] == [123, '9999999999999999999', 'text'] + + # Nested dicts should be processed recursively + assert result['nested']['inner_large'] == '9999999999999999999' + assert result['nested']['inner_small'] == 100 + + def test_parse_string_integers_in_dict(self): + """Test the parse_string_integers_in_dict utility function.""" + + test_data = { + 'regular_string': 'hello', + 'numeric_string_small': '123', # small, should stay as string + 'numeric_string_large': '9999999999999999999', # > 15 digits, should become int + 'negative_large_string': '-9999999999999999999', + 'float_string': '3.14', # not all digits, should stay as string + 'mixed_string': '123abc', # not all digits, should stay as string + 'int': 42, + 'list': ['hello', '9999999999999999999', '123'], + 'nested': { + 'inner_large_string': '9999999999999999999', + 'inner_regular': 'value', + }, + } + + result = proto_utils.parse_string_integers_in_dict(test_data) + + # Regular strings should remain unchanged + assert result['regular_string'] == 'hello' + assert ( + result['numeric_string_small'] == '123' + ) # too small, stays string + assert result['float_string'] == '3.14' # not all digits + assert result['mixed_string'] == '123abc' # not all digits + + # Large numeric strings should be converted to integers + assert result['numeric_string_large'] == 9999999999999999999 + assert isinstance(result['numeric_string_large'], int) + assert result['negative_large_string'] == -9999999999999999999 + assert isinstance(result['negative_large_string'], int) + + # Other types should be unchanged + assert result['int'] == 42 + + # Lists should be processed recursively + assert result['list'] == ['hello', 9999999999999999999, '123'] + + # Nested dicts should be processed recursively + assert result['nested']['inner_large_string'] == 9999999999999999999 + assert result['nested']['inner_regular'] == 'value' + + def test_large_integer_roundtrip_with_utilities(self): + """Test large integer handling with preprocessing and post-processing utilities.""" + + original_data = { + 'large_int': 9999999999999999999, + 'small_int': 42, + 'nested': {'another_large': 12345678901234567890, 'normal': 'text'}, + } + + # Step 1: Preprocess to convert large integers to strings + preprocessed = proto_utils.normalize_large_integers_to_strings( + original_data + ) + + # Step 2: Convert to proto + proto_metadata = proto_utils.ToProto.metadata(preprocessed) + assert proto_metadata is not None + + # Step 3: Convert back from proto + dict_from_proto = proto_utils.FromProto.metadata(proto_metadata) + + # Step 4: Post-process to convert large integer strings back to integers + final_result = proto_utils.parse_string_integers_in_dict( + dict_from_proto + ) + + # Verify roundtrip preserved the original data + assert final_result['large_int'] == 9999999999999999999 + assert isinstance(final_result['large_int'], int) + assert final_result['small_int'] == 42 + assert final_result['nested']['another_large'] == 12345678901234567890 + assert isinstance(final_result['nested']['another_large'], int) + assert final_result['nested']['normal'] == 'text' + + def test_task_conversion_roundtrip( + self, sample_task: types.Task, sample_message: types.Message + ): + """Test conversion of Task to proto and back.""" + proto_task = proto_utils.ToProto.task(sample_task) + assert isinstance(proto_task, a2a_pb2.Task) + + roundtrip_task = proto_utils.FromProto.task(proto_task) + assert roundtrip_task.id == 'task-1' + assert roundtrip_task.context_id == 'ctx-1' + assert roundtrip_task.status == types.TaskStatus( + state=types.TaskState.working, message=sample_message + ) + assert roundtrip_task.history == sample_task.history + assert roundtrip_task.artifacts == [ + types.Artifact( + artifact_id='art-1', + description='', + metadata={}, + name='', + parts=[ + types.Part(root=types.TextPart(text='Artifact content')) + ], + ) + ] + assert roundtrip_task.metadata == {'source': 'test'} + + def test_agent_card_conversion_roundtrip( + self, sample_agent_card: types.AgentCard + ): + """Test conversion of AgentCard to proto and back.""" + proto_card = proto_utils.ToProto.agent_card(sample_agent_card) + assert isinstance(proto_card, a2a_pb2.AgentCard) + + roundtrip_card = proto_utils.FromProto.agent_card(proto_card) + assert roundtrip_card.name == 'Test Agent' + assert roundtrip_card.description == 'A test agent' + assert roundtrip_card.url == 'http://localhost' + assert roundtrip_card.version == '1.0.0' + assert roundtrip_card.capabilities == types.AgentCapabilities( + extensions=[], streaming=True, push_notifications=True + ) + assert roundtrip_card.default_input_modes == ['text/plain'] + assert roundtrip_card.default_output_modes == ['text/plain'] + assert roundtrip_card.skills == [ + types.AgentSkill( + id='skill1', + name='Test Skill', + description='A test skill', + tags=['test'], + examples=[], + input_modes=[], + output_modes=[], + ) + ] + assert roundtrip_card.provider == types.AgentProvider( + organization='Test Org', url='http://test.org' + ) + assert roundtrip_card.security == [{'oauth_scheme': ['read', 'write']}] + + # Normalized version of security_schemes. None fields are filled with defaults. + expected_security_schemes = { + 'oauth_scheme': types.SecurityScheme( + root=types.OAuth2SecurityScheme( + description='', + flows=types.OAuthFlows( + client_credentials=types.ClientCredentialsOAuthFlow( + refresh_url='', + scopes={ + 'write': 'Write access', + 'read': 'Read access', + }, + token_url='http://token.url', + ), + ), + ) + ), + 'apiKey': types.SecurityScheme( + root=types.APIKeySecurityScheme( + description='', + in_=types.In.header, + name='X-API-KEY', + ) + ), + 'httpAuth': types.SecurityScheme( + root=types.HTTPAuthSecurityScheme( + bearer_format='', + description='', + scheme='bearer', + ) + ), + 'oidc': types.SecurityScheme( + root=types.OpenIdConnectSecurityScheme( + description='', + open_id_connect_url='http://oidc.url', + ) + ), + } + assert roundtrip_card.security_schemes == expected_security_schemes + assert roundtrip_card.signatures == [ + types.AgentCardSignature( + protected='protected_test', + signature='signature_test', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, + ), + ] + + @pytest.mark.parametrize( + 'signature_data, expected_data', + [ + ( + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256'}, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={'alg': 'ES256'}, + ), + ), + ( + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header=None, + ), + types.AgentCardSignature( + protected='protected_val', + signature='signature_val', + header={}, + ), + ), + ( + types.AgentCardSignature( + protected='', + signature='', + header={}, + ), + types.AgentCardSignature( + protected='', + signature='', + header={}, + ), + ), + ], + ) + def test_agent_card_signature_conversion_roundtrip( + self, signature_data, expected_data + ): + """Test conversion of AgentCardSignature to proto and back.""" + proto_signature = proto_utils.ToProto.agent_card_signature( + signature_data + ) + assert isinstance(proto_signature, a2a_pb2.AgentCardSignature) + roundtrip_signature = proto_utils.FromProto.agent_card_signature( + proto_signature + ) + assert roundtrip_signature == expected_data + + def test_roundtrip_message_with_file_bytes(self): + """Test round-trip conversion of Message with FileWithBytes.""" + file_content = b'binary data' + b64_content = base64.b64encode(file_content).decode('utf-8') + message = types.Message( + message_id='msg-bytes', + role=types.Role.user, + parts=[ + types.Part( + root=types.FilePart( + file=types.FileWithBytes( + bytes=b64_content, + name='file.bin', + mime_type='application/octet-stream', + ) + ) + ) + ], + metadata={}, + ) + + proto_msg = proto_utils.ToProto.message(message) + # Current implementation just encodes the string to bytes + assert proto_msg.content[0].file.file_with_bytes == b64_content.encode( + 'utf-8' + ) + + roundtrip_msg = proto_utils.FromProto.message(proto_msg) + assert roundtrip_msg.message_id == message.message_id + assert roundtrip_msg.role == message.role + assert roundtrip_msg.metadata == message.metadata + assert ( + roundtrip_msg.parts[0].root.file.bytes + == message.parts[0].root.file.bytes + ) diff --git a/tests/compat/v0_3/test_request_handler.py b/tests/compat/v0_3/test_request_handler.py new file mode 100644 index 000000000..26ad74264 --- /dev/null +++ b/tests/compat/v0_3/test_request_handler.py @@ -0,0 +1,389 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.request_handler import RequestHandler03 +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + ListTaskPushNotificationConfigsResponse as V10ListPushConfigsResp, + Message as V10Message, + Part as V10Part, + Task as V10Task, + TaskPushNotificationConfig as V10PushConfig, + TaskState as V10TaskState, + TaskStatus as V10TaskStatus, +) +from a2a.utils.errors import TaskNotFoundError + + +@pytest.fixture +def mock_core_handler(): + handler = AsyncMock(spec=RequestHandler) + + handler.agent_card = AgentCard( + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ) + ) + return handler + + +@pytest.fixture +def v03_handler(mock_core_handler): + return RequestHandler03(request_handler=mock_core_handler) + + +@pytest.fixture +def mock_context(): + return MagicMock(spec=ServerCallContext) + + +@pytest.mark.anyio +async def test_on_message_send_returns_message( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.SendMessageRequest( + id='req-1', + method='message/send', + params=types_v03.MessageSendParams( + message=types_v03.Message( + message_id='msg-1', + role='user', + parts=[types_v03.TextPart(text='Hello')], + ) + ), + ) + + mock_core_handler.on_message_send.return_value = V10Message( + message_id='msg-2', role=2, parts=[V10Part(text='Hi there')] + ) + + result = await v03_handler.on_message_send(v03_req, mock_context) + + assert isinstance(result, types_v03.Message) + assert result.message_id == 'msg-2' + assert result.role == 'agent' + assert len(result.parts) == 1 + assert result.parts[0].root.text == 'Hi there' + + +@pytest.mark.anyio +async def test_on_message_send_returns_task( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.SendMessageRequest( + id='req-1', + method='message/send', + params=types_v03.MessageSendParams( + message=types_v03.Message( + message_id='msg-1', + role='user', + parts=[types_v03.TextPart(text='Hello')], + ) + ), + ) + + mock_core_handler.on_message_send.return_value = V10Task( + id='task-1', + context_id='ctx-1', + status=V10TaskStatus(state=V10TaskState.TASK_STATE_WORKING), + ) + + result = await v03_handler.on_message_send(v03_req, mock_context) + + assert isinstance(result, types_v03.Task) + assert result.id == 'task-1' + assert result.context_id == 'ctx-1' + assert result.status.state == 'working' + + +@pytest.mark.anyio +async def test_on_message_send_stream( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.SendMessageRequest( + id='req-1', + method='message/send', + params=types_v03.MessageSendParams( + message=types_v03.Message( + message_id='msg-1', + role='user', + parts=[types_v03.TextPart(text='Hello')], + ) + ), + ) + + async def mock_stream(*args, **kwargs): + yield V10Message( + message_id='msg-2', + role=2, + parts=[V10Part(text='Chunk 1')], + ) + yield V10Message( + message_id='msg-2', + role=2, + parts=[V10Part(text='Chunk 2')], + ) + + mock_core_handler.on_message_send_stream.side_effect = mock_stream + + results = [ + chunk + async for chunk in v03_handler.on_message_send_stream( + v03_req, mock_context + ) + ] + + assert len(results) == 2 + assert all( + isinstance(r, types_v03.SendStreamingMessageSuccessResponse) + for r in results + ) + assert results[0].result.parts[0].root.text == 'Chunk 1' + assert results[1].result.parts[0].root.text == 'Chunk 2' + + +@pytest.mark.anyio +async def test_on_cancel_task(v03_handler, mock_core_handler, mock_context): + v03_req = types_v03.CancelTaskRequest( + id='req-1', + method='tasks/cancel', + params=types_v03.TaskIdParams(id='task-1'), + ) + + mock_core_handler.on_cancel_task.return_value = V10Task( + id='task-1', + status=V10TaskStatus(state=V10TaskState.TASK_STATE_CANCELED), + ) + + result = await v03_handler.on_cancel_task(v03_req, mock_context) + + assert isinstance(result, types_v03.Task) + assert result.id == 'task-1' + assert result.status.state == 'canceled' + + +@pytest.mark.anyio +async def test_on_cancel_task_not_found( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.CancelTaskRequest( + id='req-1', + method='tasks/cancel', + params=types_v03.TaskIdParams(id='task-1'), + ) + + mock_core_handler.on_cancel_task.return_value = None + + with pytest.raises(TaskNotFoundError): + await v03_handler.on_cancel_task(v03_req, mock_context) + + +@pytest.mark.anyio +async def test_on_subscribe_to_task( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.TaskResubscriptionRequest( + id='req-1', + method='tasks/resubscribe', + params=types_v03.TaskIdParams(id='task-1'), + ) + + async def mock_stream(*args, **kwargs): + yield V10Message( + message_id='msg-2', + role=2, + parts=[V10Part(text='Update 1')], + ) + + mock_core_handler.on_subscribe_to_task.side_effect = mock_stream + + results = [ + chunk + async for chunk in v03_handler.on_subscribe_to_task( + v03_req, mock_context + ) + ] + + assert len(results) == 1 + assert results[0].result.parts[0].root.text == 'Update 1' + + +@pytest.mark.anyio +async def test_on_get_task_push_notification_config( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.GetTaskPushNotificationConfigRequest( + id='req-1', + method='tasks/pushNotificationConfig/get', + params=types_v03.GetTaskPushNotificationConfigParams( + id='task-1', push_notification_config_id='push-1' + ), + ) + + mock_core_handler.on_get_task_push_notification_config.return_value = ( + V10PushConfig(id='push-1', url='http://example.com') + ) + + result = await v03_handler.on_get_task_push_notification_config( + v03_req, mock_context + ) + + assert isinstance(result, types_v03.TaskPushNotificationConfig) + assert result.push_notification_config.id == 'push-1' + assert result.push_notification_config.url == 'http://example.com' + + +@pytest.mark.anyio +async def test_on_create_task_push_notification_config( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.SetTaskPushNotificationConfigRequest( + id='req-1', + method='tasks/pushNotificationConfig/set', + params=types_v03.TaskPushNotificationConfig( + task_id='task-1', + push_notification_config=types_v03.PushNotificationConfig( + url='http://example.com' + ), + ), + ) + + mock_core_handler.on_create_task_push_notification_config.return_value = ( + V10PushConfig(id='push-1', url='http://example.com') + ) + + result = await v03_handler.on_create_task_push_notification_config( + v03_req, mock_context + ) + + assert isinstance(result, types_v03.TaskPushNotificationConfig) + assert result.push_notification_config.id == 'push-1' + assert result.push_notification_config.url == 'http://example.com' + + +@pytest.mark.anyio +async def test_on_get_task(v03_handler, mock_core_handler, mock_context): + v03_req = types_v03.GetTaskRequest( + id='req-1', + method='tasks/get', + params=types_v03.TaskQueryParams(id='task-1'), + ) + + mock_core_handler.on_get_task.return_value = V10Task( + id='task-1', status=V10TaskStatus(state=V10TaskState.TASK_STATE_WORKING) + ) + + result = await v03_handler.on_get_task(v03_req, mock_context) + + assert isinstance(result, types_v03.Task) + assert result.id == 'task-1' + assert result.status.state == 'working' + + +@pytest.mark.anyio +async def test_on_get_task_not_found( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.GetTaskRequest( + id='req-1', + method='tasks/get', + params=types_v03.TaskQueryParams(id='task-1'), + ) + + mock_core_handler.on_get_task.return_value = None + + with pytest.raises(TaskNotFoundError): + await v03_handler.on_get_task(v03_req, mock_context) + + +@pytest.mark.anyio +async def test_on_list_task_push_notification_configs( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.ListTaskPushNotificationConfigRequest( + id='req-1', + method='tasks/pushNotificationConfig/list', + params=types_v03.ListTaskPushNotificationConfigParams(id='task-1'), + ) + + mock_core_handler.on_list_task_push_notification_configs.return_value = ( + V10ListPushConfigsResp( + configs=[ + V10PushConfig(id='push-1', url='http://example1.com'), + V10PushConfig(id='push-2', url='http://example2.com'), + ] + ) + ) + + result = await v03_handler.on_list_task_push_notification_configs( + v03_req, mock_context + ) + + assert isinstance(result, list) + assert len(result) == 2 + assert result[0].push_notification_config.id == 'push-1' + assert result[1].push_notification_config.id == 'push-2' + + +@pytest.mark.anyio +async def test_on_delete_task_push_notification_config( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.DeleteTaskPushNotificationConfigRequest( + id='req-1', + method='tasks/pushNotificationConfig/delete', + params=types_v03.DeleteTaskPushNotificationConfigParams( + id='task-1', push_notification_config_id='push-1' + ), + ) + + mock_core_handler.on_delete_task_push_notification_config.return_value = ( + None + ) + + result = await v03_handler.on_delete_task_push_notification_config( + v03_req, mock_context + ) + + assert result is None + mock_core_handler.on_delete_task_push_notification_config.assert_called_once() + + +@pytest.mark.anyio +async def test_on_get_extended_agent_card_success( + v03_handler, mock_core_handler, mock_context +): + v03_req = types_v03.GetAuthenticatedExtendedCardRequest(id=0) + + mock_core_handler.on_get_extended_agent_card.return_value = AgentCard( + name='Extended Agent', + description='An extended test agent', + version='1.0.0', + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.v03.com', + protocol_version='0.3', + ) + ], + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), + ) + + result = await v03_handler.on_get_extended_agent_card(v03_req, mock_context) + + assert isinstance(result, types_v03.AgentCard) + assert result.name == 'Extended Agent' + assert result.capabilities.streaming is True + assert result.capabilities.push_notifications is True + mock_core_handler.on_get_extended_agent_card.assert_called_once() diff --git a/tests/compat/v0_3/test_rest_handler.py b/tests/compat/v0_3/test_rest_handler.py new file mode 100644 index 000000000..6ff44abb1 --- /dev/null +++ b/tests/compat/v0_3/test_rest_handler.py @@ -0,0 +1,399 @@ +import json + +from unittest.mock import AsyncMock, MagicMock + +import pytest + +from a2a.compat.v0_3 import types as types_v03 +from a2a.compat.v0_3.rest_handler import REST03Handler +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import AgentCard + + +@pytest.fixture +def mock_core_handler(): + return AsyncMock(spec=RequestHandler) + + +@pytest.fixture +def agent_card(): + card = MagicMock(spec=AgentCard) + card.capabilities = MagicMock() + card.capabilities.streaming = True + card.capabilities.push_notifications = True + return card + + +@pytest.fixture +def rest_handler(agent_card, mock_core_handler): + handler = REST03Handler(request_handler=mock_core_handler) + # Mock the internal handler03 for easier testing of translations + handler.handler03 = AsyncMock() + return handler + + +@pytest.fixture +def mock_context(): + m = MagicMock(spec=ServerCallContext) + m.state = {'headers': {'A2A-Version': '0.3'}} + return m + + +@pytest.fixture +def mock_request(): + req = MagicMock() + req.path_params = {} + req.query_params = {} + return req + + +@pytest.mark.anyio +async def test_on_message_send(rest_handler, mock_request, mock_context): + request_body = { + 'request': { + 'messageId': 'msg-1', + 'role': 'ROLE_USER', + 'content': [{'text': 'Hello'}], + } + } + mock_request.body = AsyncMock( + return_value=json.dumps(request_body).encode('utf-8') + ) + + # Configure handler03 to return a types_v03.Message + rest_handler.handler03.on_message_send.return_value = types_v03.Message( + message_id='msg-2', role='agent', parts=[types_v03.TextPart(text='Hi')] + ) + + result = await rest_handler.on_message_send(mock_request, mock_context) + + assert result == { + 'message': { + 'messageId': 'msg-2', + 'role': 'ROLE_AGENT', + 'content': [{'text': 'Hi'}], + } + } + + rest_handler.handler03.on_message_send.assert_called_once() + called_req = rest_handler.handler03.on_message_send.call_args[0][0] + assert isinstance(called_req, types_v03.SendMessageRequest) + assert called_req.params.message.message_id == 'msg-1' + + +@pytest.mark.anyio +async def test_on_message_send_stream(rest_handler, mock_request, mock_context): + request_body = { + 'request': { + 'messageId': 'msg-1', + 'role': 'ROLE_USER', + 'content': [{'text': 'Hello'}], + } + } + mock_request.body = AsyncMock( + return_value=json.dumps(request_body).encode('utf-8') + ) + + async def mock_stream(*args, **kwargs): + yield types_v03.SendStreamingMessageSuccessResponse( + id='req-1', + result=types_v03.Message( + message_id='msg-2', + role='agent', + parts=[types_v03.TextPart(text='Chunk')], + ), + ) + + rest_handler.handler03.on_message_send_stream = MagicMock( + side_effect=mock_stream + ) + + results = [ + chunk + async for chunk in rest_handler.on_message_send_stream( + mock_request, mock_context + ) + ] + + assert results == [ + { + 'message': { + 'messageId': 'msg-2', + 'role': 'ROLE_AGENT', + 'content': [{'text': 'Chunk'}], + } + } + ] + + +@pytest.mark.anyio +async def test_on_cancel_task(rest_handler, mock_request, mock_context): + mock_request.path_params = {'id': 'task-1'} + + rest_handler.handler03.on_cancel_task.return_value = types_v03.Task( + id='task-1', + context_id='ctx-1', + status=types_v03.TaskStatus(state='canceled'), + ) + + result = await rest_handler.on_cancel_task(mock_request, mock_context) + + assert result == { + 'id': 'task-1', + 'contextId': 'ctx-1', + 'status': {'state': 'TASK_STATE_CANCELLED'}, + } + + rest_handler.handler03.on_cancel_task.assert_called_once() + called_req = rest_handler.handler03.on_cancel_task.call_args[0][0] + assert called_req.params.id == 'task-1' + + +@pytest.mark.anyio +async def test_on_subscribe_to_task(rest_handler, mock_request, mock_context): + mock_request.path_params = {'id': 'task-1'} + + async def mock_stream(*args, **kwargs): + yield types_v03.SendStreamingMessageSuccessResponse( + id='req-1', + result=types_v03.Message( + message_id='msg-2', + role='agent', + parts=[types_v03.TextPart(text='Update')], + ), + ) + + rest_handler.handler03.on_subscribe_to_task = MagicMock( + side_effect=mock_stream + ) + + results = [ + chunk + async for chunk in rest_handler.on_subscribe_to_task( + mock_request, mock_context + ) + ] + + assert results == [ + { + 'message': { + 'messageId': 'msg-2', + 'role': 'ROLE_AGENT', + 'content': [{'text': 'Update'}], + } + } + ] + + +@pytest.mark.anyio +async def test_on_subscribe_to_task_post( + rest_handler, mock_request, mock_context +): + mock_request.path_params = {'id': 'task-1'} + mock_request.method = 'POST' + request_body = {'name': 'tasks/task-1'} + mock_request.body = AsyncMock( + return_value=json.dumps(request_body).encode('utf-8') + ) + + async def mock_stream(*args, **kwargs): + yield types_v03.SendStreamingMessageSuccessResponse( + id='req-1', + result=types_v03.Message( + message_id='msg-2', + role='agent', + parts=[types_v03.TextPart(text='Update')], + ), + ) + + rest_handler.handler03.on_subscribe_to_task = MagicMock( + side_effect=mock_stream + ) + + results = [ + chunk + async for chunk in rest_handler.on_subscribe_to_task( + mock_request, mock_context + ) + ] + + assert len(results) == 1 + rest_handler.handler03.on_subscribe_to_task.assert_called_once() + called_req = rest_handler.handler03.on_subscribe_to_task.call_args[0][0] + assert called_req.params.id == 'task-1' + + +@pytest.mark.anyio +async def test_get_push_notification(rest_handler, mock_request, mock_context): + mock_request.path_params = {'id': 'task-1', 'push_id': 'push-1'} + + rest_handler.handler03.on_get_task_push_notification_config.return_value = ( + types_v03.TaskPushNotificationConfig( + task_id='task-1', + push_notification_config=types_v03.PushNotificationConfig( + id='push-1', url='http://example.com' + ), + ) + ) + + result = await rest_handler.get_push_notification( + mock_request, mock_context + ) + + assert result == { + 'name': 'tasks/task-1/pushNotificationConfigs/push-1', + 'pushNotificationConfig': { + 'id': 'push-1', + 'url': 'http://example.com', + }, + } + + +@pytest.mark.anyio +async def test_set_push_notification(rest_handler, mock_request, mock_context): + mock_request.path_params = {'id': 'task-1'} + request_body = { + 'parent': 'tasks/task-1', + 'config': {'pushNotificationConfig': {'url': 'http://example.com'}}, + } + mock_request.body = AsyncMock( + return_value=json.dumps(request_body).encode('utf-8') + ) + + rest_handler.handler03.on_create_task_push_notification_config.return_value = types_v03.TaskPushNotificationConfig( + task_id='task-1', + push_notification_config=types_v03.PushNotificationConfig( + id='push-1', url='http://example.com' + ), + ) + + result = await rest_handler.set_push_notification( + mock_request, mock_context + ) + + assert result == { + 'name': 'tasks/task-1/pushNotificationConfigs/push-1', + 'pushNotificationConfig': { + 'id': 'push-1', + 'url': 'http://example.com', + }, + } + + rest_handler.handler03.on_create_task_push_notification_config.assert_called_once() + called_req = rest_handler.handler03.on_create_task_push_notification_config.call_args[ + 0 + ][0] + assert called_req.params.task_id == 'task-1' + assert ( + called_req.params.push_notification_config.url == 'http://example.com' + ) + + +@pytest.mark.anyio +async def test_on_get_task(rest_handler, mock_request, mock_context): + mock_request.path_params = {'id': 'task-1'} + mock_request.query_params = {'historyLength': '5'} + + rest_handler.handler03.on_get_task.return_value = types_v03.Task( + id='task-1', + context_id='ctx-1', + status=types_v03.TaskStatus(state='working'), + ) + + result = await rest_handler.on_get_task(mock_request, mock_context) + + assert result == { + 'id': 'task-1', + 'contextId': 'ctx-1', + 'status': {'state': 'TASK_STATE_WORKING'}, + } + + rest_handler.handler03.on_get_task.assert_called_once() + called_req = rest_handler.handler03.on_get_task.call_args[0][0] + assert called_req.params.id == 'task-1' + assert called_req.params.history_length == 5 + + +@pytest.mark.anyio +async def test_list_push_notifications( + rest_handler, mock_request, mock_context +): + mock_request.path_params = {'id': 'task-1'} + rest_handler.handler03.on_list_task_push_notification_configs = AsyncMock( + return_value=[ + types_v03.TaskPushNotificationConfig( + task_id='task-1', + push_notification_config=types_v03.PushNotificationConfig( + id='push-1', + url='http://example.com/notify', + ), + ) + ] + ) + + result = await rest_handler.list_push_notifications( + mock_request, mock_context + ) + + assert result == { + 'configs': [ + { + 'name': 'tasks/task-1/pushNotificationConfigs/push-1', + 'pushNotificationConfig': { + 'id': 'push-1', + 'url': 'http://example.com/notify', + }, + } + ] + } + + rest_handler.handler03.on_list_task_push_notification_configs.assert_called_once() + called_req = ( + rest_handler.handler03.on_list_task_push_notification_configs.call_args[ + 0 + ][0] + ) + assert called_req.params.id == 'task-1' + + +@pytest.mark.anyio +async def test_list_tasks(rest_handler, mock_request, mock_context): + with pytest.raises(NotImplementedError): + await rest_handler.list_tasks(mock_request, mock_context) + + +# Add our new translation method test +@pytest.mark.anyio +async def test_on_get_extended_agent_card_success( + rest_handler, mock_request, mock_context +): + rest_handler.handler03.on_get_extended_agent_card.return_value = ( + types_v03.AgentCard( + name='Extended Agent', + description='An extended test agent', + version='1.0.0', + url='http://jsonrpc.v03.com', + preferred_transport='JSONRPC', + protocol_version='0.3', + default_input_modes=[], + default_output_modes=[], + skills=[], + capabilities=types_v03.AgentCapabilities( + streaming=True, + push_notifications=True, + ), + ) + ) + + result = await rest_handler.on_get_extended_agent_card( + mock_request, mock_context + ) + + # on_get_extended_agent_card returns a JSON-friendly dict via model_dump + assert isinstance(result, dict) + assert result['name'] == 'Extended Agent' + assert result['capabilities']['streaming'] is True + assert result['capabilities']['pushNotifications'] is True + + rest_handler.handler03.on_get_extended_agent_card.assert_called_once() diff --git a/tests/compat/v0_3/test_rest_routes_compat.py b/tests/compat/v0_3/test_rest_routes_compat.py new file mode 100644 index 000000000..b3b9e70b3 --- /dev/null +++ b/tests/compat/v0_3/test_rest_routes_compat.py @@ -0,0 +1,194 @@ +import logging + +from typing import Any +from unittest.mock import MagicMock + +import pytest + +from fastapi import FastAPI +from google.protobuf import json_format +from httpx import ASGITransport, AsyncClient +from starlette.applications import Starlette +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.routes import create_agent_card_routes +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import ( + AgentCard, + Message as Message10, + Part as Part10, + Role as Role10, + Task as Task10, + TaskStatus as TaskStatus10, + TaskState as TaskState10, +) +from a2a.compat.v0_3 import a2a_v0_3_pb2 + + +logger = logging.getLogger(__name__) + + +@pytest.fixture +async def agent_card() -> AgentCard: + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + + # Mock the capabilities object with streaming disabled + mock_capabilities = MagicMock() + mock_capabilities.streaming = False + mock_capabilities.push_notifications = True + mock_capabilities.extended_agent_card = True + mock_agent_card.capabilities = mock_capabilities + + return mock_agent_card + + +@pytest.fixture +async def request_handler() -> RequestHandler: + return MagicMock(spec=RequestHandler) + + +@pytest.fixture +async def app( + agent_card: AgentCard, + request_handler: RequestHandler, +) -> Starlette: + """Builds the Starlette application for testing.""" + request_handler._agent_card = agent_card + rest_routes = create_rest_routes( + request_handler=request_handler, enable_v0_3_compat=True + ) + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/well-known/agent.json' + ) + return Starlette(routes=rest_routes + agent_card_routes) + + +@pytest.fixture +async def client(app: FastAPI) -> AsyncClient: + return AsyncClient( + transport=ASGITransport(app=app), base_url='http://testapp' + ) + + +@pytest.mark.anyio +async def test_send_message_success_message_v03( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_v0_3_pb2.SendMessageResponse( + msg=a2a_v0_3_pb2.Message( + message_id='test', + role=a2a_v0_3_pb2.Role.ROLE_AGENT, + content=[a2a_v0_3_pb2.Part(text='response message')], + ), + ) + request_handler.on_message_send.return_value = Message10( + message_id='test', + role=Role10.ROLE_AGENT, + parts=[Part10(text='response message')], + ) + + request = a2a_v0_3_pb2.SendMessageRequest( + request=a2a_v0_3_pb2.Message( + message_id='req', + role=a2a_v0_3_pb2.Role.ROLE_USER, + content=[a2a_v0_3_pb2.Part(text='hello')], + ), + ) + + response = await client.post( + '/v1/message:send', json=json_format.MessageToDict(request) + ) + response.raise_for_status() + + actual_response = a2a_v0_3_pb2.SendMessageResponse() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +@pytest.mark.anyio +async def test_send_message_success_task_v03( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_v0_3_pb2.SendMessageResponse( + task=a2a_v0_3_pb2.Task( + id='test_task_id', + context_id='test_context_id', + status=a2a_v0_3_pb2.TaskStatus( + state=a2a_v0_3_pb2.TaskState.TASK_STATE_COMPLETED, + ), + ), + ) + request_handler.on_message_send.return_value = Task10( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus10( + state=TaskState10.TASK_STATE_COMPLETED, + ), + ) + + request = a2a_v0_3_pb2.SendMessageRequest( + request=a2a_v0_3_pb2.Message(), + ) + + response = await client.post( + '/v1/message:send', json=json_format.MessageToDict(request) + ) + response.raise_for_status() + + actual_response = a2a_v0_3_pb2.SendMessageResponse() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +@pytest.mark.anyio +async def test_get_task_v03( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_v0_3_pb2.Task( + id='test_task_id', + context_id='test_context_id', + status=a2a_v0_3_pb2.TaskStatus( + state=a2a_v0_3_pb2.TaskState.TASK_STATE_COMPLETED, + ), + ) + request_handler.on_get_task.return_value = Task10( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus10( + state=TaskState10.TASK_STATE_COMPLETED, + ), + ) + + response = await client.get('/v1/tasks/test_task_id') + response.raise_for_status() + + actual_response = a2a_v0_3_pb2.Task() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response + + +@pytest.mark.anyio +async def test_cancel_task_v03( + client: AsyncClient, request_handler: MagicMock +) -> None: + expected_response = a2a_v0_3_pb2.Task( + id='test_task_id', + context_id='test_context_id', + status=a2a_v0_3_pb2.TaskStatus( + state=a2a_v0_3_pb2.TaskState.TASK_STATE_CANCELLED, + ), + ) + request_handler.on_cancel_task.return_value = Task10( + id='test_task_id', + context_id='test_context_id', + status=TaskStatus10( + state=TaskState10.TASK_STATE_CANCELED, + ), + ) + + response = await client.post('/v1/tasks/test_task_id:cancel') + response.raise_for_status() + + actual_response = a2a_v0_3_pb2.Task() + json_format.Parse(response.text, actual_response) + assert expected_response == actual_response diff --git a/tests/compat/v0_3/test_rest_transport.py b/tests/compat/v0_3/test_rest_transport.py new file mode 100644 index 000000000..2bea70f42 --- /dev/null +++ b/tests/compat/v0_3/test_rest_transport.py @@ -0,0 +1,666 @@ +import json + +from unittest.mock import AsyncMock, MagicMock, patch + +import httpx +import pytest + +from a2a.client.errors import A2AClientError +from a2a.compat.v0_3.rest_transport import CompatRestTransport +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + Message, + Role, + SendMessageRequest, + SendMessageResponse, + StreamResponse, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, +) +from a2a.utils.errors import InvalidParamsError, MethodNotFoundError + + +@pytest.fixture +def mock_httpx_client(): + return AsyncMock(spec=httpx.AsyncClient) + + +@pytest.fixture +def agent_card(): + return AgentCard(capabilities=AgentCapabilities(extended_agent_card=True)) + + +@pytest.fixture +def transport(mock_httpx_client, agent_card): + return CompatRestTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://example.com', + ) + + +@pytest.mark.asyncio +async def test_compat_rest_transport_send_message_response_msg_parsing( + transport, +): + mock_response = MagicMock(spec=httpx.Response) + mock_response.json.return_value = { + 'msg': {'messageId': 'msg-123', 'role': 'agent'} + } + + async def mock_send_request(*args, **kwargs): + return mock_response.json() + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + + expected_response = SendMessageResponse( + message=Message(message_id='msg-123', role=Role.ROLE_AGENT) + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_send_message_task(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.json.return_value = {'task': {'id': 'task-123'}} + + async def mock_send_request(*args, **kwargs): + return mock_response.json() + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + + expected_response = SendMessageResponse( + task=Task(id='task-123', status=Task(id='task-123').status) + ) + # The default conversion from 0.3 task generates a TaskStatus with a default empty message with role=ROLE_AGENT + expected_response.task.status.message.role = Role.ROLE_AGENT + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_task(transport): + async def mock_send_request(*args, **kwargs): + return {'id': 'task-123'} + + transport._send_request = mock_send_request + + req = GetTaskRequest(id='task-123') + response = await transport.get_task(req) + + expected_response = Task(id='task-123') + expected_response.status.message.role = Role.ROLE_AGENT + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_cancel_task(transport): + async def mock_send_request(*args, **kwargs): + return {'id': 'task-123'} + + transport._send_request = mock_send_request + + req = CancelTaskRequest(id='task-123') + response = await transport.cancel_task(req) + + expected_response = Task(id='task-123') + expected_response.status.message.role = Role.ROLE_AGENT + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_create_task_push_notification_config( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'name': 'tasks/task-123/pushNotificationConfigs/push-123', + 'pushNotificationConfig': {'url': 'http://push', 'id': 'push-123'}, + } + + transport._send_request = mock_send_request + + req = TaskPushNotificationConfig( + task_id='task-123', id='push-123', url='http://push' + ) + response = await transport.create_task_push_notification_config(req) + + expected_response = TaskPushNotificationConfig( + id='push-123', task_id='task-123', url='http://push' + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_task_push_notification_config( + transport, +): + async def mock_send_request(*args, **kwargs): + return { + 'name': 'tasks/task-123/pushNotificationConfigs/push-123', + 'pushNotificationConfig': {'url': 'http://push', 'id': 'push-123'}, + } + + transport._send_request = mock_send_request + + req = GetTaskPushNotificationConfigRequest( + task_id='task-123', id='push-123' + ) + response = await transport.get_task_push_notification_config(req) + + expected_response = TaskPushNotificationConfig( + id='push-123', task_id='task-123', url='http://push' + ) + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_extended_agent_card(transport): + async def mock_send_request(*args, **kwargs): + return { + 'name': 'ExtendedAgent', + 'capabilities': {}, + 'supportsAuthenticatedExtendedCard': True, + } + + transport._send_request = mock_send_request + + req = GetExtendedAgentCardRequest() + response = await transport.get_extended_agent_card(req) + + assert response.name == 'ExtendedAgent' + assert response.capabilities.extended_agent_card is True + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_extended_agent_card_not_supported( + transport, +): + transport.agent_card.capabilities.extended_agent_card = False + + req = GetExtendedAgentCardRequest() + response = await transport.get_extended_agent_card(req) + + assert response == transport.agent_card + + +@pytest.mark.asyncio +async def test_compat_rest_transport_close(transport, mock_httpx_client): + await transport.close() + mock_httpx_client.aclose.assert_called_once() + + +@pytest.mark.asyncio +async def test_compat_rest_transport_send_message_streaming(transport): + async def mock_send_stream_request(*args, **kwargs): + task = Task(id='task-123') + task.status.message.role = Role.ROLE_AGENT + yield StreamResponse(task=task) + yield StreamResponse(message=Message(message_id='msg-123')) + + transport._send_stream_request = mock_send_stream_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + events = [event async for event in transport.send_message_streaming(req)] + + assert len(events) == 2 + expected_task = Task(id='task-123') + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + assert events[1] == StreamResponse(message=Message(message_id='msg-123')) + + +def create_405_error(): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 405 + mock_response.json.return_value = { + 'type': 'MethodNotAllowed', + 'message': 'Method Not Allowed', + } + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com/v1/tasks/task-123:subscribe' + + status_error = httpx.HTTPStatusError( + '405 Method Not Allowed', request=mock_request, response=mock_response + ) + raise A2AClientError('HTTP Error 405') from status_error + + +def create_500_error(): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.json.return_value = { + 'type': 'InternalError', + 'message': 'Internal Error', + } + mock_request = MagicMock(spec=httpx.Request) + + status_error = httpx.HTTPStatusError( + '500 Internal Error', request=mock_request, response=mock_response + ) + raise A2AClientError('HTTP Error 500') from status_error + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_post_works_no_retry(transport): + """Scenario: POST works, no retry.""" + + async def mock_stream(method, path, context=None, json=None): + assert method == 'POST' + assert json is None + task = Task(id='task-123') + task.status.message.role = Role.ROLE_AGENT + yield StreamResponse(task=task) + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + events = [event async for event in transport.subscribe(req)] + + assert len(events) == 1 + expected_task = Task(id='task-123') + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + assert transport._subscribe_method_override is None + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_post_405_retry_get_success( + transport, +): + """Scenario: POST returns 405, automatic retry GET. Second call uses GET directly.""" + call_count = 0 + + async def mock_stream(method, path, context=None, json=None): + nonlocal call_count + call_count += 1 + if method == 'POST': + assert json is None + create_405_error() + if method == 'GET': + assert json is None + task = Task(id='task-123') + task.status.message.role = Role.ROLE_AGENT + yield StreamResponse(task=task) + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + events = [event async for event in transport.subscribe(req)] + + assert len(events) == 1 + assert call_count == 2 + assert transport._subscribe_method_override == 'GET' + + # Second call should use GET directly + call_count = 0 + events = [event async for event in transport.subscribe(req)] + assert len(events) == 1 + assert call_count == 1 # Only GET called + assert transport._subscribe_method_override == 'GET' + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_post_405_get_405_fails( + transport, +): + """Scenario: POST return 405, retry GET, return 405 - error. Second call is just POST.""" + + method_count = {} + + async def mock_stream(method, path, context=None, json=None): + method_count[method] = method_count.get(method, 0) + 1 + if method in {'POST', 'GET'}: + assert json is None + # To make it an async generator even when it raises + if False: + yield + create_405_error() + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + with pytest.raises(A2AClientError) as exc_info: + [event async for event in transport.subscribe(req)] + + assert '405' in str(exc_info.value) + assert transport._subscribe_method_override == 'POST' + assert method_count == {'POST': 1, 'GET': 1} + assert transport._subscribe_auto_method_override is False + + # Second call should try POST directly and fail without retry + with pytest.raises(A2AClientError): + [event async for event in transport.subscribe(req)] + assert transport._subscribe_auto_method_override is False + assert transport._subscribe_method_override == 'POST' + assert method_count == {'POST': 2, 'GET': 1} + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_post_500_no_retry(transport): + """Scenario: POST return 500, no automatic retry.""" + call_count = 0 + + async def mock_stream(method, path, context=None, json=None): + nonlocal call_count + call_count += 1 + assert method == 'POST' + assert json is None + if False: + yield + create_500_error() + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + with pytest.raises(A2AClientError) as exc_info: + [event async for event in transport.subscribe(req)] + + assert '500' in str(exc_info.value) + assert call_count == 1 # No retry on 500 + assert transport._subscribe_method_override is None + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_method_override_avoids_retry_get( + mock_httpx_client, agent_card +): + """Scenario: Init with GET override, server returns 405, no automatic retry.""" + transport = CompatRestTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://example.com', + subscribe_method_override='GET', + ) + call_count = 0 + + async def mock_stream(method, path, context=None, json=None): + nonlocal call_count + call_count += 1 + assert method == 'GET' + assert json is None + if False: + yield + create_405_error() + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + with pytest.raises(A2AClientError) as exc_info: + [event async for event in transport.subscribe(req)] + + assert '405' in str(exc_info.value) + assert call_count == 1 + + +@pytest.mark.asyncio +async def test_compat_rest_transport_subscribe_method_override_avoids_retry_post( + mock_httpx_client, agent_card +): + """Scenario: Init with POST override, server returns 405, no automatic retry.""" + transport = CompatRestTransport( + httpx_client=mock_httpx_client, + agent_card=agent_card, + url='http://example.com', + subscribe_method_override='POST', + ) + call_count = 0 + + async def mock_stream(method, path, context=None, json=None): + nonlocal call_count + call_count += 1 + assert method == 'POST' + assert json is None + if False: + yield + create_405_error() + + transport._send_stream_request = mock_stream + + req = SubscribeToTaskRequest(id='task-123') + with pytest.raises(A2AClientError) as exc_info: + [event async for event in transport.subscribe(req)] + + assert '405' in str(exc_info.value) + assert call_count == 1 + + +def test_compat_rest_transport_handle_http_error(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 400 + mock_response.json.return_value = { + 'type': 'InvalidParamsError', + 'message': 'Invalid parameters', + } + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(InvalidParamsError) as exc_info: + transport._handle_http_error(error) + + assert str(exc_info.value) == 'Invalid parameters' + + +def test_compat_rest_transport_handle_http_error_not_found(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 404 + mock_response.json.side_effect = json.JSONDecodeError('msg', 'doc', 0) + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(MethodNotFoundError): + transport._handle_http_error(error) + + +def test_compat_rest_transport_handle_http_error_generic(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.json.side_effect = json.JSONDecodeError('msg', 'doc', 0) + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(A2AClientError): + transport._handle_http_error(error) + + +@pytest.mark.asyncio +async def test_compat_rest_transport_list_tasks(transport): + with pytest.raises(NotImplementedError): + await transport.list_tasks(ListTasksRequest()) + + +@pytest.mark.asyncio +async def test_compat_rest_transport_list_task_push_notification_configs( + transport, +): + with pytest.raises(NotImplementedError): + await transport.list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest() + ) + + +@pytest.mark.asyncio +async def test_compat_rest_transport_delete_task_push_notification_config( + transport, +): + with pytest.raises(NotImplementedError): + await transport.delete_task_push_notification_config( + DeleteTaskPushNotificationConfigRequest() + ) + + +@pytest.mark.asyncio +async def test_compat_rest_transport_send_message_empty(transport): + async def mock_send_request(*args, **kwargs): + return {} + + transport._send_request = mock_send_request + + req = SendMessageRequest( + message=Message(message_id='msg-1', role=Role.ROLE_USER) + ) + + response = await transport.send_message(req) + assert response == SendMessageResponse() + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_task_no_history(transport): + async def mock_execute_request(method, path, context=None, params=None): + assert 'historyLength' not in params + return {'id': 'task-123'} + + transport._execute_request = mock_execute_request + + req = GetTaskRequest(id='task-123') + response = await transport.get_task(req) + expected_response = Task(id='task-123') + expected_response.status.message.role = Role.ROLE_AGENT + assert response == expected_response + + +@pytest.mark.asyncio +async def test_compat_rest_transport_get_task_with_history(transport): + async def mock_execute_request(method, path, context=None, params=None): + assert params['historyLength'] == 10 + return {'id': 'task-123'} + + transport._execute_request = mock_execute_request + + req = GetTaskRequest(id='task-123', history_length=10) + response = await transport.get_task(req) + expected_response = Task(id='task-123') + expected_response.status.message.role = Role.ROLE_AGENT + assert response == expected_response + + +def test_compat_rest_transport_handle_http_error_invalid_error_type(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.json.return_value = { + 'type': 123, + 'message': 'Invalid parameters', + } + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(A2AClientError): + transport._handle_http_error(error) + + +def test_compat_rest_transport_handle_http_error_unknown_error_type(transport): + mock_response = MagicMock(spec=httpx.Response) + mock_response.status_code = 500 + mock_response.json.return_value = { + 'type': 'SomeUnknownErrorClass', + 'message': 'Unknown', + } + + mock_request = MagicMock(spec=httpx.Request) + mock_request.url = 'http://example.com' + + error = httpx.HTTPStatusError( + 'Error', request=mock_request, response=mock_response + ) + + with pytest.raises(A2AClientError): + transport._handle_http_error(error) + + +@pytest.mark.asyncio +@patch('a2a.compat.v0_3.rest_transport.send_http_stream_request') +async def test_compat_rest_transport_send_stream_request( + mock_send_http_stream_request, transport +): + async def mock_generator(*args, **kwargs): + yield b'{"task": {"id": "task-123"}}' + + mock_send_http_stream_request.return_value = mock_generator() + + events = [ + event async for event in transport._send_stream_request('POST', '/test') + ] + + assert len(events) == 1 + expected_task = Task(id='task-123') + expected_task.status.message.role = Role.ROLE_AGENT + assert events[0] == StreamResponse(task=expected_task) + + mock_send_http_stream_request.assert_called_once_with( + transport.httpx_client, + 'POST', + 'http://example.com/test', + transport._handle_http_error, + json=None, + headers={'a2a-version': '0.3'}, + ) + + +@pytest.mark.asyncio +@patch('a2a.compat.v0_3.rest_transport.send_http_request') +async def test_compat_rest_transport_execute_request( + mock_send_http_request, transport +): + mock_send_http_request.return_value = {'ok': True} + mock_request = httpx.Request('POST', 'http://example.com') + transport.httpx_client.build_request.return_value = mock_request + + res = await transport._execute_request( + 'POST', '/test', json={'some': 'data'} + ) + assert res == {'ok': True} + + # Assert httpx client build_request was called correctly + transport.httpx_client.build_request.assert_called_once_with( + 'POST', + 'http://example.com/test', + json={'some': 'data'}, + params=None, + headers={'a2a-version': '0.3'}, + ) + mock_send_http_request.assert_called_once_with( + transport.httpx_client, mock_request, transport._handle_http_error + ) diff --git a/tests/compat/v0_3/test_versions.py b/tests/compat/v0_3/test_versions.py new file mode 100644 index 000000000..058b9ffdf --- /dev/null +++ b/tests/compat/v0_3/test_versions.py @@ -0,0 +1,27 @@ +"""Tests for version utility functions.""" + +import pytest + +from a2a.compat.v0_3.versions import is_legacy_version + + +@pytest.mark.parametrize( + 'version, expected', + [ + ('0.3', True), + ('0.3.0', True), + ('0.9', True), + ('0.9.9', True), + ('1.0', False), + ('1.0.0', False), + ('1.1', False), + ('0.2', False), + ('0.2.9', False), + (None, False), + ('', False), + ('invalid', False), + ('v0.3', True), + ], +) +def test_is_legacy_version(version, expected): + assert is_legacy_version(version) == expected diff --git a/tests/contrib/tasks/test_vertex_task_converter.py b/tests/contrib/tasks/test_vertex_task_converter.py index 4c2cec9d7..3d260c599 100644 --- a/tests/contrib/tasks/test_vertex_task_converter.py +++ b/tests/contrib/tasks/test_vertex_task_converter.py @@ -21,7 +21,7 @@ to_stored_task, to_stored_task_state, ) -from a2a.types import ( +from a2a.compat.v0_3.types import ( Artifact, DataPart, FilePart, diff --git a/tests/contrib/tasks/test_vertex_task_store.py b/tests/contrib/tasks/test_vertex_task_store.py index e7d31f435..c77493022 100644 --- a/tests/contrib/tasks/test_vertex_task_store.py +++ b/tests/contrib/tasks/test_vertex_task_store.py @@ -22,6 +22,8 @@ import pytest import pytest_asyncio +from .fake_vertex_client import FakeVertexClient + # Skip the entire test module if vertexai is not installed pytest.importorskip( @@ -40,7 +42,6 @@ 'VERTEX_API_VERSION', ] ) -import sys @pytest.fixture( @@ -61,7 +62,8 @@ def backend_type(request) -> str: from a2a.contrib.tasks.vertex_task_store import VertexTaskStore -from a2a.types import ( +from a2a.server.context import ServerCallContext +from a2a.types.a2a_pb2 import ( Artifact, Message, Part, @@ -69,21 +71,16 @@ def backend_type(request) -> str: Task, TaskState, TaskStatus, - TextPart, ) -# Minimal Task object for testing - remains the same -task_status_submitted = TaskStatus(state=TaskState.submitted) +# Minimal Task object for testing MINIMAL_TASK_OBJ = Task( id='task-abc', context_id='session-xyz', - status=task_status_submitted, - kind='task', - metadata={'test_key': 'test_value'}, - artifacts=[], - history=[], + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) +MINIMAL_TASK_OBJ.metadata['test_key'] = 'test_value' from collections.abc import Generator @@ -121,9 +118,6 @@ async def vertex_store( reusing the module-scoped engine. Uses fake client for 'fake' backend. """ if backend_type == 'fake': - sys.path.append(os.path.dirname(__file__)) - from fake_vertex_client import FakeVertexClient - client = FakeVertexClient() else: project = os.environ.get('VERTEX_PROJECT') @@ -145,30 +139,37 @@ async def vertex_store( @pytest.mark.asyncio async def test_save_task(vertex_store: VertexTaskStore) -> None: """Test saving a task to the VertexTaskStore.""" - task_to_save = MINIMAL_TASK_OBJ.model_copy(deep=True) # Ensure unique ID for parameterized tests if needed, or rely on table isolation + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) task_to_save.id = 'save-test-task-2' - await vertex_store.save(task_to_save) + await vertex_store.save(task_to_save, ServerCallContext()) - retrieved_task = await vertex_store.get(task_to_save.id) + retrieved_task = await vertex_store.get( + task_to_save.id, ServerCallContext() + ) assert retrieved_task is not None assert retrieved_task.id == task_to_save.id - assert retrieved_task.model_dump() == task_to_save.model_dump() + assert retrieved_task == task_to_save @pytest.mark.asyncio async def test_get_task(vertex_store: VertexTaskStore) -> None: """Test retrieving a task from the VertexTaskStore.""" task_id = 'get-test-task-1' - task_to_save = MINIMAL_TASK_OBJ.model_copy(update={'id': task_id}) - await vertex_store.save(task_to_save) + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) + task_to_save.id = task_id + await vertex_store.save(task_to_save, ServerCallContext()) - retrieved_task = await vertex_store.get(task_to_save.id) + retrieved_task = await vertex_store.get( + task_to_save.id, ServerCallContext() + ) assert retrieved_task is not None assert retrieved_task.id == task_to_save.id assert retrieved_task.context_id == task_to_save.context_id - assert retrieved_task.status.state == TaskState.submitted + assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED @pytest.mark.asyncio @@ -176,7 +177,9 @@ async def test_get_nonexistent_task( vertex_store: VertexTaskStore, ) -> None: """Test retrieving a nonexistent task.""" - retrieved_task = await vertex_store.get('nonexistent-task-id') + retrieved_task = await vertex_store.get( + 'nonexistent-task-id', ServerCallContext() + ) assert retrieved_task is None @@ -190,32 +193,28 @@ async def test_save_and_get_detailed_task( id=task_id, context_id='test-session-1', status=TaskStatus( - state=TaskState.submitted, + state=TaskState.TASK_STATE_SUBMITTED, ), - kind='task', - metadata={'key1': 'value1', 'key2': 123}, artifacts=[ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ) ], ) + test_task.metadata['key1'] = 'value1' + test_task.metadata['key2'] = 123 - await vertex_store.save(test_task) - retrieved_task = await vertex_store.get(test_task.id) + await vertex_store.save(test_task, ServerCallContext()) + retrieved_task = await vertex_store.get(test_task.id, ServerCallContext()) assert retrieved_task is not None assert retrieved_task.id == test_task.id assert retrieved_task.context_id == test_task.context_id - assert retrieved_task.status.state == TaskState.submitted - assert retrieved_task.metadata == {'key1': 'value1', 'key2': 123} - - # Pydantic models handle their own serialization for comparison if model_dump is used - assert ( - retrieved_task.model_dump()['artifacts'] - == test_task.model_dump()['artifacts'] - ) + assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED + assert retrieved_task.metadata['key1'] == 'value1' + assert retrieved_task.metadata['key2'] == 123 + assert retrieved_task.artifacts == test_task.artifacts @pytest.mark.asyncio @@ -227,29 +226,34 @@ async def test_update_task_status_and_metadata( original_task = Task( id=task_id, context_id='session-update', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), artifacts=[], history=[], ) - await vertex_store.save(original_task) + await vertex_store.save(original_task, ServerCallContext()) - retrieved_before_update = await vertex_store.get(task_id) + retrieved_before_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.state == TaskState.submitted + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) assert retrieved_before_update.metadata == {} - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.completed - updated_task.status.timestamp = '2023-01-02T11:00:00Z' - updated_task.metadata = {'update_key': 'update_value'} + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_COMPLETED + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + updated_task.metadata.update({'update_key': 'update_value'}) - await vertex_store.save(updated_task) + await vertex_store.save(updated_task, ServerCallContext()) - retrieved_after_update = await vertex_store.get(task_id) + retrieved_after_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.completed + assert retrieved_after_update.status.state == TaskState.TASK_STATE_COMPLETED assert retrieved_after_update.metadata == {'update_key': 'update_value'} @@ -260,49 +264,54 @@ async def test_update_task_add_artifact(vertex_store: VertexTaskStore) -> None: original_task = Task( id=task_id, context_id='session-update', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), artifacts=[ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ) ], history=[], ) - await vertex_store.save(original_task) + await vertex_store.save(original_task, ServerCallContext()) - retrieved_before_update = await vertex_store.get(task_id) + retrieved_before_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.state == TaskState.submitted + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) assert retrieved_before_update.metadata == {} - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.working - updated_task.status.timestamp = '2023-01-02T11:00:00Z' + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') updated_task.artifacts.append( Artifact( artifact_id='artifact-2', - parts=[Part(root=TextPart(text='world'))], + parts=[Part(text='world')], ) ) - await vertex_store.save(updated_task) + await vertex_store.save(updated_task, ServerCallContext()) - retrieved_after_update = await vertex_store.get(task_id) + retrieved_after_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.working + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING assert retrieved_after_update.artifacts == [ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ), Artifact( artifact_id='artifact-2', - parts=[Part(root=TextPart(text='world'))], + parts=[Part(text='world')], ), ] @@ -316,48 +325,53 @@ async def test_update_task_update_artifact( original_task = Task( id=task_id, context_id='session-update', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, # Explicitly None + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), artifacts=[ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ), Artifact( artifact_id='artifact-2', - parts=[Part(root=TextPart(text='world'))], + parts=[Part(text='world')], ), ], history=[], ) - await vertex_store.save(original_task) + await vertex_store.save(original_task, ServerCallContext()) - retrieved_before_update = await vertex_store.get(task_id) + retrieved_before_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.state == TaskState.submitted + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) assert retrieved_before_update.metadata == {} - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.working - updated_task.status.timestamp = '2023-01-02T11:00:00Z' + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') - updated_task.artifacts[0].parts[0].root.text = 'ahoy' + updated_task.artifacts[0].parts[0].text = 'ahoy' - await vertex_store.save(updated_task) + await vertex_store.save(updated_task, ServerCallContext()) - retrieved_after_update = await vertex_store.get(task_id) + retrieved_after_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.working + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING assert retrieved_after_update.artifacts == [ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='ahoy'))], + parts=[Part(text='ahoy')], ), Artifact( artifact_id='artifact-2', - parts=[Part(root=TextPart(text='world'))], + parts=[Part(text='world')], ), ] @@ -371,44 +385,49 @@ async def test_update_task_delete_artifact( original_task = Task( id=task_id, context_id='session-update', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), artifacts=[ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ), Artifact( artifact_id='artifact-2', - parts=[Part(root=TextPart(text='world'))], + parts=[Part(text='world')], ), ], history=[], ) - await vertex_store.save(original_task) + await vertex_store.save(original_task, ServerCallContext()) - retrieved_before_update = await vertex_store.get(task_id) + retrieved_before_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.state == TaskState.submitted + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) assert retrieved_before_update.metadata == {} - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.working - updated_task.status.timestamp = '2023-01-02T11:00:00Z' + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_WORKING + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') del updated_task.artifacts[1] - await vertex_store.save(updated_task) + await vertex_store.save(updated_task, ServerCallContext()) - retrieved_after_update = await vertex_store.get(task_id) + retrieved_after_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.working + assert retrieved_after_update.status.state == TaskState.TASK_STATE_WORKING assert retrieved_after_update.artifacts == [ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ) ] @@ -417,7 +436,7 @@ async def test_update_task_delete_artifact( async def test_metadata_field_mapping( vertex_store: VertexTaskStore, ) -> None: - """Test that metadata field is correctly mapped between Pydantic and SQLAlchemy. + """Test that metadata field is correctly mapped between the core types and vertex. This test verifies: 1. Metadata can be None @@ -430,12 +449,12 @@ async def test_metadata_field_mapping( task_no_metadata = Task( id='task-metadata-test-1', context_id='session-meta-1', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + await vertex_store.save(task_no_metadata, ServerCallContext()) + retrieved_no_metadata = await vertex_store.get( + 'task-metadata-test-1', ServerCallContext() ) - await vertex_store.save(task_no_metadata) - retrieved_no_metadata = await vertex_store.get('task-metadata-test-1') assert retrieved_no_metadata is not None assert retrieved_no_metadata.metadata == {} @@ -444,12 +463,13 @@ async def test_metadata_field_mapping( task_simple_metadata = Task( id='task-metadata-test-2', context_id='session-meta-2', - status=TaskStatus(state=TaskState.submitted), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), metadata=simple_metadata, ) - await vertex_store.save(task_simple_metadata) - retrieved_simple = await vertex_store.get('task-metadata-test-2') + await vertex_store.save(task_simple_metadata, ServerCallContext()) + retrieved_simple = await vertex_store.get( + 'task-metadata-test-2', ServerCallContext() + ) assert retrieved_simple is not None assert retrieved_simple.metadata == simple_metadata @@ -469,12 +489,13 @@ async def test_metadata_field_mapping( task_complex_metadata = Task( id='task-metadata-test-3', context_id='session-meta-3', - status=TaskStatus(state=TaskState.submitted), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), metadata=complex_metadata, ) - await vertex_store.save(task_complex_metadata) - retrieved_complex = await vertex_store.get('task-metadata-test-3') + await vertex_store.save(task_complex_metadata, ServerCallContext()) + retrieved_complex = await vertex_store.get( + 'task-metadata-test-3', ServerCallContext() + ) assert retrieved_complex is not None assert retrieved_complex.metadata == complex_metadata @@ -482,17 +503,20 @@ async def test_metadata_field_mapping( task_update_metadata = Task( id='task-metadata-test-4', context_id='session-meta-4', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - await vertex_store.save(task_update_metadata) + await vertex_store.save(task_update_metadata, ServerCallContext()) # Update metadata - task_update_metadata.metadata = {'updated': True, 'timestamp': '2024-01-01'} - await vertex_store.save(task_update_metadata) + task_update_metadata.metadata.Clear() + task_update_metadata.metadata.update( + {'updated': True, 'timestamp': '2024-01-01'} + ) + await vertex_store.save(task_update_metadata, ServerCallContext()) - retrieved_updated = await vertex_store.get('task-metadata-test-4') + retrieved_updated = await vertex_store.get( + 'task-metadata-test-4', ServerCallContext() + ) assert retrieved_updated is not None assert retrieved_updated.metadata == { 'updated': True, @@ -500,10 +524,12 @@ async def test_metadata_field_mapping( } # Test 5: Update metadata from dict to None - task_update_metadata.metadata = None - await vertex_store.save(task_update_metadata) + task_update_metadata.metadata.Clear() + await vertex_store.save(task_update_metadata, ServerCallContext()) - retrieved_none = await vertex_store.get('task-metadata-test-4') + retrieved_none = await vertex_store.get( + 'task-metadata-test-4', ServerCallContext() + ) assert retrieved_none is not None assert retrieved_none.metadata == {} @@ -519,38 +545,43 @@ async def test_metadata_empty_transitions( task = Task( id=task_id, context_id='session-meta-empty', - status=TaskStatus(state=TaskState.submitted), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), metadata={}, ) - await vertex_store.save(task) + await vertex_store.save(task, ServerCallContext()) full_name = f'{vertex_store._agent_engine_resource_id}/a2aTasks/{task_id}' # Get initial event sequence number stored_task_before = ( - await vertex_store._client.aio.agent_engines.a2a_tasks.get(full_name) + await vertex_store._client.aio.agent_engines.a2a_tasks.get( + name=full_name + ) ) initial_seq = stored_task_before.next_event_sequence_number # Step 2: Update metadata to None - updated_task = task.model_copy(deep=True) - updated_task.metadata = None - await vertex_store.save(updated_task) + updated_task = Task() + updated_task.CopyFrom(task) + updated_task.metadata.Clear() + await vertex_store.save(updated_task, ServerCallContext()) # Step 3: Update back to {} - task_back = updated_task.model_copy(deep=True) + task_back = Task() + task_back.CopyFrom(updated_task) task_back.metadata = {} - await vertex_store.save(task_back) + await vertex_store.save(task_back, ServerCallContext()) # Verify that retrieved task still has {} (due to mapping) - retrieved = await vertex_store.get(task_id) + retrieved = await vertex_store.get(task_id, ServerCallContext()) assert retrieved is not None assert retrieved.metadata == {} # Verify that next_event_sequence_number did NOT increase (no events generated) stored_task_after = ( - await vertex_store._client.aio.agent_engines.a2a_tasks.get(full_name) + await vertex_store._client.aio.agent_engines.a2a_tasks.get( + name=full_name + ) ) assert stored_task_after.next_event_sequence_number == initial_seq @@ -564,56 +595,60 @@ async def test_update_task_status_details( original_task = Task( id=task_id, context_id='session-update', - status=TaskStatus(state=TaskState.submitted), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), metadata=None, artifacts=[], history=[], ) - await vertex_store.save(original_task) + await vertex_store.save(original_task, ServerCallContext()) - retrieved_before_update = await vertex_store.get(task_id) + retrieved_before_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.message is None - - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.failed - updated_task.status.timestamp = '2023-01-02T11:00:00Z' - updated_task.status.message = Message( - message_id='msg-error-1', - role=Role.agent, - parts=[ - Part( - root=TextPart( + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_FAILED + updated_task.status.timestamp.FromJsonString('2023-01-02T11:00:00Z') + updated_task.status.message.CopyFrom( + Message( + message_id='msg-error-1', + role=Role.ROLE_AGENT, + parts=[ + Part( text='Task failed due to an unknown error', metadata={'error_code': 'UNKNOWN', 'retryable': False}, ) - ) - ], + ], + ) ) - await vertex_store.save(updated_task) + await vertex_store.save(updated_task, ServerCallContext()) - retrieved_after_update = await vertex_store.get(task_id) + retrieved_after_update = await vertex_store.get( + task_id, ServerCallContext() + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.failed + assert retrieved_after_update.status.state == TaskState.TASK_STATE_FAILED assert retrieved_after_update.status.message is not None assert retrieved_after_update.status.message.message_id == 'msg-error-1' - assert retrieved_after_update.status.message.role == Role.agent + assert retrieved_after_update.status.message.role == Role.ROLE_AGENT assert len(retrieved_after_update.status.message.parts) == 1 - assert isinstance( - retrieved_after_update.status.message.parts[0].root, TextPart - ) - text_part = retrieved_after_update.status.message.parts[0].root - assert text_part.text == 'Task failed due to an unknown error' - assert text_part.metadata == {'error_code': 'UNKNOWN', 'retryable': False} + part = retrieved_after_update.status.message.parts[0] + assert part.text == 'Task failed due to an unknown error' + assert part.metadata == {'error_code': 'UNKNOWN', 'retryable': False} # Also test clearing the message - cleared_task = updated_task.model_copy(deep=True) - cleared_task.status.message = None + cleared_task = Task() + cleared_task.CopyFrom(updated_task) + cleared_task.status.ClearField('message') - await vertex_store.save(cleared_task) - retrieved_cleared = await vertex_store.get(task_id) + await vertex_store.save(cleared_task, ServerCallContext()) + retrieved_cleared = await vertex_store.get(task_id, ServerCallContext()) assert retrieved_cleared is not None - assert retrieved_cleared.status.message is None + assert not retrieved_cleared.status.HasField('message') diff --git a/tests/e2e/__init__.py b/tests/e2e/__init__.py new file mode 100644 index 000000000..4a701e914 --- /dev/null +++ b/tests/e2e/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2025 Google LLC +# SPDX-License-Identifier: Apache-2.0 +"""E2E tests package.""" diff --git a/tests/e2e/push_notifications/__init__.py b/tests/e2e/push_notifications/__init__.py new file mode 100644 index 000000000..b75e37d3d --- /dev/null +++ b/tests/e2e/push_notifications/__init__.py @@ -0,0 +1,3 @@ +# Copyright 2025 Google LLC +# SPDX-License-Identifier: Apache-2.0 +"""Push notifications e2e tests package.""" diff --git a/tests/e2e/push_notifications/agent_app.py b/tests/e2e/push_notifications/agent_app.py index 1fa9bc546..bc95f6c37 100644 --- a/tests/e2e/push_notifications/agent_app.py +++ b/tests/e2e/push_notifications/agent_app.py @@ -3,8 +3,11 @@ from fastapi import FastAPI from a2a.server.agent_execution import AgentExecutor, RequestContext -from a2a.server.apps import A2ARESTFastAPIApplication +from a2a.server.context import ServerCallContext from a2a.server.events import EventQueue +from starlette.applications import Starlette +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.routes import create_agent_card_routes from a2a.server.request_handlers import DefaultRequestHandler from a2a.server.tasks import ( BasePushNotificationSender, @@ -12,19 +15,19 @@ InMemoryTaskStore, TaskUpdater, ) -from a2a.types import ( +from a2a.types import InvalidParamsError +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, + AgentInterface, AgentSkill, - InvalidParamsError, Message, Task, ) -from a2a.utils import ( - new_agent_text_message, - new_task, +from a2a.helpers.proto_helpers import ( + new_text_message, + new_task_from_user_message, ) -from a2a.utils.errors import ServerError def test_agent_card(url: str) -> AgentCard: @@ -32,11 +35,14 @@ def test_agent_card(url: str) -> AgentCard: return AgentCard( name='Test Agent', description='Just a test agent', - url=url, version='1.0.0', default_input_modes=['text'], default_output_modes=['text'], - capabilities=AgentCapabilities(streaming=True, push_notifications=True), + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), skills=[ AgentSkill( id='greeting', @@ -46,7 +52,12 @@ def test_agent_card(url: str) -> AgentCard: examples=['Hello Agent!', 'How are you?'], ) ], - supports_authenticated_extended_card=True, + supported_interfaces=[ + AgentInterface( + url=url, + protocol_binding='HTTP+JSON', + ) + ], ) @@ -60,38 +71,36 @@ async def invoke( if ( not msg.parts or len(msg.parts) != 1 - or msg.parts[0].root.kind != 'text' + or not msg.parts[0].HasField('text') ): await updater.failed( - new_agent_text_message( + new_text_message( 'Unsupported message.', task.context_id, task.id ) ) return - text_message = msg.parts[0].root.text + text_message = msg.parts[0].text # Simple request-response flow. if text_message == 'Hello Agent!': await updater.complete( - new_agent_text_message('Hello User!', task.context_id, task.id) + new_text_message('Hello User!', task.context_id, task.id) ) # Flow with user input required: "How are you?" -> "Good! How are you?" -> "Good" -> "Amazing". elif text_message == 'How are you?': await updater.requires_input( - new_agent_text_message( - 'Good! How are you?', task.context_id, task.id - ) + new_text_message('Good! How are you?', task.context_id, task.id) ) elif text_message == 'Good': await updater.complete( - new_agent_text_message('Amazing', task.context_id, task.id) + new_text_message('Amazing', task.context_id, task.id) ) # Fail for unsupported messages. else: await updater.failed( - new_agent_text_message( + new_text_message( 'Unsupported message.', task.context_id, task.id ) ) @@ -109,11 +118,11 @@ async def execute( event_queue: EventQueue, ) -> None: if not context.message: - raise ServerError(error=InvalidParamsError(message='No message')) + raise InvalidParamsError(message='No message') task = context.current_task if not task: - task = new_task(context.message) + task = new_task_from_user_message(context.message) await event_queue.enqueue_event(task) updater = TaskUpdater(event_queue, task.id, task.context_id) @@ -127,19 +136,26 @@ async def cancel( def create_agent_app( url: str, notification_client: httpx.AsyncClient -) -> FastAPI: - """Creates a new HTTP+REST FastAPI application for the test agent.""" +) -> Starlette: + """Creates a new HTTP+REST Starlette application for the test agent.""" push_config_store = InMemoryPushNotificationConfigStore() - app = A2ARESTFastAPIApplication( - agent_card=test_agent_card(url), - http_handler=DefaultRequestHandler( - agent_executor=TestAgentExecutor(), - task_store=InMemoryTaskStore(), - push_config_store=push_config_store, - push_sender=BasePushNotificationSender( - httpx_client=notification_client, - config_store=push_config_store, - ), + card = test_agent_card(url) + extended_card = test_agent_card(url) + extended_card.name = 'Test Agent Extended' + handler = DefaultRequestHandler( + agent_executor=TestAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=card, + extended_agent_card=extended_card, + push_config_store=push_config_store, + push_sender=BasePushNotificationSender( + httpx_client=notification_client, + config_store=push_config_store, + context=ServerCallContext(), ), ) - return app.build() + rest_routes = create_rest_routes(request_handler=handler) + agent_card_routes = create_agent_card_routes( + agent_card=card, card_url='/.well-known/agent-card.json' + ) + return Starlette(routes=[*rest_routes, *agent_card_routes]) diff --git a/tests/e2e/push_notifications/notifications_app.py b/tests/e2e/push_notifications/notifications_app.py index c12e98096..e8c56be22 100644 --- a/tests/e2e/push_notifications/notifications_app.py +++ b/tests/e2e/push_notifications/notifications_app.py @@ -1,17 +1,18 @@ import asyncio -from typing import Annotated +from typing import Annotated, Any from fastapi import FastAPI, HTTPException, Path, Request -from pydantic import BaseModel, ValidationError +from pydantic import BaseModel, ConfigDict, ValidationError -from a2a.types import Task +from a2a.types.a2a_pb2 import StreamResponse, Task +from google.protobuf.json_format import ParseDict, MessageToDict class Notification(BaseModel): """Encapsulates default push notification data.""" - task: Task + event: dict[str, Any] token: str @@ -33,16 +34,35 @@ async def add_notification(request: Request): detail='Missing "x-a2a-notification-token" header.', ) try: - task = Task.model_validate(await request.json()) - except ValidationError as e: + json_data = await request.json() + stream_response = ParseDict(json_data, StreamResponse()) + + payload_name = stream_response.WhichOneof('payload') + task_id = None + if payload_name: + event_payload = getattr(stream_response, payload_name) + # The 'Task' message uses 'id', while event messages use 'task_id'. + task_id = getattr( + event_payload, 'task_id', getattr(event_payload, 'id', None) + ) + + if not task_id: + raise HTTPException( + status_code=400, + detail='Missing "task_id" in push notification.', + ) + + except Exception as e: raise HTTPException(status_code=400, detail=str(e)) async with store_lock: - if task.id not in store: - store[task.id] = [] - store[task.id].append( + if task_id not in store: + store[task_id] = [] + store[task_id].append( Notification( - task=task, + event=MessageToDict( + stream_response, preserving_proto_field_name=True + ), token=token, ) ) @@ -50,7 +70,7 @@ async def add_notification(request: Request): 'status': 'received', } - @app.get('/tasks/{task_id}/notifications') + @app.get('/{task_id}/notifications') async def list_notifications_by_task( task_id: Annotated[ str, Path(title='The ID of the task to list the notifications for.') diff --git a/tests/e2e/push_notifications/test_default_push_notification_support.py b/tests/e2e/push_notifications/test_default_push_notification_support.py index d7364b840..35e4bbeb4 100644 --- a/tests/e2e/push_notifications/test_default_push_notification_support.py +++ b/tests/e2e/push_notifications/test_default_push_notification_support.py @@ -6,9 +6,9 @@ import pytest import pytest_asyncio -from agent_app import create_agent_app -from notifications_app import Notification, create_notifications_app -from utils import ( +from .agent_app import create_agent_app +from .notifications_app import Notification, create_notifications_app +from .utils import ( create_app_process, find_free_port, wait_for_server_ready, @@ -19,16 +19,17 @@ ClientFactory, minimal_agent_card, ) -from a2a.types import ( +from a2a.utils.constants import TransportProtocol +from a2a.types.a2a_pb2 import ( Message, Part, - PushNotificationConfig, + TaskPushNotificationConfig, Role, + SendMessageConfiguration, + SendMessageRequest, Task, TaskPushNotificationConfig, TaskState, - TextPart, - TransportProtocol, ) @@ -74,7 +75,9 @@ def agent_server(notifications_client: httpx.AsyncClient): ) process.start() try: - wait_for_server_ready(f'{url}/v1/card') + wait_for_server_ready( + f'{url}/extendedAgentCard', headers={'A2A-Version': '1.0'} + ) except TimeoutError as e: process.terminate() raise e @@ -105,42 +108,53 @@ async def test_notification_triggering_with_in_message_config_e2e( token = uuid.uuid4().hex a2a_client = ClientFactory( ClientConfig( - supported_transports=[TransportProtocol.http_json], - push_notification_configs=[ - PushNotificationConfig( - id='in-message-config', - url=f'{notifications_server}/notifications', - token=token, - ) - ], + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], + push_notification_config=TaskPushNotificationConfig( + id='in-message-config', + url=f'{notifications_server}/notifications', + token=token, + ), ) - ).create(minimal_agent_card(agent_server, [TransportProtocol.http_json])) + ).create(minimal_agent_card(agent_server, [TransportProtocol.HTTP_JSON])) # Send a message and extract the returned task. responses = [ response async for response in a2a_client.send_message( - Message( - message_id='hello-agent', - parts=[Part(root=TextPart(text='Hello Agent!'))], - role=Role.user, + SendMessageRequest( + message=Message( + message_id='hello-agent', + parts=[Part(text='Hello Agent!')], + role=Role.ROLE_USER, + ) ) ) ] assert len(responses) == 1 - assert isinstance(responses[0], tuple) - assert isinstance(responses[0][0], Task) - task = responses[0][0] + stream_response = responses[0] + assert stream_response.HasField('task') + task = stream_response.task # Verify a single notification was sent. notifications = await wait_for_n_notifications( http_client, - f'{notifications_server}/tasks/{task.id}/notifications', - n=1, + f'{notifications_server}/{task.id}/notifications', + n=2, ) assert notifications[0].token == token - assert notifications[0].task.id == task.id - assert notifications[0].task.status.state == 'completed' + + # Verify exactly two consecutive events: SUBMITTED -> COMPLETED + assert len(notifications) == 2 + + # 1. First event: SUBMITTED (Task) + event0 = notifications[0].event + state0 = event0['task'].get('status', {}).get('state') + assert state0 == 'TASK_STATE_SUBMITTED' + + # 2. Second event: COMPLETED (TaskStatusUpdateEvent) + event1 = notifications[1].event + state1 = event1['status_update'].get('status', {}).get('state') + assert state1 == 'TASK_STATE_COMPLETED' @pytest.mark.asyncio @@ -153,44 +167,45 @@ async def test_notification_triggering_after_config_change_e2e( # Configure an A2A client without a push notification config. a2a_client = ClientFactory( ClientConfig( - supported_transports=[TransportProtocol.http_json], + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], ) - ).create(minimal_agent_card(agent_server, [TransportProtocol.http_json])) + ).create(minimal_agent_card(agent_server, [TransportProtocol.HTTP_JSON])) # Send a message and extract the returned task. responses = [ response async for response in a2a_client.send_message( - Message( - message_id='how-are-you', - parts=[Part(root=TextPart(text='How are you?'))], - role=Role.user, + SendMessageRequest( + message=Message( + message_id='how-are-you', + parts=[Part(text='How are you?')], + role=Role.ROLE_USER, + ), + configuration=SendMessageConfiguration(), ) ) ] assert len(responses) == 1 - assert isinstance(responses[0], tuple) - assert isinstance(responses[0][0], Task) - task = responses[0][0] - assert task.status.state == TaskState.input_required + stream_response = responses[0] + assert stream_response.HasField('task') + task = stream_response.task + assert task.status.state == TaskState.TASK_STATE_INPUT_REQUIRED # Verify that no notification has been sent yet. response = await http_client.get( - f'{notifications_server}/tasks/{task.id}/notifications' + f'{notifications_server}/{task.id}/notifications' ) assert response.status_code == 200 assert len(response.json().get('notifications', [])) == 0 # Set the push notification config. token = uuid.uuid4().hex - await a2a_client.set_task_callback( + await a2a_client.create_task_push_notification_config( TaskPushNotificationConfig( - task_id=task.id, - push_notification_config=PushNotificationConfig( - id='after-config-change', - url=f'{notifications_server}/notifications', - token=token, - ), + task_id=f'{task.id}', + id='after-config-change', + url=f'{notifications_server}/notifications', + token=token, ) ) @@ -198,11 +213,14 @@ async def test_notification_triggering_after_config_change_e2e( responses = [ response async for response in a2a_client.send_message( - Message( - task_id=task.id, - message_id='good', - parts=[Part(root=TextPart(text='Good'))], - role=Role.user, + SendMessageRequest( + message=Message( + task_id=task.id, + message_id='good', + parts=[Part(text='Good')], + role=Role.ROLE_USER, + ), + configuration=SendMessageConfiguration(), ) ) ] @@ -211,11 +229,12 @@ async def test_notification_triggering_after_config_change_e2e( # Verify that the push notification was sent. notifications = await wait_for_n_notifications( http_client, - f'{notifications_server}/tasks/{task.id}/notifications', + f'{notifications_server}/{task.id}/notifications', n=1, ) - assert notifications[0].task.id == task.id - assert notifications[0].task.status.state == 'completed' + event = notifications[0].event + state = event['status_update'].get('status', {}).get('state', '') + assert state == 'TASK_STATE_COMPLETED' assert notifications[0].token == token diff --git a/tests/e2e/push_notifications/utils.py b/tests/e2e/push_notifications/utils.py index 01d84a30f..a7317f1b2 100644 --- a/tests/e2e/push_notifications/utils.py +++ b/tests/e2e/push_notifications/utils.py @@ -1,9 +1,9 @@ import contextlib +import multiprocessing import socket +import sys import time -from multiprocessing import Process - import httpx import uvicorn @@ -20,12 +20,14 @@ def run_server(app, host, port) -> None: uvicorn.run(app, host=host, port=port, log_level='warning') -def wait_for_server_ready(url: str, timeout: int = 10) -> None: +def wait_for_server_ready( + url: str, timeout: int = 10, headers: dict | None = None +) -> None: """Polls the provided URL endpoint until the server is up.""" start_time = time.time() while True: with contextlib.suppress(httpx.ConnectError): - with httpx.Client() as client: + with httpx.Client(headers=headers) as client: response = client.get(url) if response.status_code == 200: return @@ -36,9 +38,19 @@ def wait_for_server_ready(url: str, timeout: int = 10) -> None: time.sleep(0.1) -def create_app_process(app, host, port) -> Process: - """Creates a separate process for a given application.""" - return Process( +def create_app_process(app, host, port) -> 'Any': # type: ignore[name-defined] + """Creates a separate process for a given application. + + Uses 'fork' context on non-Windows platforms to avoid pickle issues + with FastAPI apps (which have closures that can't be pickled). + """ + # Use fork on Unix-like systems to avoid pickle issues with FastAPI + if sys.platform != 'win32': + ctx = multiprocessing.get_context('fork') + else: + ctx = multiprocessing.get_context('spawn') + + return ctx.Process( target=run_server, args=(app, host, port), daemon=True, diff --git a/tests/extensions/__init__.py b/tests/extensions/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/extensions/test_common.py b/tests/extensions/test_common.py index b3123028a..e1cf7594b 100644 --- a/tests/extensions/test_common.py +++ b/tests/extensions/test_common.py @@ -1,11 +1,16 @@ import pytest + from a2a.extensions.common import ( HTTP_EXTENSION_HEADER, find_extension_by_uri, get_requested_extensions, - update_extension_header, ) -from a2a.types import AgentCapabilities, AgentCard, AgentExtension +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentInterface, + AgentCard, + AgentExtension, +) def test_get_requested_extensions(): @@ -34,7 +39,9 @@ def test_find_extension_by_uri(): name='Test Agent', description='Test Agent Description', version='1.0', - url='http://test.com', + supported_interfaces=[ + AgentInterface(url='http://test.com', protocol_binding='HTTP+JSON') + ], skills=[], default_input_modes=['text/plain'], default_output_modes=['text/plain'], @@ -51,7 +58,9 @@ def test_find_extension_by_uri_no_extensions(): name='Test Agent', description='Test Agent Description', version='1.0', - url='http://test.com', + supported_interfaces=[ + AgentInterface(url='http://test.com', protocol_binding='HTTP+JSON') + ], skills=[], default_input_modes=['text/plain'], default_output_modes=['text/plain'], @@ -59,88 +68,3 @@ def test_find_extension_by_uri_no_extensions(): ) assert find_extension_by_uri(card, 'foo') is None - - -@pytest.mark.parametrize( - 'extensions, header, expected_extensions', - [ - ( - ['ext1', 'ext2'], # extensions - '', # header - { - 'ext1', - 'ext2', - }, # expected_extensions - ), # Case 1: New extensions provided, empty header. - ( - None, # extensions - 'ext1, ext2', # header - { - 'ext1', - 'ext2', - }, # expected_extensions - ), # Case 2: Extensions is None, existing header extensions. - ( - [], # extensions - 'ext1', # header - {}, # expected_extensions - ), # Case 3: New extensions is empty list, existing header extensions. - ( - ['ext1', 'ext2'], # extensions - 'ext3', # header - { - 'ext1', - 'ext2', - }, # expected_extensions - ), # Case 4: New extensions provided, and an existing header. New extensions should override active extensions. - ], -) -def test_update_extension_header_merge_with_existing_extensions( - extensions: list[str], - header: str, - expected_extensions: set[str], -): - http_kwargs = {'headers': {HTTP_EXTENSION_HEADER: header}} - result_kwargs = update_extension_header(http_kwargs, extensions) - header_value = result_kwargs['headers'][HTTP_EXTENSION_HEADER] - if not header_value: - actual_extensions = {} - else: - actual_extensions_list = [e.strip() for e in header_value.split(',')] - actual_extensions = set(actual_extensions_list) - assert actual_extensions == expected_extensions - - -def test_update_extension_header_with_other_headers(): - extensions = ['ext'] - http_kwargs = {'headers': {'X_Other': 'Test'}} - result_kwargs = update_extension_header(http_kwargs, extensions) - headers = result_kwargs.get('headers', {}) - assert HTTP_EXTENSION_HEADER in headers - assert headers[HTTP_EXTENSION_HEADER] == 'ext' - assert headers['X_Other'] == 'Test' - - -@pytest.mark.parametrize( - 'http_kwargs', - [ - None, - {}, - ], -) -def test_update_extension_header_headers_not_in_kwargs( - http_kwargs: dict[str, str] | None, -): - extensions = ['ext'] - http_kwargs = {} - result_kwargs = update_extension_header(http_kwargs, extensions) - headers = result_kwargs.get('headers', {}) - assert HTTP_EXTENSION_HEADER in headers - assert headers[HTTP_EXTENSION_HEADER] == 'ext' - - -def test_update_extension_header_with_other_headers_extensions_none(): - http_kwargs = {'headers': {'X_Other': 'Test'}} - result_kwargs = update_extension_header(http_kwargs, None) - assert HTTP_EXTENSION_HEADER not in result_kwargs['headers'] - assert result_kwargs['headers']['X_Other'] == 'Test' diff --git a/tests/helpers/test_agent_card_display.py b/tests/helpers/test_agent_card_display.py new file mode 100644 index 000000000..e252a52fe --- /dev/null +++ b/tests/helpers/test_agent_card_display.py @@ -0,0 +1,194 @@ +"""Tests for display_agent_card utility.""" + +import pytest + +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + AgentProvider, + AgentSkill, +) +from a2a.helpers.agent_card import display_agent_card + + +@pytest.fixture +def full_agent_card() -> AgentCard: + return AgentCard( + name='Sample Agent', + description='A sample agent.', + version='1.0.0', + documentation_url='https://docs.example.com', + icon_url='https://example.com/icon.png', + provider=AgentProvider( + organization='Example Org', url='https://example.com' + ), + supported_interfaces=[ + AgentInterface( + url='http://localhost:9999/a2a/jsonrpc', + protocol_binding='JSONRPC', + protocol_version='1.0', + ), + AgentInterface( + url='http://localhost:9999/a2a/rest', + protocol_binding='HTTP+JSON', + protocol_version='1.0', + tenant='tenant-a', + ), + ], + capabilities=AgentCapabilities( + streaming=True, + push_notifications=False, + extended_agent_card=True, + ), + default_input_modes=['text'], + default_output_modes=['text', 'task-status'], + skills=[ + AgentSkill( + id='skill-1', + name='My Skill', + description='Does something useful.', + tags=['foo', 'bar'], + examples=['Do the thing', 'Another example'], + ), + AgentSkill( + id='skill-2', + name='Other Skill', + description='Does something else.', + tags=['baz'], + ), + ], + ) + + +class TestDisplayAgentCard: + def test_full_card_output( + self, full_agent_card: AgentCard, capsys: pytest.CaptureFixture[str] + ) -> None: + """Golden test: exact output for a fully-populated card.""" + display_agent_card(full_agent_card) + assert capsys.readouterr().out == ( + '====================================================\n' + ' AgentCard \n' + '====================================================\n' + '--- General ---\n' + 'Name : Sample Agent\n' + 'Description : A sample agent.\n' + 'Version : 1.0.0\n' + 'Docs URL : https://docs.example.com\n' + 'Icon URL : https://example.com/icon.png\n' + 'Provider : Example Org (https://example.com)\n' + '\n' + '--- Interfaces ---\n' + ' [0] http://localhost:9999/a2a/jsonrpc (JSONRPC 1.0)\n' + ' [1] http://localhost:9999/a2a/rest (HTTP+JSON 1.0, tenant=tenant-a)\n' + '\n' + '--- Capabilities ---\n' + 'Streaming : True\n' + 'Push notifications : False\n' + 'Extended agent card : True\n' + '\n' + '--- I/O Modes ---\n' + 'Input : text\n' + 'Output : text, task-status\n' + '\n' + '--- Skills ---\n' + '----------------------------------------------------\n' + ' ID : skill-1\n' + ' Name : My Skill\n' + ' Description : Does something useful.\n' + ' Tags : foo, bar\n' + ' Example : Do the thing\n' + ' Example : Another example\n' + '----------------------------------------------------\n' + ' ID : skill-2\n' + ' Name : Other Skill\n' + ' Description : Does something else.\n' + ' Tags : baz\n' + '====================================================\n' + ) + + def test_empty_card_output( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """Golden test: exact output for a card with only default/empty fields. + + An empty supported_interfaces section signals a malformed card — + the bare header with no entries is intentional and visible to the user. + """ + display_agent_card(AgentCard()) + assert capsys.readouterr().out == ( + '====================================================\n' + ' AgentCard \n' + '====================================================\n' + '--- General ---\n' + 'Name : \n' + 'Description : \n' + 'Version : \n' + '\n' + '--- Interfaces ---\n' + '\n' + '--- Capabilities ---\n' + 'Streaming : False\n' + 'Push notifications : False\n' + 'Extended agent card : False\n' + '\n' + '--- I/O Modes ---\n' + 'Input : (none)\n' + 'Output : (none)\n' + '\n' + '--- Skills ---\n' + ' (none)\n' + '====================================================\n' + ) + + def test_interface_without_protocol_version_has_no_trailing_space( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """No trailing space in the binding field when protocol_version is not set.""" + card = AgentCard( + supported_interfaces=[ + AgentInterface( + url='127.0.0.1:50051', + protocol_binding='GRPC', + ) + ] + ) + display_agent_card(card) + assert ' [0] 127.0.0.1:50051 (GRPC)' in capsys.readouterr().out + + def test_interface_without_binding_or_version_has_no_parentheses( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """No parentheses when neither protocol_binding nor protocol_version are set.""" + card = AgentCard( + supported_interfaces=[AgentInterface(url='127.0.0.1:50051')] + ) + display_agent_card(card) + assert ' [0] 127.0.0.1:50051\n' in capsys.readouterr().out + + def test_provider_with_url( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """Provider shows organization and URL in parentheses when both are set.""" + card = AgentCard( + provider=AgentProvider( + organization='Example Org', + url='https://example.com', + ), + ) + display_agent_card(card) + assert ( + 'Provider : Example Org (https://example.com)' + in capsys.readouterr().out + ) + + def test_provider_without_url_has_no_empty_parentheses( + self, capsys: pytest.CaptureFixture[str] + ) -> None: + """No empty parentheses when provider URL is not set.""" + card = AgentCard(provider=AgentProvider(organization='Example Org')) + display_agent_card(card) + out = capsys.readouterr().out + assert 'Provider : Example Org' in out + assert '()' not in out diff --git a/tests/helpers/test_proto_helpers.py b/tests/helpers/test_proto_helpers.py new file mode 100644 index 000000000..a4f6498ab --- /dev/null +++ b/tests/helpers/test_proto_helpers.py @@ -0,0 +1,230 @@ +"""Tests for proto helpers.""" + +import pytest +from a2a.helpers.proto_helpers import ( + new_message, + new_text_message, + get_message_text, + new_artifact, + new_text_artifact, + get_artifact_text, + new_task_from_user_message, + new_task, + get_text_parts, + new_text_status_update_event, + new_text_artifact_update_event, + get_stream_response_text, +) +from a2a.types.a2a_pb2 import ( + Part, + Role, + Message, + Artifact, + Task, + TaskState, + StreamResponse, +) + +# --- Message Helpers Tests --- + + +def test_new_message() -> None: + parts = [Part(text='hello')] + msg = new_message( + parts=parts, role=Role.ROLE_USER, context_id='ctx1', task_id='task1' + ) + assert msg.role == Role.ROLE_USER + assert msg.parts == parts + assert msg.context_id == 'ctx1' + assert msg.task_id == 'task1' + assert msg.message_id != '' + + +def test_new_text_message() -> None: + msg = new_text_message( + text='hello', context_id='ctx1', task_id='task1', role=Role.ROLE_USER + ) + assert msg.role == Role.ROLE_USER + assert len(msg.parts) == 1 + assert msg.parts[0].text == 'hello' + assert msg.context_id == 'ctx1' + assert msg.task_id == 'task1' + assert msg.message_id != '' + + +def test_get_message_text() -> None: + msg = Message(parts=[Part(text='hello'), Part(text='world')]) + assert get_message_text(msg) == 'hello\nworld' + assert get_message_text(msg, delimiter=' ') == 'hello world' + + +# --- Artifact Helpers Tests --- + + +def test_new_artifact() -> None: + parts = [Part(text='content')] + art = new_artifact(parts=parts, name='test', description='desc') + assert art.name == 'test' + assert art.description == 'desc' + assert art.parts == parts + assert art.artifact_id != '' + + +def test_new_text_artifact() -> None: + art = new_text_artifact(name='test', text='content', description='desc') + assert art.name == 'test' + assert art.description == 'desc' + assert len(art.parts) == 1 + assert art.parts[0].text == 'content' + assert art.artifact_id != '' + + +def test_new_text_artifact_with_id() -> None: + art = new_text_artifact( + name='test', text='content', description='desc', artifact_id='art1' + ) + assert art.name == 'test' + assert art.description == 'desc' + assert len(art.parts) == 1 + assert art.parts[0].text == 'content' + assert art.artifact_id == 'art1' + + +def test_get_artifact_text() -> None: + art = Artifact(parts=[Part(text='hello'), Part(text='world')]) + assert get_artifact_text(art) == 'hello\nworld' + assert get_artifact_text(art, delimiter=' ') == 'hello world' + + +# --- Task Helpers Tests --- + + +def test_new_task_from_user_message() -> None: + msg = Message( + role=Role.ROLE_USER, + parts=[Part(text='hello')], + task_id='task1', + context_id='ctx1', + ) + task = new_task_from_user_message(msg) + assert task.id == 'task1' + assert task.context_id == 'ctx1' + assert task.status.state == TaskState.TASK_STATE_SUBMITTED + assert len(task.history) == 1 + assert task.history[0] == msg + + +def test_new_task_from_user_message_empty_parts() -> None: + msg = Message(role=Role.ROLE_USER, parts=[]) + with pytest.raises(ValueError, match='Message parts cannot be empty'): + new_task_from_user_message(msg) + + +def test_new_task_from_user_message_empty_text() -> None: + msg = Message(role=Role.ROLE_USER, parts=[Part(text='')]) + with pytest.raises(ValueError, match='Message.text cannot be empty'): + new_task_from_user_message(msg) + + +def test_new_task() -> None: + task = new_task( + task_id='task1', context_id='ctx1', state=TaskState.TASK_STATE_WORKING + ) + assert task.id == 'task1' + assert task.context_id == 'ctx1' + assert task.status.state == TaskState.TASK_STATE_WORKING + assert len(task.history) == 0 + assert len(task.artifacts) == 0 + + +# --- Part Helpers Tests --- + + +def test_get_text_parts() -> None: + parts = [ + Part(text='hello'), + Part(url='http://example.com'), + Part(text='world'), + ] + assert get_text_parts(parts) == ['hello', 'world'] + + +# --- Event & Stream Helpers Tests --- + + +def test_new_text_status_update_event() -> None: + event = new_text_status_update_event( + task_id='task1', + context_id='ctx1', + state=TaskState.TASK_STATE_WORKING, + text='progress', + ) + assert event.task_id == 'task1' + assert event.context_id == 'ctx1' + assert event.status.state == TaskState.TASK_STATE_WORKING + assert event.status.message.parts[0].text == 'progress' + + +def test_new_text_artifact_update_event() -> None: + event = new_text_artifact_update_event( + task_id='task1', + context_id='ctx1', + name='test', + text='content', + append=True, + last_chunk=True, + ) + assert event.task_id == 'task1' + assert event.context_id == 'ctx1' + assert event.artifact.name == 'test' + assert event.artifact.parts[0].text == 'content' + assert event.append is True + assert event.last_chunk is True + + +def test_new_text_artifact_update_event_with_id() -> None: + event = new_text_artifact_update_event( + task_id='task1', + context_id='ctx1', + name='test', + text='content', + artifact_id='art1', + ) + assert event.task_id == 'task1' + assert event.context_id == 'ctx1' + assert event.artifact.name == 'test' + assert event.artifact.parts[0].text == 'content' + assert event.artifact.artifact_id == 'art1' + + +def test_get_stream_response_text_message() -> None: + resp = StreamResponse(message=Message(parts=[Part(text='hello')])) + assert get_stream_response_text(resp) == 'hello' + + +def test_get_stream_response_text_task() -> None: + resp = StreamResponse( + task=Task(artifacts=[Artifact(parts=[Part(text='hello')])]) + ) + assert get_stream_response_text(resp) == 'hello' + + +def test_get_stream_response_text_status_update() -> None: + resp = StreamResponse( + status_update=new_text_status_update_event( + 't', 'c', TaskState.TASK_STATE_WORKING, 'hello' + ) + ) + assert get_stream_response_text(resp) == 'hello' + + +def test_get_stream_response_text_artifact_update() -> None: + resp = StreamResponse( + artifact_update=new_text_artifact_update_event('t', 'c', 'n', 'hello') + ) + assert get_stream_response_text(resp) == 'hello' + + +def test_get_stream_response_text_empty() -> None: + resp = StreamResponse() + assert get_stream_response_text(resp) == '' diff --git a/tests/integration/__init__.py b/tests/integration/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/cross_version/client_server/__init__.py b/tests/integration/cross_version/client_server/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/integration/cross_version/client_server/client_0_3.py b/tests/integration/cross_version/client_server/client_0_3.py new file mode 100644 index 000000000..8e0db5148 --- /dev/null +++ b/tests/integration/cross_version/client_server/client_0_3.py @@ -0,0 +1,292 @@ +import argparse +import asyncio +import grpc +import httpx +import json +from uuid import uuid4 + +from a2a.client import ClientFactory, ClientConfig +from a2a.types import ( + Message, + Part, + Role, + TextPart, + TransportProtocol, + TaskQueryParams, + TaskIdParams, + TaskState, + TaskPushNotificationConfig, + PushNotificationConfig, + FilePart, + FileWithUri, + FileWithBytes, + DataPart, +) +from a2a.client.errors import A2AClientJSONRPCError, A2AClientHTTPError +import sys +import traceback + + +async def test_send_message_stream(client): + print('Testing send_message (streaming)...') + + msg = Message( + role=Role.user, + message_id=f'stream-{uuid4()}', + parts=[ + Part(root=TextPart(text='stream')), + Part( + root=FilePart( + file=FileWithUri( + uri='https://example.com/file.txt', + mime_type='text/plain', + ) + ) + ), + Part( + root=FilePart( + file=FileWithBytes( + bytes=b'aGVsbG8=', mime_type='application/octet-stream' + ) + ) + ), + Part(root=DataPart(data={'key': 'value'})), + ], + metadata={'test_key': 'full_message'}, + ) + events = [] + + async for event in client.send_message(request=msg): + events.append(event) + break + + assert len(events) > 0, 'Expected at least one event' + first_event = events[0] + + event_obj = ( + first_event[0] if isinstance(first_event, tuple) else first_event + ) + task_id = getattr(event_obj, 'id', None) or getattr( + event_obj, 'task_id', 'unknown' + ) + + print(f'Success: send_message (streaming) passed. Task ID: {task_id}') + return task_id + + +async def test_send_message_sync(url, protocol_enum): + print('Testing send_message (synchronous)...') + config = ClientConfig() + config.httpx_client = httpx.AsyncClient(timeout=30.0) + config.grpc_channel_factory = grpc.aio.insecure_channel + config.supported_transports = [protocol_enum] + config.streaming = False + + client = await ClientFactory.connect(url, client_config=config) + msg = Message( + role=Role.user, + message_id=f'sync-{uuid4()}', + parts=[Part(root=TextPart(text='sync'))], + metadata={'test_key': 'simple_message'}, + ) + + async for event in client.send_message(request=msg): + assert event is not None + event_obj = event[0] if isinstance(event, tuple) else event + + status = getattr(event_obj, 'status', None) + if status and str(getattr(status, 'state', '')).endswith('completed'): + # In 0.3 SDK, the message on the status might be exposed as 'message' or 'update' + status_msg = getattr( + status, 'message', getattr(status, 'update', None) + ) + assert status_msg is not None, ( + 'TaskStatus message/update is missing' + ) + + metadata = getattr(status_msg, 'metadata', {}) + assert metadata.get('response_key') == 'response_value', ( + f'Missing response metadata: {metadata}' + ) + + # Check Part translation (root text part in 0.3) + parts = getattr( + status_msg, 'parts', getattr(status_msg, 'content', []) + ) + assert len(parts) > 0, 'No parts found in TaskStatus message' + first_part = parts[0] + text = getattr(first_part, 'text', '') + if ( + not text + and hasattr(first_part, 'root') + and hasattr(first_part.root, 'text') + ): + text = first_part.root.text + assert text == 'done', f"Expected 'done' text in Part, got '{text}'" + break + + print(f'Success: send_message (synchronous) passed.') + + +async def test_get_task(client, task_id): + print(f'Testing get_task ({task_id})...') + task = await client.get_task(request=TaskQueryParams(id=task_id)) + assert task.id == task_id + + user_msgs = [ + m for m in task.history if getattr(m, 'role', None) == Role.user + ] + assert user_msgs, 'Expected at least one ROLE_USER message in task history' + + client_msg = user_msgs[0] + + parts = client_msg.parts + assert len(parts) == 4, f'Expected 4 parts, got {len(parts)}' + + # 1. text part + text = getattr(parts[0].root, 'text', '') + assert text == 'stream', f"Expected 'stream', got {text}" + + # 2. uri part + file_uri = getattr(parts[1].root, 'file', None) + assert ( + file_uri is not None + and getattr(file_uri, 'uri', None) == 'https://example.com/file.txt' + ) + + # 3. bytes part + file_bytes = getattr(parts[2].root, 'file', None) + actual_bytes = getattr(file_bytes, 'bytes', None) + assert actual_bytes == 'aGVsbG8=', ( + f"Expected base64 'hello', got {actual_bytes}" + ) + + # 4. data part + data_val = getattr(parts[3].root, 'data', None) + assert data_val is not None + assert data_val == {'key': 'value'} + + print('Success: get_task passed.') + + +async def test_cancel_task(client, task_id): + print(f'Testing cancel_task ({task_id})...') + await client.cancel_task(request=TaskIdParams(id=task_id)) + task = await client.get_task(request=TaskQueryParams(id=task_id)) + assert task.status.state == TaskState.canceled, ( + f'Expected a canceled state, got {task.status.state}' + ) + print('Success: cancel_task passed.') + + +async def test_subscribe(client, task_id): + print(f'Testing subscribe ({task_id})...') + has_artifact = False + async for event in client.resubscribe(request=TaskIdParams(id=task_id)): + # event is tuple (Task, UpdateEvent) + task, update = event + if update and hasattr(update, 'artifact'): + has_artifact = True + artifact = update.artifact + assert artifact.name == 'test-artifact' + assert artifact.metadata.get('artifact_key') == 'artifact_value' + # part check + assert len(artifact.parts) > 0 + p = artifact.parts[0] + text = getattr(p.root, 'text', '') + assert text == 'artifact-chunk' + print('Success: received artifact update.') + + if has_artifact: + break + print('Success: subscribe passed.') + + +async def test_get_extended_agent_card(client): + print('Testing get_extended_agent_card...') + # In v0.3, extended card is fetched via get_card() on the client + card = await client.get_card() + assert card is not None + assert card.name in ('Server 0.3', 'Server 1.0') + assert card.version == '1.0.0' + assert 'Server running on a2a v' in card.description + + assert card.capabilities is not None + assert card.capabilities.streaming is True + assert card.capabilities.push_notifications is True + + if card.name == 'Server 0.3': + assert card.url is not None + assert card.preferred_transport == TransportProtocol.jsonrpc + assert len(card.additional_interfaces) == 2 + assert card.supports_authenticated_extended_card is False + else: + assert card.url is not None + assert card.preferred_transport is not None + print( + f'card.supports_authenticated_extended_card is: {card.supports_authenticated_extended_card}' + ) + assert card.supports_authenticated_extended_card in (False, None) + + print(f'Success: get_extended_agent_card passed.') + + +async def run_client(url: str, protocol: str): + protocol_enum_map = { + 'jsonrpc': TransportProtocol.jsonrpc, + 'rest': TransportProtocol.http_json, + 'grpc': TransportProtocol.grpc, + } + protocol_enum = protocol_enum_map[protocol] + + config = ClientConfig() + config.httpx_client = httpx.AsyncClient(timeout=30.0) + config.grpc_channel_factory = grpc.aio.insecure_channel + config.supported_transports = [protocol_enum] + config.streaming = True + + client = await ClientFactory.connect(url, client_config=config) + + # 1. Get Extended Agent Card + await test_get_extended_agent_card(client) + + # 2. Send Streaming Message + task_id = await test_send_message_stream(client) + + # 3. Get Task + await test_get_task(client, task_id) + + # 4. Subscribe to Task + await test_subscribe(client, task_id) + + # 5. Cancel Task + await test_cancel_task(client, task_id) + + # 6. Send Sync Message + await test_send_message_sync(url, protocol_enum) + + +def main(): + print('Starting client_0_3...') + + parser = argparse.ArgumentParser() + parser.add_argument('--url', type=str, required=True) + parser.add_argument('--protocols', type=str, nargs='+', required=True) + args = parser.parse_args() + + failed = False + for protocol in args.protocols: + print(f'\n=== Testing protocol: {protocol} ===') + try: + asyncio.run(run_client(args.url, protocol)) + except Exception as e: + traceback.print_exc() + print(f'FAILED protocol {protocol}: {e}') + failed = True + + if failed: + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/tests/integration/cross_version/client_server/client_1_0.py b/tests/integration/cross_version/client_server/client_1_0.py new file mode 100644 index 000000000..6630bddad --- /dev/null +++ b/tests/integration/cross_version/client_server/client_1_0.py @@ -0,0 +1,351 @@ +import argparse +import asyncio +import grpc +import httpx +import sys +from uuid import uuid4 + +from a2a.client import ClientConfig, create_client +from a2a.utils import TransportProtocol +from a2a.types import ( + Message, + Part, + Role, + GetTaskRequest, + CancelTaskRequest, + SubscribeToTaskRequest, + GetExtendedAgentCardRequest, + SendMessageRequest, + TaskPushNotificationConfig, + GetTaskPushNotificationConfigRequest, + ListTaskPushNotificationConfigsRequest, + DeleteTaskPushNotificationConfigRequest, + TaskState, +) +from a2a.client.errors import A2AClientError +from google.protobuf.struct_pb2 import Struct, Value + + +async def test_send_message_stream(client): + print('Testing send_message (streaming)...') + + s = Struct() + s.update({'key': 'value'}) + + msg = Message( + role=Role.ROLE_USER, + message_id=f'stream-{uuid4()}', + parts=[ + Part(text='stream'), + Part(url='https://example.com/file.txt', media_type='text/plain'), + Part(raw=b'hello', media_type='application/octet-stream'), + Part(data=Value(struct_value=s)), + ], + metadata={'test_key': 'full_message'}, + ) + events = [] + + async for event in client.send_message( + request=SendMessageRequest(message=msg) + ): + events.append(event) + break + + assert len(events) > 0, 'Expected at least one event' + first_event = events[0] + + # In v1.0 SDK, send_message returns StreamResponse + stream_response = first_event + + # Try to find task_id in the oneof fields of StreamResponse + task_id = 'unknown' + if stream_response.HasField('task'): + task_id = stream_response.task.id + elif stream_response.HasField('message'): + task_id = stream_response.message.task_id + elif stream_response.HasField('status_update'): + task_id = stream_response.status_update.task_id + elif stream_response.HasField('artifact_update'): + task_id = stream_response.artifact_update.task_id + + print(f'Success: send_message (streaming) passed. Task ID: {task_id}') + return task_id + + +async def test_send_message_sync(url, protocol_enum): + print('Testing send_message (synchronous)...') + config = ClientConfig() + config.httpx_client = httpx.AsyncClient(timeout=30.0) + config.grpc_channel_factory = grpc.aio.insecure_channel + config.supported_protocol_bindings = [protocol_enum] + config.streaming = False + + client = await create_client(url, client_config=config) + msg = Message( + role=Role.ROLE_USER, + message_id=f'sync-{uuid4()}', + parts=[Part(text='sync')], + metadata={'test_key': 'simple_message'}, + ) + + async for event in client.send_message( + request=SendMessageRequest(message=msg) + ): + assert event is not None + stream_response = event + + status = None + if stream_response.HasField('task'): + status = stream_response.task.status + elif stream_response.HasField('status_update'): + status = stream_response.status_update.status + + if status and status.state == TaskState.TASK_STATE_COMPLETED: + metadata = dict(status.message.metadata) + assert metadata.get('response_key') == 'response_value', ( + f'Missing response metadata: {metadata}' + ) + assert status.message.parts[0].text == 'done' + break + else: + print(f'Ignore message: {stream_response}') + + print(f'Success: send_message (synchronous) passed.') + + +async def test_get_task(client, task_id): + print(f'Testing get_task ({task_id})...') + task = await client.get_task(request=GetTaskRequest(id=task_id)) + assert task.id == task_id + + user_msgs = [m for m in task.history if m.role == Role.ROLE_USER] + assert user_msgs, 'Expected at least one ROLE_USER message in task history' + client_msg = user_msgs[0] + + assert len(client_msg.parts) == 4, ( + f'Expected 4 parts, got {len(client_msg.parts)}' + ) + + # 1. text part + assert client_msg.parts[0].text == 'stream', ( + f"Expected 'stream', got {client_msg.parts[0].text}" + ) + + # 2. uri part + assert client_msg.parts[1].url == 'https://example.com/file.txt' + + # 3. bytes part + assert client_msg.parts[2].raw == b'hello' + + # 4. data part + data_dict = dict(client_msg.parts[3].data.struct_value.fields) + assert data_dict['key'].string_value == 'value' + + print('Success: get_task passed.') + + +async def test_cancel_task(client, task_id): + print(f'Testing cancel_task ({task_id})...') + await client.cancel_task(request=CancelTaskRequest(id=task_id)) + task = await client.get_task(request=GetTaskRequest(id=task_id)) + assert task.status.state == TaskState.TASK_STATE_CANCELED, ( + f'Expected {TaskState.TASK_STATE_CANCELED}, got {task.status.state}' + ) + print('Success: cancel_task passed.') + + +async def test_subscribe(client, task_id): + print(f'Testing subscribe ({task_id})...') + has_artifact = False + async for event in client.subscribe( + request=SubscribeToTaskRequest(id=task_id) + ): + assert event is not None + stream_response = event + if stream_response.HasField('artifact_update'): + has_artifact = True + artifact = stream_response.artifact_update.artifact + assert artifact.name == 'test-artifact' + val = artifact.metadata['artifact_key'] + if hasattr(val, 'string_value'): + assert val.string_value == 'artifact_value' + else: + assert val == 'artifact_value' + assert artifact.parts[0].text == 'artifact-chunk' + print('Success: received artifact update.') + + if has_artifact: + break + print('Success: subscribe passed.') + + +async def test_list_tasks(client, server_name): + from a2a.types import ListTasksRequest + from a2a.client.errors import A2AClientError + + print('Testing list_tasks...') + try: + resp = await client.list_tasks(request=ListTasksRequest()) + assert resp is not None + print(f'Success: list_tasks returned {len(resp.tasks)} tasks') + except NotImplementedError as e: + if server_name == 'Server 0.3': + print(f'Success: list_tasks gracefully failed on 0.3 Server: {e}') + else: + raise e + + +async def test_get_extended_agent_card(client): + print('Testing get_extended_agent_card...') + card = await client.get_extended_agent_card( + request=GetExtendedAgentCardRequest() + ) + assert card is not None + assert card.name in ('Server 0.3', 'Server 1.0') + assert card.version == '1.0.0' + assert 'Server running on a2a v' in card.description + + assert card.capabilities is not None + assert card.capabilities.streaming is True + assert card.capabilities.push_notifications is True + + if card.name == 'Server 1.0': + assert len(card.supported_interfaces) == 4 + assert card.capabilities.extended_agent_card in (False, None) + else: + assert len(card.supported_interfaces) > 0 + assert card.capabilities.extended_agent_card in (False, None) + + print(f'Success: get_extended_agent_card passed.') + return card.name + + +async def test_push_notification_lifecycle(client, task_id, server_name): + print(f'Testing Push Notification lifecycle for task {task_id}...') + config_id = f'push-{uuid4()}' + + # 1. Create + task_push_cfg = TaskPushNotificationConfig( + task_id=task_id, id=config_id, url='http://127.0.0.1:9999/webhook' + ) + + created = await client.create_task_push_notification_config( + request=task_push_cfg + ) + assert created.id == config_id + print('Success: create_task_push_notification_config passed.') + + # 2. Get + get_req = GetTaskPushNotificationConfigRequest( + task_id=task_id, id=config_id + ) + fetched = await client.get_task_push_notification_config(request=get_req) + assert fetched.id == config_id + print('Success: get_task_push_notification_config passed.') + + # 3. List + try: + list_req = ListTaskPushNotificationConfigsRequest(task_id=task_id) + listed = await client.list_task_push_notification_configs( + request=list_req + ) + assert any(c.id == config_id for c in listed.configs) + except (NotImplementedError, A2AClientError) as e: + if server_name == 'Server 0.3': + print( + 'EXPECTED: list_task_push_notification_configs not implemented' + ) + else: + raise e + print('Success: list_task_push_notification_configs passed.') + + try: + # 4. Delete + del_req = DeleteTaskPushNotificationConfigRequest( + task_id=task_id, id=config_id + ) + await client.delete_task_push_notification_config(request=del_req) + print('Success: delete_task_push_notification_config passed.') + + # Verify deletion + listed_after = await client.list_task_push_notification_configs( + request=list_req + ) + assert not any(c.id == config_id for c in listed_after.configs) + print('Success: verified deletion.') + except (NotImplementedError, A2AClientError) as e: + if server_name == 'Server 0.3': + print( + 'EXPECTED: delete_task_push_notification_config not implemented' + ) + else: + raise e + + +async def run_client(url: str, protocol: str): + protocol_enum_map = { + 'jsonrpc': TransportProtocol.JSONRPC, + 'rest': TransportProtocol.HTTP_JSON, + 'grpc': TransportProtocol.GRPC, + } + protocol_enum = protocol_enum_map[protocol] + + config = ClientConfig() + config.httpx_client = httpx.AsyncClient(timeout=30.0) + config.grpc_channel_factory = grpc.aio.insecure_channel + config.supported_protocol_bindings = [protocol_enum] + config.streaming = True + + client = await create_client(url, client_config=config) + + # 1. Get Extended Agent Card + server_name = await test_get_extended_agent_card(client) + + # 1.5. List Tasks + await test_list_tasks(client, server_name) + + # 2. Send Streaming Message + task_id = await test_send_message_stream(client) + + # 3. Get Task + await test_get_task(client, task_id) + + # 3.5 Push Notification Lifecycle + await test_push_notification_lifecycle(client, task_id, server_name) + + # 4. Subscribe to Task + await test_subscribe(client, task_id) + + # 5. Cancel Task + await test_cancel_task(client, task_id) + + # 6. Send Sync Message + await test_send_message_sync(url, protocol_enum) + + +def main(): + print('Starting client_1_0...') + + parser = argparse.ArgumentParser() + parser.add_argument('--url', type=str, required=True) + parser.add_argument('--protocols', type=str, nargs='+', required=True) + args = parser.parse_args() + + failed = False + for protocol in args.protocols: + print(f'\n=== Testing protocol: {protocol} ===') + try: + asyncio.run(run_client(args.url, protocol)) + except Exception as e: + import traceback + + traceback.print_exc() + print(f'FAILED protocol {protocol}: {e}') + failed = True + + if failed: + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/tests/integration/cross_version/client_server/server_0_3.py b/tests/integration/cross_version/client_server/server_0_3.py new file mode 100644 index 000000000..875cbb1ca --- /dev/null +++ b/tests/integration/cross_version/client_server/server_0_3.py @@ -0,0 +1,238 @@ +import argparse +import uvicorn +from fastapi import FastAPI +import asyncio +import grpc +import sys +import time + +from a2a.server.agent_execution.agent_executor import AgentExecutor +from a2a.server.agent_execution.context import RequestContext +from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication +from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication +from a2a.server.events.event_queue import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers.default_request_handler import ( + DefaultRequestHandler, +) +from a2a.server.request_handlers.grpc_handler import GrpcHandler +from a2a.server.tasks.task_updater import TaskUpdater +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Part, + TaskState, + TextPart, + FilePart, + TransportProtocol, + FileWithBytes, + FileWithUri, + DataPart, +) +from a2a.grpc import a2a_pb2_grpc +from starlette.requests import Request +from starlette.concurrency import iterate_in_threadpool +import time +from a2a.utils.task import new_task +from server_common import CustomLoggingMiddleware + + +class MockAgentExecutor(AgentExecutor): + def __init__(self): + self.events = {} + + async def execute(self, context: RequestContext, event_queue: EventQueue): + print(f'SERVER: execute called for task {context.task_id}') + + task = new_task(context.message) + task.id = context.task_id + task.context_id = context.context_id + task.status.state = TaskState.working + await event_queue.enqueue_event(task) + + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + await task_updater.update_status(TaskState.working) + + text = '' + if context.message and context.message.parts: + part = context.message.parts[0] + if hasattr(part, 'root') and hasattr(part.root, 'text'): + text = part.root.text + elif hasattr(part, 'text'): + text = part.text + + metadata = ( + dict(context.message.metadata) + if context.message and context.message.metadata + else {} + ) + if metadata.get('test_key') not in ('full_message', 'simple_message'): + print(f'SERVER: WARNING: Missing or incorrect metadata: {metadata}') + raise ValueError( + f'Missing expected metadata from client. Got: {metadata}' + ) + + if metadata.get('test_key') == 'full_message': + expected_parts = [ + Part(root=TextPart(text='stream')), + Part( + root=FilePart( + file=FileWithUri( + uri='https://example.com/file.txt', + mime_type='text/plain', + ) + ) + ), + Part( + root=FilePart( + file=FileWithBytes( + bytes=b'aGVsbG8=', + mime_type='application/octet-stream', + ) + ) + ), + Part(root=DataPart(data={'key': 'value'})), + ] + assert context.message.parts == expected_parts + + print(f"SERVER: request message text='{text}'") + + if 'stream' in text: + print(f'SERVER: waiting on stream event for task {context.task_id}') + event = asyncio.Event() + self.events[context.task_id] = event + + async def emit_periodic(): + try: + while not event.is_set(): + await task_updater.update_status( + TaskState.working, + message=task_updater.new_agent_message( + [Part(root=TextPart(text='ping'))] + ), + ) + await task_updater.add_artifact( + [Part(root=TextPart(text='artifact-chunk'))], + name='test-artifact', + metadata={'artifact_key': 'artifact_value'}, + ) + await asyncio.sleep(0.1) + except asyncio.CancelledError: + pass + + bg_task = asyncio.create_task(emit_periodic()) + + await event.wait() + bg_task.cancel() + + print(f'SERVER: stream event triggered for task {context.task_id}') + + await task_updater.update_status( + TaskState.completed, + message=task_updater.new_agent_message( + [Part(root=TextPart(text='done'))], + metadata={'response_key': 'response_value'}, + ), + ) + print(f'SERVER: execute finished for task {context.task_id}') + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + print(f'SERVER: cancel called for task {context.task_id}') + assert context.task_id in self.events + self.events[context.task_id].set() + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + await task_updater.update_status(TaskState.canceled) + + +async def main_async(http_port: int, grpc_port: int): + print( + f'SERVER: Starting server on http_port={http_port}, grpc_port={grpc_port}' + ) + + agent_card = AgentCard( + name='Server 0.3', + description='Server running on a2a v0.3.0', + version='1.0.0', + url=f'http://127.0.0.1:{http_port}/jsonrpc/', + preferred_transport=TransportProtocol.jsonrpc, + skills=[], + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + additional_interfaces=[ + AgentInterface( + transport=TransportProtocol.http_json, + url=f'http://127.0.0.1:{http_port}/rest/', + ), + AgentInterface( + transport=TransportProtocol.grpc, + url=f'127.0.0.1:{grpc_port}', + ), + ], + supports_authenticated_extended_card=False, + ) + + task_store = InMemoryTaskStore() + handler = DefaultRequestHandler( + agent_executor=MockAgentExecutor(), + task_store=task_store, + queue_manager=InMemoryQueueManager(), + push_config_store=InMemoryPushNotificationConfigStore(), + ) + + app = FastAPI() + app.mount( + '/jsonrpc', + A2AFastAPIApplication( + http_handler=handler, agent_card=agent_card + ).build(), + ) + app.mount( + '/rest', + A2ARESTFastAPIApplication( + http_handler=handler, agent_card=agent_card + ).build(), + ) + # Start gRPC Server + server = grpc.aio.server() + servicer = GrpcHandler(agent_card, handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + server.add_insecure_port(f'127.0.0.1:{grpc_port}') + await server.start() + + app.add_middleware(CustomLoggingMiddleware) + + # Start Uvicorn + config = uvicorn.Config( + app, host='127.0.0.1', port=http_port, log_level='info', access_log=True + ) + uvicorn_server = uvicorn.Server(config) + await uvicorn_server.serve() + + +def main(): + print('Starting server_0_3...') + + parser = argparse.ArgumentParser() + parser.add_argument('--http-port', type=int, required=True) + parser.add_argument('--grpc-port', type=int, required=True) + args = parser.parse_args() + + asyncio.run(main_async(args.http_port, args.grpc_port)) + + +if __name__ == '__main__': + main() diff --git a/tests/integration/cross_version/client_server/server_1_0.py b/tests/integration/cross_version/client_server/server_1_0.py new file mode 100644 index 000000000..06f7e5e97 --- /dev/null +++ b/tests/integration/cross_version/client_server/server_1_0.py @@ -0,0 +1,231 @@ +import argparse +import uvicorn +from fastapi import FastAPI +import asyncio +import grpc + +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler, GrpcHandler +from a2a.server.tasks import TaskUpdater +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Part, + TaskState, +) +from a2a.types import a2a_pb2_grpc +from a2a.compat.v0_3 import a2a_v0_3_pb2_grpc +from a2a.compat.v0_3.grpc_handler import CompatGrpcHandler +from a2a.utils import TransportProtocol +from server_common import CustomLoggingMiddleware +from google.protobuf.struct_pb2 import Struct, Value +from a2a.helpers.proto_helpers import new_task_from_user_message + + +class MockAgentExecutor(AgentExecutor): + def __init__(self): + self.events = {} + + async def execute(self, context: RequestContext, event_queue: EventQueue): + print(f'SERVER: execute called for task {context.task_id}') + task = new_task_from_user_message(context.message) + task.id = context.task_id + task.context_id = context.context_id + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + await task_updater.update_status(TaskState.TASK_STATE_WORKING) + + text = '' + if context.message and context.message.parts: + text = context.message.parts[0].text + + metadata = ( + dict(context.message.metadata) + if context.message and context.message.metadata + else {} + ) + if metadata.get('test_key') not in ('full_message', 'simple_message'): + print(f'SERVER: WARNING: Missing or incorrect metadata: {metadata}') + raise ValueError( + f'Missing expected metadata from client. Got: {metadata}' + ) + + for part in context.message.parts: + if part.HasField('raw'): + assert part.raw == b'hello' + + if metadata.get('test_key') == 'full_message': + s = Struct() + s.update({'key': 'value'}) + + expected_parts = [ + Part(text='stream'), + Part( + url='https://example.com/file.txt', media_type='text/plain' + ), + Part(raw=b'hello', media_type='application/octet-stream'), + Part(data=Value(struct_value=s)), + ] + assert context.message.parts == expected_parts + + if 'stream' in text: + print(f'SERVER: waiting on stream event for task {context.task_id}') + event = asyncio.Event() + self.events[context.task_id] = event + + async def emit_periodic(): + try: + while not event.is_set(): + await task_updater.update_status( + TaskState.TASK_STATE_WORKING, + message=task_updater.new_agent_message( + [Part(text='ping')] + ), + ) + await task_updater.add_artifact( + [Part(text='artifact-chunk')], + name='test-artifact', + metadata={'artifact_key': 'artifact_value'}, + ) + await asyncio.sleep(0.1) + except asyncio.CancelledError: + pass + + bg_task = asyncio.create_task(emit_periodic()) + await event.wait() + bg_task.cancel() + print(f'SERVER: stream event triggered for task {context.task_id}') + + await task_updater.update_status( + TaskState.TASK_STATE_COMPLETED, + message=task_updater.new_agent_message( + [Part(text='done')], metadata={'response_key': 'response_value'} + ), + ) + print(f'SERVER: execute finished for task {context.task_id}') + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + print(f'SERVER: cancel called for task {context.task_id}') + assert context.task_id in self.events + self.events[context.task_id].set() + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + await task_updater.update_status(TaskState.TASK_STATE_CANCELED) + + +async def main_async(http_port: int, grpc_port: int): + agent_card = AgentCard( + name='Server 1.0', + description='Server running on a2a v1.0', + version='1.0.0', + skills=[], + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url=f'http://127.0.0.1:{http_port}/jsonrpc/', + ), + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url=f'http://127.0.0.1:{http_port}/rest/', + protocol_version='1.0', + ), + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url=f'http://127.0.0.1:{http_port}/rest/', + protocol_version='0.3', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url=f'127.0.0.1:{grpc_port}', + ), + ], + ) + + task_store = InMemoryTaskStore() + handler = DefaultRequestHandler( + MockAgentExecutor(), + task_store, + agent_card, + queue_manager=InMemoryQueueManager(), + push_config_store=InMemoryPushNotificationConfigStore(), + extended_agent_card=agent_card, + ) + + app = FastAPI() + app.add_middleware(CustomLoggingMiddleware) + + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/.well-known/agent-card.json' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + enable_v0_3_compat=True, + ) + app.mount( + '/jsonrpc', + FastAPI(routes=jsonrpc_routes + agent_card_routes), + ) + + rest_routes = create_rest_routes( + request_handler=handler, + enable_v0_3_compat=True, + ) + app.mount( + '/rest', + FastAPI(routes=rest_routes + agent_card_routes), + ) + + # Start gRPC Server + server = grpc.aio.server() + servicer = GrpcHandler(handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + + compat_servicer = CompatGrpcHandler(handler) + a2a_v0_3_pb2_grpc.add_A2AServiceServicer_to_server(compat_servicer, server) + + server.add_insecure_port(f'127.0.0.1:{grpc_port}') + await server.start() + + # Start Uvicorn + config = uvicorn.Config( + app, host='127.0.0.1', port=http_port, log_level='info', access_log=True + ) + uvicorn_server = uvicorn.Server(config) + await uvicorn_server.serve() + + +def main(): + print('Starting server_1_0...') + + parser = argparse.ArgumentParser() + parser.add_argument('--http-port', type=int, required=True) + parser.add_argument('--grpc-port', type=int, required=True) + args = parser.parse_args() + + asyncio.run(main_async(args.http_port, args.grpc_port)) + + +if __name__ == '__main__': + main() diff --git a/tests/integration/cross_version/client_server/server_common.py b/tests/integration/cross_version/client_server/server_common.py new file mode 100644 index 000000000..d66c1eb4a --- /dev/null +++ b/tests/integration/cross_version/client_server/server_common.py @@ -0,0 +1,47 @@ +import collections.abc +from typing import AsyncGenerator +from starlette.middleware.base import BaseHTTPMiddleware +from starlette.requests import Request + + +class PrintingAsyncGenerator(collections.abc.AsyncGenerator): + """ + Wraps an async generator to print items as they are yielded, + fully supporting bi-directional flow (asend, athrow, aclose). + """ + + def __init__(self, url: str, ag: AsyncGenerator): + self.url = url + self._ag = ag + + async def asend(self, value): + # Forward the sent value to the underlying async generator + result = await self._ag.asend(value) + print(f'PrintingAsyncGenerator::Generated: {self.url} {result}') + return result + + async def athrow(self, typ, val=None, tb=None): + # Forward exceptions to the underlying async generator + result = await self._ag.athrow(typ, val, tb) + print( + f'PrintingAsyncGenerator::Generated (via athrow): {self.url} {result}' + ) + return result + + async def aclose(self): + # Gracefully shut down the underlying generator + await self._ag.aclose() + + +class CustomLoggingMiddleware(BaseHTTPMiddleware): + async def dispatch(self, request: Request, call_next): + print('-' * 80) + print(f'REQUEST: {request.method} {request.url}') + print(f'REQUEST BODY: {await request.body()}') + + response = await call_next(request) + # Disabled by default. Can hang the test if enabled. + # response.body_iterator = PrintingAsyncGenerator(request.url, response.body_iterator) + + print('-' * 80) + return response diff --git a/tests/integration/cross_version/client_server/test_client_server.py b/tests/integration/cross_version/client_server/test_client_server.py new file mode 100644 index 000000000..e65aa185b --- /dev/null +++ b/tests/integration/cross_version/client_server/test_client_server.py @@ -0,0 +1,250 @@ +import os +import shutil +import socket +import subprocess +import time + +import pytest +import select +import signal + + +def get_free_port(): + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s: + s.bind(('127.0.0.1', 0)) + return s.getsockname()[1] + + +def wait_for_port(proc: subprocess.Popen, proc_name: str, port, timeout=5.0): + start_time = time.time() + while time.time() - start_time < timeout: + print( + f'Waiting for port {port} to be available for {timeout - (time.time() - start_time)} seconds...' + ) + try: + if proc.poll() is not None: + print( + f'Process {proc_name} died before port {port} was available' + ) + return False + with socket.create_connection(('127.0.0.1', port), timeout=0.1): + return True + except OSError: + time.sleep(0.1) + return False + + +def get_env(script: str) -> dict[str, str]: + new_env = os.environ.copy() + new_env['PYTHONUNBUFFERED'] = '1' + if '_1_0.py' in script: + new_env['PYTHONPATH'] = ( + os.path.abspath('src') + ':' + new_env.get('PYTHONPATH', '') + ) + return new_env + + +def finalize_process( + proc: subprocess.Popen, + name: str, + expected_return_code=None, + timeout: float = 5.0, +): + failure = False + if expected_return_code is not None: + try: + print(f'Waiting for process {name} to finish...') + if proc.wait(timeout=timeout) != expected_return_code: + print( + f'Process {name} returned code {proc.returncode}, expected {expected_return_code}' + ) + failure = True + except subprocess.TimeoutExpired: + print(f'Process {name} timed out after {timeout} seconds') + os.killpg(os.getpgid(proc.pid), signal.SIGTERM) + failure = True + else: + if proc.poll() is None: + os.killpg(os.getpgid(proc.pid), signal.SIGTERM) + else: + print(f'Process {name} already terminated!') + failure = True + + try: + proc.wait(timeout=2) + except subprocess.TimeoutExpired: + os.killpg(os.getpgid(proc.pid), signal.SIGKILL) + + print(f'Process {name} finished with code {proc.wait()}') + + stdout_text, stderr_text = proc.communicate(timeout=3.0) + + print('-' * 80) + print(f'Process {name} STDOUT:\n{stdout_text}') + print('-' * 80) + print(f'Process {name} STDERR:\n{stderr_text}') + print('-' * 80) + if failure: + pytest.fail(f'Process {name} failed.') + + +@pytest.fixture(scope='session') +def running_servers(): + uv_path = shutil.which('uv') + if not os.path.exists(uv_path): + pytest.fail(f"Could not find 'uv' executable at {uv_path}") + + # Server 1.0 setup + s10_http_port = get_free_port() + s10_grpc_port = get_free_port() + s10_deps = ['--with', 'uvicorn', '--with', 'fastapi', '--with', 'grpcio'] + s10_cmd = ( + [uv_path, 'run'] + + s10_deps + + [ + 'python', + 'tests/integration/cross_version/client_server/server_1_0.py', + '--http-port', + str(s10_http_port), + '--grpc-port', + str(s10_grpc_port), + ] + ) + s10_proc = subprocess.Popen( + s10_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=get_env('server_1_0.py'), + text=True, + start_new_session=True, + ) + + # Server 0.3 setup + s03_http_port = get_free_port() + s03_grpc_port = get_free_port() + s03_deps = [ + '--with', + 'a2a-sdk[grpc]==0.3.24', + '--with', + 'uvicorn', + '--with', + 'fastapi', + '--no-project', + ] + s03_cmd = ( + [uv_path, 'run'] + + s03_deps + + [ + 'python', + 'tests/integration/cross_version/client_server/server_0_3.py', + '--http-port', + str(s03_http_port), + '--grpc-port', + str(s03_grpc_port), + ] + ) + s03_proc = subprocess.Popen( + s03_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=get_env('server_0_3.py'), + text=True, + start_new_session=True, + ) + + try: + # Wait for ports + assert wait_for_port( + s10_proc, 'server_1_0.py', s10_http_port, timeout=3.0 + ), 'Server 1.0 HTTP failed to start' + assert wait_for_port( + s10_proc, 'server_1_0.py', s10_grpc_port, timeout=3.0 + ), 'Server 1.0 GRPC failed to start' + assert wait_for_port( + s03_proc, 'server_0_3.py', s03_http_port, timeout=3.0 + ), 'Server 0.3 HTTP failed to start' + assert wait_for_port( + s03_proc, 'server_0_3.py', s03_grpc_port, timeout=3.0 + ), 'Server 0.3 GRPC failed to start' + + print('SERVER READY') + + yield { + 'server_1_0.py': s10_http_port, + 'server_0_3.py': s03_http_port, + 'uv_path': uv_path, + 'procs': {'server_1_0.py': s10_proc, 'server_0_3.py': s03_proc}, + } + + finally: + print('SERVER CLEANUP') + for proc, name in [ + (s03_proc, 'server_0_3.py'), + (s10_proc, 'server_1_0.py'), + ]: + finalize_process(proc, name) + + +@pytest.mark.timeout(15) +@pytest.mark.parametrize( + 'server_script, client_script, client_deps, protocols', + [ + # Run 0.3 Server <-> 0.3 Client + ( + 'server_0_3.py', + 'client_0_3.py', + ['--with', 'a2a-sdk[grpc]==0.3.24', '--no-project'], + ['grpc', 'jsonrpc', 'rest'], + ), + # Run 1.0 Server <-> 0.3 Client + ( + 'server_1_0.py', + 'client_0_3.py', + ['--with', 'a2a-sdk[grpc]==0.3.24', '--no-project'], + ['grpc', 'jsonrpc', 'rest'], + ), + # Run 1.0 Server <-> 1.0 Client + ( + 'server_1_0.py', + 'client_1_0.py', + [], + ['grpc', 'jsonrpc', 'rest'], + ), + # Run 0.3 Server <-> 1.0 Client + ( + 'server_0_3.py', + 'client_1_0.py', + [], + ['grpc', 'jsonrpc', 'rest'], + ), + ], +) +def test_cross_version( + running_servers, server_script, client_script, client_deps, protocols +): + http_port = running_servers[server_script] + uv_path = running_servers['uv_path'] + + card_url = f'http://127.0.0.1:{http_port}/jsonrpc/' + client_cmd = ( + [uv_path, 'run'] + + client_deps + + [ + 'python', + f'tests/integration/cross_version/client_server/{client_script}', + '--url', + card_url, + '--protocols', + ] + + protocols + ) + + client_result = subprocess.Popen( + client_cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=get_env(client_script), + text=True, + start_new_session=True, + ) + finalize_process(client_result, client_script, 0) diff --git a/tests/integration/cross_version/test_cross_version_card_validation.py b/tests/integration/cross_version/test_cross_version_card_validation.py new file mode 100644 index 000000000..25972b075 --- /dev/null +++ b/tests/integration/cross_version/test_cross_version_card_validation.py @@ -0,0 +1,199 @@ +import json +import subprocess + +from a2a.server.request_handlers.response_helpers import agent_card_to_dict +from a2a.types.a2a_pb2 import ( + APIKeySecurityScheme, + AgentCapabilities, + AgentCard, + AgentInterface, + AgentSkill, + AuthorizationCodeOAuthFlow, + HTTPAuthSecurityScheme, + MutualTlsSecurityScheme, + OAuth2SecurityScheme, + OAuthFlows, + OpenIdConnectSecurityScheme, + SecurityRequirement, + SecurityScheme, + StringList, +) +from a2a.client.card_resolver import parse_agent_card +from google.protobuf.json_format import MessageToDict, ParseDict + + +def test_cross_version_agent_card_deserialization() -> None: + # 1. Complex card + complex_card = AgentCard( + name='Complex Agent 0.3', + description='A very complex agent from 0.3.0', + version='1.5.2', + capabilities=AgentCapabilities( + extended_agent_card=True, streaming=True, push_notifications=True + ), + default_input_modes=['text/plain', 'application/json'], + default_output_modes=['application/json', 'image/png'], + supported_interfaces=[ + AgentInterface( + url='http://complex.agent.example.com/api', + protocol_binding='HTTP+JSON', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://complex.agent.example.com/grpc', + protocol_binding='GRPC', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://complex.agent.example.com/jsonrpc', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ), + ], + security_requirements=[ + SecurityRequirement( + schemes={ + 'test_oauth': StringList(list=['read', 'write']), + 'test_api_key': StringList(), + } + ), + SecurityRequirement(schemes={'test_http': StringList()}), + SecurityRequirement( + schemes={'test_oidc': StringList(list=['openid', 'profile'])} + ), + SecurityRequirement(schemes={'test_mtls': StringList()}), + ], + security_schemes={ + 'test_oauth': SecurityScheme( + oauth2_security_scheme=OAuth2SecurityScheme( + description='OAuth2 authentication', + flows=OAuthFlows( + authorization_code=AuthorizationCodeOAuthFlow( + authorization_url='http://auth.example.com', + token_url='http://token.example.com', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ), + ) + ), + 'test_api_key': SecurityScheme( + api_key_security_scheme=APIKeySecurityScheme( + description='API Key auth', + location='header', + name='X-API-KEY', + ) + ), + 'test_http': SecurityScheme( + http_auth_security_scheme=HTTPAuthSecurityScheme( + description='HTTP Basic auth', + scheme='basic', + bearer_format='JWT', + ) + ), + 'test_oidc': SecurityScheme( + open_id_connect_security_scheme=OpenIdConnectSecurityScheme( + description='OIDC Auth', + open_id_connect_url='https://example.com/.well-known/openid-configuration', + ) + ), + 'test_mtls': SecurityScheme( + mtls_security_scheme=MutualTlsSecurityScheme( + description='mTLS Auth' + ) + ), + }, + skills=[ + AgentSkill( + id='skill-1', + name='Complex Skill 1', + description='The first complex skill', + tags=['example', 'complex'], + input_modes=['application/json'], + output_modes=['application/json'], + security_requirements=[ + SecurityRequirement(schemes={'test_api_key': StringList()}) + ], + ), + AgentSkill( + id='skill-2', + name='Complex Skill 2', + description='The second complex skill', + tags=['example2'], + security_requirements=[ + SecurityRequirement( + schemes={'test_oidc': StringList(list=['openid'])} + ) + ], + ), + ], + ) + + # 2. Minimal card + minimal_card = AgentCard( + name='Minimal Agent', + supported_interfaces=[ + AgentInterface( + url='http://minimal.example.com', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ) + ], + ) + + # 3. Serialize both + payload = { + 'complex': json.dumps(agent_card_to_dict(complex_card)), + 'minimal': json.dumps(agent_card_to_dict(minimal_card)), + } + payload_json = json.dumps(payload) + + # 4. Feed it to the 0.3.24 SDK subprocess + result = subprocess.run( + [ # noqa: S607 + 'uv', + 'run', + '--with', + 'a2a-sdk==0.3.24', + '--no-project', + 'python', + 'tests/integration/cross_version/validate_agent_cards_030.py', + ], + input=payload_json, + capture_output=True, + text=True, + check=True, + ) + + # 5. Parse the response + payload_v030 = json.loads(result.stdout) + print(payload_v030['complex']) + cards_v030 = { + key: parse_agent_card(json.loads(card_json)) + for key, card_json in payload_v030.items() + } + + # 6. Validate the parsed cards from 0.3 + def _remove_empty_capabilities(card): + if card['capabilities'] == {}: + card.pop('capabilities') + return card + + assert _remove_empty_capabilities( + MessageToDict(cards_v030['minimal']) + ) == MessageToDict(minimal_card) + assert MessageToDict(cards_v030['complex']) == MessageToDict(complex_card) + + # 7. Validate parsing of 1.0 cards with ParseDict + cards_v100 = { + key: ParseDict( + json.loads(card_json), AgentCard(), ignore_unknown_fields=True + ) + for key, card_json in payload.items() + } + assert _remove_empty_capabilities( + MessageToDict(cards_v100['minimal']) + ) == MessageToDict(minimal_card) + assert MessageToDict(cards_v100['complex']) == MessageToDict(complex_card) diff --git a/tests/integration/cross_version/validate_agent_cards_030.py b/tests/integration/cross_version/validate_agent_cards_030.py new file mode 100644 index 000000000..75d55aeaf --- /dev/null +++ b/tests/integration/cross_version/validate_agent_cards_030.py @@ -0,0 +1,160 @@ +"""This is a script used by test_cross_version_card_validation.py. + +It is run in a subprocess with a SDK version 0.3. +Steps: +1. Read the serialized JSON payload from stdin. +2. Validate the AgentCards with 0.3.24. +3. Print re-serialized AgentCards to stdout. +""" + +import sys +import json +from a2a.types import ( + AgentCard, + AgentCapabilities, + AgentInterface, + AgentSkill, + APIKeySecurityScheme, + HTTPAuthSecurityScheme, + MutualTLSSecurityScheme, + OAuth2SecurityScheme, + OAuthFlows, + AuthorizationCodeOAuthFlow, + OpenIdConnectSecurityScheme, +) + + +def validate_complex_card(card: AgentCard) -> None: + expected_card = AgentCard( + name='Complex Agent 0.3', + description='A very complex agent from 0.3.0', + version='1.5.2', + protocolVersion='0.3.0', + supportsAuthenticatedExtendedCard=True, + capabilities=AgentCapabilities(streaming=True, pushNotifications=True), + url='http://complex.agent.example.com/api', + preferredTransport='HTTP+JSON', + additionalInterfaces=[ + AgentInterface( + url='http://complex.agent.example.com/grpc', + transport='GRPC', + ), + AgentInterface( + url='http://complex.agent.example.com/jsonrpc', + transport='JSONRPC', + ), + ], + defaultInputModes=['text/plain', 'application/json'], + defaultOutputModes=['application/json', 'image/png'], + security=[ + {'test_oauth': ['read', 'write'], 'test_api_key': []}, + {'test_http': []}, + {'test_oidc': ['openid', 'profile']}, + {'test_mtls': []}, + ], + securitySchemes={ + 'test_oauth': OAuth2SecurityScheme( + type='oauth2', + description='OAuth2 authentication', + flows=OAuthFlows( + authorizationCode=AuthorizationCodeOAuthFlow( + authorizationUrl='http://auth.example.com', + tokenUrl='http://token.example.com', + scopes={ + 'read': 'Read access', + 'write': 'Write access', + }, + ) + ), + ), + 'test_api_key': APIKeySecurityScheme( + type='apiKey', + description='API Key auth', + in_='header', + name='X-API-KEY', + ), + 'test_http': HTTPAuthSecurityScheme( + type='http', + description='HTTP Basic auth', + scheme='basic', + bearerFormat='JWT', + ), + 'test_oidc': OpenIdConnectSecurityScheme( + type='openIdConnect', + description='OIDC Auth', + openIdConnectUrl='https://example.com/.well-known/openid-configuration', + ), + 'test_mtls': MutualTLSSecurityScheme( + type='mutualTLS', description='mTLS Auth' + ), + }, + skills=[ + AgentSkill( + id='skill-1', + name='Complex Skill 1', + description='The first complex skill', + tags=['example', 'complex'], + inputModes=['application/json'], + outputModes=['application/json'], + security=[{'test_api_key': []}], + ), + AgentSkill( + id='skill-2', + name='Complex Skill 2', + description='The second complex skill', + tags=['example2'], + security=[{'test_oidc': ['openid']}], + ), + ], + ) + + assert card == expected_card + + +def validate_minimal_card(card: AgentCard) -> None: + expected_card = AgentCard( + name='Minimal Agent', + description='', + version='', + protocolVersion='0.3.0', + capabilities=AgentCapabilities(), + url='http://minimal.example.com', + preferredTransport='JSONRPC', + defaultInputModes=[], + defaultOutputModes=[], + skills=[], + ) + + assert card == expected_card + + +def main() -> None: + # Read the serialized JSON payload from stdin + input_text = sys.stdin.read().strip() + if not input_text: + sys.exit(1) + + try: + input_dict = json.loads(input_text) + + complex_card = AgentCard.model_validate_json(input_dict['complex']) + validate_complex_card(complex_card) + + minimal_card = AgentCard.model_validate_json(input_dict['minimal']) + validate_minimal_card(minimal_card) + + payload = { + 'complex': complex_card.model_dump_json(), + 'minimal': minimal_card.model_dump_json(), + } + print(json.dumps(payload)) + + except Exception as e: + print( + f'Failed to validate AgentCards with 0.3.24: {e}', file=sys.stderr + ) + sys.exit(1) + + +if __name__ == '__main__': + main() diff --git a/tests/integration/test_agent_card.py b/tests/integration/test_agent_card.py new file mode 100644 index 000000000..afa1078f0 --- /dev/null +++ b/tests/integration/test_agent_card.py @@ -0,0 +1,134 @@ +import httpx +import pytest + +from fastapi import FastAPI + +from a2a.server.agent_execution import AgentExecutor, RequestContext +from starlette.applications import Starlette +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, +) +from a2a.utils.constants import VERSION_HEADER, TransportProtocol + + +class DummyAgentExecutor(AgentExecutor): + """An agent executor that does nothing for integration testing.""" + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + pass + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + pass + + +@pytest.mark.asyncio +@pytest.mark.parametrize('header_val', [None, '0.3', '1.0', '1.2', 'INVALID']) +async def test_agent_card_integration(header_val: str | None) -> None: + """Tests that the agent card is correctly served via REST and JSONRPC.""" + # 1. Define AgentCard + agent_card = AgentCard( + name='Test Agent', + description='An agent for testing agent card serving.', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url='http://localhost/jsonrpc/', + ), + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url='http://localhost/rest/', + ), + ], + ) + + # 2. Setup Server + task_store = InMemoryTaskStore() + handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=task_store, + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + push_config_store=InMemoryPushNotificationConfigStore(), + ) + app = FastAPI() + + # Mount JSONRPC application + jsonrpc_routes = [ + *create_agent_card_routes( + agent_card=agent_card, card_url='/.well-known/agent-card.json' + ), + *create_jsonrpc_routes(request_handler=handler, rpc_url='/'), + ] + jsonrpc_app = Starlette(routes=jsonrpc_routes) + app.mount('/jsonrpc', jsonrpc_app) + + rest_routes = [ + *create_agent_card_routes( + agent_card=agent_card, card_url='/.well-known/agent-card.json' + ), + *create_rest_routes(request_handler=handler), + ] + rest_app = Starlette(routes=rest_routes) + app.mount('/rest', rest_app) + + expected_content = { + 'name': 'Test Agent', + 'description': 'An agent for testing agent card serving.', + 'supportedInterfaces': [ + {'url': 'http://localhost/jsonrpc/', 'protocolBinding': 'JSONRPC'}, + {'url': 'http://localhost/rest/', 'protocolBinding': 'HTTP+JSON'}, + ], + 'version': '1.0.0', + 'capabilities': {'streaming': True, 'pushNotifications': True}, + 'defaultInputModes': ['text/plain'], + 'defaultOutputModes': ['text/plain'], + 'additionalInterfaces': [ + {'transport': 'HTTP+JSON', 'url': 'http://localhost/rest/'} + ], + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3', + 'skills': [], + 'url': 'http://localhost/jsonrpc/', + } + + headers = {} + if header_val is not None: + headers[VERSION_HEADER] = header_val + + # 3. Use direct http client (ASGITransport) to fetch and assert + async with httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), base_url='http://testserver' + ) as client: + # Fetch from JSONRPC endpoint + resp_jsonrpc = await client.get( + '/jsonrpc/.well-known/agent-card.json', headers=headers + ) + assert resp_jsonrpc.status_code == 200 + assert resp_jsonrpc.json() == expected_content + + # Fetch from REST endpoint + resp_rest = await client.get( + '/rest/.well-known/agent-card.json', headers=headers + ) + assert resp_rest.status_code == 200 + assert resp_rest.json() == expected_content diff --git a/tests/integration/test_client_server_integration.py b/tests/integration/test_client_server_integration.py index a97973eba..1711ac810 100644 --- a/tests/integration/test_client_server_integration.py +++ b/tests/integration/test_client_server_integration.py @@ -1,95 +1,137 @@ import asyncio + from collections.abc import AsyncGenerator -from typing import NamedTuple, Any +from typing import Any, NamedTuple from unittest.mock import ANY, AsyncMock, patch import grpc import httpx import pytest import pytest_asyncio -from grpc.aio import Channel -from jwt.api_jwk import PyJWK -from a2a.client import ClientConfig +from cryptography.hazmat.primitives.asymmetric import ec +from google.protobuf.json_format import MessageToDict +from google.protobuf.timestamp_pb2 import Timestamp +from starlette.applications import Starlette + +from a2a.client import Client, ClientConfig from a2a.client.base_client import BaseClient +from a2a.client.card_resolver import A2ACardResolver +from a2a.client.client import ClientCallContext +from a2a.client.client_factory import ClientFactory +from a2a.client.service_parameters import ( + ServiceParametersFactory, + with_a2a_extensions, +) from a2a.client.transports import JsonRpcTransport, RestTransport -from a2a.client.transports.base import ClientTransport -from a2a.client.transports.grpc import GrpcTransport -from a2a.grpc import a2a_pb2_grpc -from a2a.server.apps import A2AFastAPIApplication, A2ARESTFastAPIApplication + +# Compat v0.3 imports for dedicated tests +from a2a.compat.v0_3 import a2a_v0_3_pb2_grpc +from a2a.compat.v0_3.grpc_handler import CompatGrpcHandler from a2a.server.request_handlers import GrpcHandler, RequestHandler -from a2a.utils.signing import ( - create_agent_card_signer, - create_signature_verifier, +from a2a.server.routes import ( + create_agent_card_routes, + create_jsonrpc_routes, + create_rest_routes, +) +from a2a.server.request_handlers.default_request_handler import ( + LegacyRequestHandler, ) -from a2a.types import ( +from a2a.types import a2a_pb2_grpc +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, AgentInterface, - GetTaskPushNotificationConfigParams, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTaskPushNotificationConfigsResponse, + ListTasksRequest, + ListTasksResponse, Message, - MessageSendParams, Part, - PushNotificationConfig, Role, + SendMessageRequest, + SubscribeToTaskRequest, Task, - TaskIdParams, TaskPushNotificationConfig, - TaskQueryParams, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, +) +from a2a.utils.constants import ( + PROTOCOL_VERSION_CURRENT, + VERSION_HEADER, TransportProtocol, ) -from cryptography.hazmat.primitives import asymmetric +from a2a.utils.errors import ( + ContentTypeNotSupportedError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + InvalidAgentResponseError, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, + UnsupportedOperationError, + VersionNotSupportedError, +) +from a2a.utils.signing import ( + create_agent_card_signer, + create_signature_verifier, +) + # --- Test Constants --- TASK_FROM_STREAM = Task( id='task-123-stream', context_id='ctx-456-stream', - status=TaskStatus(state=TaskState.completed), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), ) TASK_FROM_BLOCKING = Task( id='task-789-blocking', context_id='ctx-101-blocking', - status=TaskStatus(state=TaskState.completed), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), ) GET_TASK_RESPONSE = Task( id='task-get-456', context_id='ctx-get-789', - status=TaskStatus(state=TaskState.working), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) CANCEL_TASK_RESPONSE = Task( id='task-cancel-789', context_id='ctx-cancel-101', - status=TaskStatus(state=TaskState.canceled), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_CANCELED), ) CALLBACK_CONFIG = TaskPushNotificationConfig( task_id='task-callback-123', - push_notification_config=PushNotificationConfig( - id='pnc-abc', url='http://callback.example.com', token='' - ), + id='pnc-abc', + url='http://callback.example.com', + token='', ) RESUBSCRIBE_EVENT = TaskStatusUpdateEvent( task_id='task-resub-456', context_id='ctx-resub-789', - status=TaskStatus(state=TaskState.working), - final=False, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), +) + +LIST_TASKS_RESPONSE = ListTasksResponse( + tasks=[TASK_FROM_BLOCKING, GET_TASK_RESPONSE], + next_page_token='page-2', + total_size=12, + page_size=10, ) -def create_key_provider(verification_key: PyJWK | str | bytes): +def create_key_provider(verification_key: Any): """Creates a key provider function for testing.""" def key_provider(kid: str | None, jku: str | None): @@ -102,11 +144,12 @@ def key_provider(kid: str | None, jku: str | None): @pytest.fixture -def mock_request_handler() -> AsyncMock: +def mock_request_handler(agent_card) -> AsyncMock: """Provides a mock RequestHandler for the server-side handlers.""" handler = AsyncMock(spec=RequestHandler) # Configure on_message_send for non-streaming calls + handler._agent_card = agent_card handler.on_message_send.return_value = TASK_FROM_BLOCKING # Configure on_message_send_stream for streaming calls @@ -118,15 +161,28 @@ async def stream_side_effect(*args, **kwargs): # Configure other methods handler.on_get_task.return_value = GET_TASK_RESPONSE handler.on_cancel_task.return_value = CANCEL_TASK_RESPONSE - handler.on_set_task_push_notification_config.side_effect = ( - lambda params, context: params + handler.on_list_tasks.return_value = LIST_TASKS_RESPONSE + handler.on_create_task_push_notification_config.return_value = ( + CALLBACK_CONFIG ) handler.on_get_task_push_notification_config.return_value = CALLBACK_CONFIG + handler.on_list_task_push_notification_configs.return_value = ( + ListTaskPushNotificationConfigsResponse(configs=[CALLBACK_CONFIG]) + ) + handler.on_delete_task_push_notification_config.return_value = None + + # Use async def to ensure it returns an awaitable + async def get_extended_agent_card_mock(*args, **kwargs): + return agent_card + + handler.on_get_extended_agent_card.side_effect = ( + get_extended_agent_card_mock # type: ignore[union-attr] + ) async def resubscribe_side_effect(*args, **kwargs): yield RESUBSCRIBE_EVENT - handler.on_resubscribe_to_task.side_effect = resubscribe_side_effect + handler.on_subscribe_to_task.side_effect = resubscribe_side_effect return handler @@ -137,30 +193,34 @@ def agent_card() -> AgentCard: return AgentCard( name='Test Agent', description='An agent for integration testing.', - url='http://testserver', version='1.0.0', - capabilities=AgentCapabilities(streaming=True, push_notifications=True), + capabilities=AgentCapabilities( + streaming=True, push_notifications=True, extended_agent_card=True + ), skills=[], default_input_modes=['text/plain'], default_output_modes=['text/plain'], - preferred_transport=TransportProtocol.jsonrpc, - supports_authenticated_extended_card=False, - additional_interfaces=[ + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url='http://testserver', + ), AgentInterface( - transport=TransportProtocol.http_json, url='http://testserver' + protocol_binding=TransportProtocol.JSONRPC, + url='http://testserver', ), AgentInterface( - transport=TransportProtocol.grpc, url='localhost:50051' + protocol_binding=TransportProtocol.GRPC, url='localhost:50051' ), ], ) class TransportSetup(NamedTuple): - """Holds the transport and handler for a given test.""" + """Holds the client and handler for a given test.""" - transport: ClientTransport - handler: AsyncMock + client: Client + handler: RequestHandler | AsyncMock # --- HTTP/JSON-RPC/REST Setup --- @@ -179,616 +239,383 @@ def http_base_setup(mock_request_handler: AsyncMock, agent_card: AgentCard): def jsonrpc_setup(http_base_setup) -> TransportSetup: """Sets up the JsonRpcTransport and in-memory server.""" mock_request_handler, agent_card = http_base_setup - app_builder = A2AFastAPIApplication( - agent_card, mock_request_handler, extended_agent_card=agent_card + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' ) - app = app_builder.build() + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_request_handler, rpc_url='/' + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) - transport = JsonRpcTransport( - httpx_client=httpx_client, agent_card=agent_card + factory = ClientFactory( + config=ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) ) - return TransportSetup(transport=transport, handler=mock_request_handler) + client = factory.create(agent_card) + return TransportSetup(client=client, handler=mock_request_handler) @pytest.fixture def rest_setup(http_base_setup) -> TransportSetup: """Sets up the RestTransport and in-memory server.""" mock_request_handler, agent_card = http_base_setup - app_builder = A2ARESTFastAPIApplication(agent_card, mock_request_handler) - app = app_builder.build() + rest_routes = create_rest_routes(mock_request_handler) + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + app = Starlette(routes=[*rest_routes, *agent_card_routes]) httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) - transport = RestTransport(httpx_client=httpx_client, agent_card=agent_card) - return TransportSetup(transport=transport, handler=mock_request_handler) - - -# --- gRPC Setup --- + factory = ClientFactory( + config=ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], + ) + ) + client = factory.create(agent_card) + return TransportSetup(client=client, handler=mock_request_handler) @pytest_asyncio.fixture -async def grpc_server_and_handler( - mock_request_handler: AsyncMock, agent_card: AgentCard -) -> AsyncGenerator[tuple[str, AsyncMock], None]: - """Creates and manages an in-process gRPC test server.""" - server = grpc.aio.server() - port = server.add_insecure_port('[::]:0') - server_address = f'localhost:{port}' - servicer = GrpcHandler(agent_card, mock_request_handler) - a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) - await server.start() - yield server_address, mock_request_handler - await server.stop(0) - +async def grpc_setup( + grpc_server_and_handler: tuple[str, AsyncMock], + agent_card: AgentCard, +) -> TransportSetup: + """Sets up the GrpcTransport and in-process server.""" + server_address, handler = grpc_server_and_handler -# --- The Integration Tests --- + # Update the gRPC interface dynamically based on the assigned port + for interface in agent_card.supported_interfaces: + if interface.protocol_binding == TransportProtocol.GRPC: + interface.url = server_address + break + else: + raise ValueError('No gRPC interface found in agent card') + + factory = ClientFactory( + config=ClientConfig( + grpc_channel_factory=grpc.aio.insecure_channel, + supported_protocol_bindings=[TransportProtocol.GRPC], + ) + ) + client = factory.create(agent_card) + return TransportSetup(client=client, handler=handler) -@pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ +@pytest.fixture( + params=[ pytest.param('jsonrpc_setup', id='JSON-RPC'), pytest.param('rest_setup', id='REST'), - ], + pytest.param('grpc_setup', id='gRPC'), + ] ) -async def test_http_transport_sends_message_streaming( - transport_setup_fixture: str, request -) -> None: - """ - Integration test for HTTP-based transports (JSON-RPC, REST) streaming. - """ - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler +def transport_setups(request) -> TransportSetup: + """Parametrized fixture that runs tests against all supported transports.""" + return request.getfixturevalue(request.param) - message_to_send = Message( - role=Role.user, - message_id='msg-integration-test', - parts=[Part(root=TextPart(text='Hello, integration test!'))], - ) - params = MessageSendParams(message=message_to_send) - stream = transport.send_message_streaming(request=params) - first_event = await anext(stream) +@pytest.fixture( + params=[ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + pytest.param('grpc_setup', id='gRPC'), + pytest.param('grpc_03_setup', id='gRPC-0.3'), + ] +) +def error_handling_setups(request) -> TransportSetup: + """Parametrized fixture for error tests including compat 0.3 endpoint verification.""" + return request.getfixturevalue(request.param) - assert first_event.id == TASK_FROM_STREAM.id - assert first_event.context_id == TASK_FROM_STREAM.context_id - handler.on_message_send_stream.assert_called_once() - call_args, _ = handler.on_message_send_stream.call_args - received_params: MessageSendParams = call_args[0] +@pytest.fixture( + params=[ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('rest_setup', id='REST'), + ] +) +def http_transport_setups(request) -> TransportSetup: + """Parametrized fixture that runs tests against HTTP-based transports only.""" + return request.getfixturevalue(request.param) - assert received_params.message.message_id == message_to_send.message_id - assert ( - received_params.message.parts[0].root.text - == message_to_send.parts[0].root.text - ) - if hasattr(transport, 'close'): - await transport.close() +# --- gRPC Setup --- -@pytest.mark.asyncio -async def test_grpc_transport_sends_message_streaming( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, -) -> None: - """ - Integration test specifically for the gRPC transport streaming. - """ - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) +@pytest_asyncio.fixture +async def grpc_server_and_handler( + mock_request_handler: AsyncMock, agent_card: AgentCard +) -> AsyncGenerator[tuple[str, AsyncMock], None]: + """Creates and manages an in-process gRPC test server.""" + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + servicer = GrpcHandler(request_handler=mock_request_handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + try: + yield server_address, mock_request_handler + finally: + await server.stop(None) - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - message_to_send = Message( - role=Role.user, - message_id='msg-grpc-integration-test', - parts=[Part(root=TextPart(text='Hello, gRPC integration test!'))], +@pytest_asyncio.fixture +async def grpc_03_server_and_handler( + mock_request_handler: AsyncMock, agent_card: AgentCard +) -> AsyncGenerator[tuple[str, AsyncMock], None]: + """Creates and manages an in-process v0.3 compat gRPC test server.""" + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + servicer = CompatGrpcHandler( + request_handler=mock_request_handler, ) - params = MessageSendParams(message=message_to_send) - - stream = transport.send_message_streaming(request=params) - first_event = await anext(stream) + a2a_v0_3_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + try: + yield server_address, mock_request_handler + finally: + await server.stop(None) - assert first_event.id == TASK_FROM_STREAM.id - assert first_event.context_id == TASK_FROM_STREAM.context_id - handler.on_message_send_stream.assert_called_once() - call_args, _ = handler.on_message_send_stream.call_args - received_params: MessageSendParams = call_args[0] +@pytest.fixture +def grpc_03_setup( + grpc_03_server_and_handler, agent_card: AgentCard +) -> TransportSetup: + """Sets up the CompatGrpcTransport and in-process 0.3 server.""" + server_address, handler = grpc_03_server_and_handler + from a2a.client.base_client import BaseClient + from a2a.client.client import ClientConfig + from a2a.compat.v0_3.grpc_transport import CompatGrpcTransport + + channel = grpc.aio.insecure_channel(server_address) + transport = CompatGrpcTransport(channel=channel, agent_card=agent_card) - assert received_params.message.message_id == message_to_send.message_id - assert ( - received_params.message.parts[0].root.text - == message_to_send.parts[0].root.text + client = BaseClient( + card=agent_card, + config=ClientConfig(), + transport=transport, + interceptors=[], ) + return TransportSetup(client=client, handler=handler) - await transport.close() + +# --- The Integration Tests --- @pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_sends_message_blocking( - transport_setup_fixture: str, request -) -> None: - """ - Integration test for HTTP-based transports (JSON-RPC, REST) blocking. - """ - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler +async def test_client_sends_message_streaming(transport_setups) -> None: + """Integration test for all transports streaming.""" + client = transport_setups.client + handler = transport_setups.handler message_to_send = Message( - role=Role.user, - message_id='msg-integration-test-blocking', - parts=[Part(root=TextPart(text='Hello, blocking test!'))], + role=Role.ROLE_USER, + message_id='msg-integration-test', + parts=[Part(text='Hello, integration test!')], ) - params = MessageSendParams(message=message_to_send) - - result = await transport.send_message(request=params) + params = SendMessageRequest(message=message_to_send) - assert result.id == TASK_FROM_BLOCKING.id - assert result.context_id == TASK_FROM_BLOCKING.context_id + stream = client.send_message(request=params) + events = [event async for event in stream] - handler.on_message_send.assert_awaited_once() - call_args, _ = handler.on_message_send.call_args - received_params: MessageSendParams = call_args[0] + assert len(events) == 1 + event = events[0] + task = event.task + assert task is not None + assert task.id == TASK_FROM_STREAM.id - assert received_params.message.message_id == message_to_send.message_id - assert ( - received_params.message.parts[0].root.text - == message_to_send.parts[0].root.text - ) + handler.on_message_send_stream.assert_called_once_with(params, ANY) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_sends_message_blocking( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, -) -> None: - """ - Integration test specifically for the gRPC transport blocking. - """ - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) +async def test_client_sends_message_blocking(transport_setups) -> None: + """Integration test for all transports blocking.""" + client = transport_setups.client + handler = transport_setups.handler - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) + # Disable streaming to force blocking call + assert isinstance(client, BaseClient) + client._config.streaming = False message_to_send = Message( - role=Role.user, - message_id='msg-grpc-integration-test-blocking', - parts=[Part(root=TextPart(text='Hello, gRPC blocking test!'))], + role=Role.ROLE_USER, + message_id='msg-integration-test-blocking', + parts=[Part(text='Hello, blocking test!')], ) - params = MessageSendParams(message=message_to_send) - extensions = ['ext-1', 'ext-2'] + params = SendMessageRequest(message=message_to_send) - result = await transport.send_message(request=params, extensions=extensions) + events = [event async for event in client.send_message(request=params)] - assert result.id == TASK_FROM_BLOCKING.id - assert result.context_id == TASK_FROM_BLOCKING.context_id + assert len(events) == 1 + event = events[0] + task = event.task + assert task is not None + assert task.id == TASK_FROM_BLOCKING.id + handler.on_message_send.assert_awaited_once_with(params, ANY) - handler.on_message_send.assert_awaited_once() - call_args, _ = handler.on_message_send.call_args - received_params: MessageSendParams = call_args[0] - received_context = call_args[1] - - assert received_params.message.message_id == message_to_send.message_id - assert ( - received_params.message.parts[0].root.text - == message_to_send.parts[0].root.text - ) - assert received_context.requested_extensions == set(extensions) - - await transport.close() + await client.close() @pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_get_task( - transport_setup_fixture: str, request -) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler +async def test_client_get_task(transport_setups) -> None: + client = transport_setups.client + handler = transport_setups.handler - params = TaskQueryParams(id=GET_TASK_RESPONSE.id) - result = await transport.get_task(request=params) + params = GetTaskRequest(id=GET_TASK_RESPONSE.id) + result = await client.get_task(request=params) assert result.id == GET_TASK_RESPONSE.id handler.on_get_task.assert_awaited_once_with(params, ANY) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_get_task( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, -) -> None: - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - params = TaskQueryParams(id=GET_TASK_RESPONSE.id) - result = await transport.get_task(request=params) +async def test_client_list_tasks(transport_setups) -> None: + client = transport_setups.client + handler = transport_setups.handler + + t = Timestamp() + t.FromJsonString('2024-03-09T16:00:00Z') + params = ListTasksRequest( + context_id='ctx-1', + status=TaskState.TASK_STATE_WORKING, + page_size=10, + page_token='page-1', + history_length=5, + status_timestamp_after=t, + include_artifacts=True, + ) + result = await client.list_tasks(request=params) - assert result.id == GET_TASK_RESPONSE.id - handler.on_get_task.assert_awaited_once() - assert handler.on_get_task.call_args[0][0].id == GET_TASK_RESPONSE.id + assert len(result.tasks) == 2 + assert result.next_page_token == 'page-2' + handler.on_list_tasks.assert_awaited_once_with(params, ANY) - await transport.close() + await client.close() @pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_cancel_task( - transport_setup_fixture: str, request -) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler +async def test_client_cancel_task(transport_setups) -> None: + client = transport_setups.client + handler = transport_setups.handler - params = TaskIdParams(id=CANCEL_TASK_RESPONSE.id) - result = await transport.cancel_task(request=params) + params = CancelTaskRequest(id=CANCEL_TASK_RESPONSE.id) + result = await client.cancel_task(request=params) assert result.id == CANCEL_TASK_RESPONSE.id handler.on_cancel_task.assert_awaited_once_with(params, ANY) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_cancel_task( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, +async def test_client_create_task_push_notification_config( + transport_setups, ) -> None: - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - params = TaskIdParams(id=CANCEL_TASK_RESPONSE.id) - result = await transport.cancel_task(request=params) - - assert result.id == CANCEL_TASK_RESPONSE.id - handler.on_cancel_task.assert_awaited_once() - assert handler.on_cancel_task.call_args[0][0].id == CANCEL_TASK_RESPONSE.id - - await transport.close() + client = transport_setups.client + handler = transport_setups.handler + params = TaskPushNotificationConfig(task_id='task-callback-123') + result = await client.create_task_push_notification_config(request=params) -@pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_set_task_callback( - transport_setup_fixture: str, request -) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler - - params = CALLBACK_CONFIG - result = await transport.set_task_callback(request=params) - - assert result.task_id == CALLBACK_CONFIG.task_id - assert ( - result.push_notification_config.id - == CALLBACK_CONFIG.push_notification_config.id - ) - assert ( - result.push_notification_config.url - == CALLBACK_CONFIG.push_notification_config.url - ) - handler.on_set_task_push_notification_config.assert_awaited_once_with( + assert result.id == CALLBACK_CONFIG.id + handler.on_create_task_push_notification_config.assert_awaited_once_with( params, ANY ) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_set_task_callback( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, -) -> None: - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - params = CALLBACK_CONFIG - result = await transport.set_task_callback(request=params) - - assert result.task_id == CALLBACK_CONFIG.task_id - assert ( - result.push_notification_config.id - == CALLBACK_CONFIG.push_notification_config.id - ) - assert ( - result.push_notification_config.url - == CALLBACK_CONFIG.push_notification_config.url - ) - handler.on_set_task_push_notification_config.assert_awaited_once() - assert ( - handler.on_set_task_push_notification_config.call_args[0][0].task_id - == CALLBACK_CONFIG.task_id - ) - - await transport.close() - - -@pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_get_task_callback( - transport_setup_fixture: str, request +async def test_client_get_task_push_notification_config( + transport_setups, ) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler + client = transport_setups.client + handler = transport_setups.handler - params = GetTaskPushNotificationConfigParams( - id=CALLBACK_CONFIG.task_id, - push_notification_config_id=CALLBACK_CONFIG.push_notification_config.id, + params = GetTaskPushNotificationConfigRequest( + task_id=CALLBACK_CONFIG.task_id, + id=CALLBACK_CONFIG.id, ) - result = await transport.get_task_callback(request=params) + result = await client.get_task_push_notification_config(request=params) - assert result.task_id == CALLBACK_CONFIG.task_id - assert ( - result.push_notification_config.id - == CALLBACK_CONFIG.push_notification_config.id - ) - assert ( - result.push_notification_config.url - == CALLBACK_CONFIG.push_notification_config.url - ) + assert result.id == CALLBACK_CONFIG.id handler.on_get_task_push_notification_config.assert_awaited_once_with( params, ANY ) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_get_task_callback( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, +async def test_client_list_task_push_notification_configs( + transport_setups, ) -> None: - server_address, handler = grpc_server_and_handler - agent_card.url = server_address + client = transport_setups.client + handler = transport_setups.handler - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - params = GetTaskPushNotificationConfigParams( - id=CALLBACK_CONFIG.task_id, - push_notification_config_id=CALLBACK_CONFIG.push_notification_config.id, + params = ListTaskPushNotificationConfigsRequest( + task_id=CALLBACK_CONFIG.task_id, ) - result = await transport.get_task_callback(request=params) + result = await client.list_task_push_notification_configs(request=params) - assert result.task_id == CALLBACK_CONFIG.task_id - assert ( - result.push_notification_config.id - == CALLBACK_CONFIG.push_notification_config.id - ) - assert ( - result.push_notification_config.url - == CALLBACK_CONFIG.push_notification_config.url - ) - handler.on_get_task_push_notification_config.assert_awaited_once() - assert ( - handler.on_get_task_push_notification_config.call_args[0][0].id - == CALLBACK_CONFIG.task_id + assert len(result.configs) == 1 + handler.on_list_task_push_notification_configs.assert_awaited_once_with( + params, ANY ) - await transport.close() + await client.close() @pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_resubscribe( - transport_setup_fixture: str, request +async def test_client_delete_task_push_notification_config( + transport_setups, ) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture - ) - transport = transport_setup.transport - handler = transport_setup.handler - - params = TaskIdParams(id=RESUBSCRIBE_EVENT.task_id) - stream = transport.resubscribe(request=params) - first_event = await anext(stream) + client = transport_setups.client + handler = transport_setups.handler - assert first_event.task_id == RESUBSCRIBE_EVENT.task_id - handler.on_resubscribe_to_task.assert_called_once_with(params, ANY) - - if hasattr(transport, 'close'): - await transport.close() - - -@pytest.mark.asyncio -async def test_grpc_transport_resubscribe( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, -) -> None: - server_address, handler = grpc_server_and_handler - agent_card.url = server_address - - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - params = TaskIdParams(id=RESUBSCRIBE_EVENT.task_id) - stream = transport.resubscribe(request=params) - first_event = await anext(stream) - - assert first_event.task_id == RESUBSCRIBE_EVENT.task_id - handler.on_resubscribe_to_task.assert_called_once() - assert ( - handler.on_resubscribe_to_task.call_args[0][0].id - == RESUBSCRIBE_EVENT.task_id + params = DeleteTaskPushNotificationConfigRequest( + task_id=CALLBACK_CONFIG.task_id, + id=CALLBACK_CONFIG.id, ) + await client.delete_task_push_notification_config(request=params) - await transport.close() - - -@pytest.mark.asyncio -@pytest.mark.parametrize( - 'transport_setup_fixture', - [ - pytest.param('jsonrpc_setup', id='JSON-RPC'), - pytest.param('rest_setup', id='REST'), - ], -) -async def test_http_transport_get_card( - transport_setup_fixture: str, request, agent_card: AgentCard -) -> None: - transport_setup: TransportSetup = request.getfixturevalue( - transport_setup_fixture + handler.on_delete_task_push_notification_config.assert_awaited_once_with( + params, ANY ) - transport = transport_setup.transport - # Get the base card. - result = await transport.get_card() - - assert result.name == agent_card.name - assert transport.agent_card.name == agent_card.name - assert transport._needs_extended_card is False - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_http_transport_get_authenticated_card( - agent_card: AgentCard, - mock_request_handler: AsyncMock, -) -> None: - agent_card.supports_authenticated_extended_card = True - extended_agent_card = agent_card.model_copy(deep=True) - extended_agent_card.name = 'Extended Agent Card' +async def test_client_subscribe(transport_setups) -> None: + client = transport_setups.client + handler = transport_setups.handler - app_builder = A2ARESTFastAPIApplication( - agent_card, - mock_request_handler, - extended_agent_card=extended_agent_card, - ) - app = app_builder.build() - httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + params = SubscribeToTaskRequest(id=RESUBSCRIBE_EVENT.task_id) + stream = client.subscribe(request=params) + first_event = await stream.__anext__() - transport = RestTransport(httpx_client=httpx_client, agent_card=agent_card) - result = await transport.get_card() - assert result.name == extended_agent_card.name - assert transport.agent_card is not None - assert transport.agent_card.name == extended_agent_card.name - assert transport._needs_extended_card is False + assert first_event.status_update.task_id == RESUBSCRIBE_EVENT.task_id + handler.on_subscribe_to_task.assert_called_once() - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_get_card( - grpc_server_and_handler: tuple[str, AsyncMock], - agent_card: AgentCard, +async def test_client_get_extended_agent_card( + transport_setups, agent_card ) -> None: - server_address, _ = grpc_server_and_handler - agent_card.url = server_address + client = transport_setups.client + result = await client.get_extended_agent_card(GetExtendedAgentCardRequest()) + # The result could be the original card or a slightly modified one depending on transport + assert result.name in [agent_card.name, 'Extended Agent Card'] - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) - - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - - # The transport starts with a minimal card, get_card() fetches the full one - assert transport.agent_card is not None - transport.agent_card.supports_authenticated_extended_card = True - result = await transport.get_card() - - assert result.name == agent_card.name - assert transport.agent_card.name == agent_card.name - assert transport._needs_extended_card is False - - await transport.close() + await client.close() @pytest.mark.asyncio @@ -798,7 +625,9 @@ async def test_json_transport_base_client_send_message_with_extensions( """ Integration test for BaseClient with JSON-RPC transport to ensure extensions are included in headers. """ - transport = jsonrpc_setup.transport + client_obj = jsonrpc_setup.client + assert isinstance(client_obj, BaseClient) + transport = client_obj._transport agent_card.capabilities.streaming = False # Create a BaseClient instance @@ -806,14 +635,13 @@ async def test_json_transport_base_client_send_message_with_extensions( card=agent_card, config=ClientConfig(streaming=False), transport=transport, - consumers=[], - middleware=[], + interceptors=[], ) message_to_send = Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg-integration-test-extensions', - parts=[Part(root=TextPart(text='Hello, extensions test!'))], + parts=[Part(text='Hello, extensions test!')], ) extensions = [ 'https://example.com/test-ext/v1', @@ -823,30 +651,37 @@ async def test_json_transport_base_client_send_message_with_extensions( with patch.object( transport, '_send_request', new_callable=AsyncMock ) as mock_send_request: + # Mock returns a JSON-RPC response with SendMessageResponse structure mock_send_request.return_value = { 'id': '123', 'jsonrpc': '2.0', - 'result': TASK_FROM_BLOCKING.model_dump(mode='json'), + 'result': {'task': MessageToDict(TASK_FROM_BLOCKING)}, } + service_params = ServiceParametersFactory.create( + [with_a2a_extensions(extensions)] + ) + context = ClientCallContext(service_parameters=service_params) + # Call send_message on the BaseClient async for _ in client.send_message( - request=message_to_send, extensions=extensions + request=SendMessageRequest(message=message_to_send), context=context ): pass mock_send_request.assert_called_once() - call_args, _ = mock_send_request.call_args - kwargs = call_args[1] - headers = kwargs.get('headers', {}) - assert 'X-A2A-Extensions' in headers + call_args, call_kwargs = mock_send_request.call_args + called_context = ( + call_args[1] if len(call_args) > 1 else call_kwargs.get('context') + ) + service_params = getattr(called_context, 'service_parameters', {}) + assert 'A2A-Extensions' in service_params assert ( - headers['X-A2A-Extensions'] + service_params['A2A-Extensions'] == 'https://example.com/test-ext/v1,https://example.com/test-ext/v2' ) - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio @@ -860,10 +695,10 @@ async def test_json_transport_get_signed_base_card( The client then verifies the signature. """ mock_request_handler = jsonrpc_setup.handler - agent_card.supports_authenticated_extended_card = False + agent_card.capabilities.extended_agent_card = False # Setup signing on the server side - key = 'key12345' + key = 'testkey12345678901234567890123456789012345678901' signer = create_agent_card_signer( signing_key=key, protected_header={ @@ -874,53 +709,68 @@ async def test_json_transport_get_signed_base_card( }, ) - app_builder = A2AFastAPIApplication( - agent_card, - mock_request_handler, - card_modifier=signer, # Sign the base card - ) - app = app_builder.build() - httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + async def async_signer(card: AgentCard) -> AgentCard: + return signer(card) - transport = JsonRpcTransport( - httpx_client=httpx_client, - url=agent_card.url, - agent_card=None, + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/', card_modifier=async_signer + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_request_handler, rpc_url='/' + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + headers={VERSION_HEADER: PROTOCOL_VERSION_CURRENT}, ) - # Get the card, this will trigger verification in get_card + agent_url = agent_card.supported_interfaces[0].url signature_verifier = create_signature_verifier( create_key_provider(key), ['HS384'] ) - result = await transport.get_card(signature_verifier=signature_verifier) + + resolver = A2ACardResolver( + httpx_client=httpx_client, + base_url=agent_url, + ) + + # Verification happens here + result = await resolver.get_agent_card( + relative_card_path='/', + signature_verifier=signature_verifier, + ) + + # Create transport with the verified card + transport = JsonRpcTransport( + httpx_client=httpx_client, + agent_card=result, + url=agent_url, + ) + assert result.name == agent_card.name - assert result.signatures is not None assert len(result.signatures) == 1 - assert transport.agent_card is not None - assert transport.agent_card.name == agent_card.name - assert transport._needs_extended_card is False - if hasattr(transport, 'close'): - await transport.close() + await transport.close() @pytest.mark.asyncio -async def test_json_transport_get_signed_extended_card( +async def test_client_get_signed_extended_card( jsonrpc_setup: TransportSetup, agent_card: AgentCard ) -> None: - """Tests fetching and verifying an asymmetrically signed extended AgentCard via JSON-RPC. + """Tests fetching and verifying an asymmetrically signed extended AgentCard at the client level. The client has a base card and fetches the extended card, which is signed by the server using ES256. The client verifies the signature on the received extended card. """ mock_request_handler = jsonrpc_setup.handler - agent_card.supports_authenticated_extended_card = True - extended_agent_card = agent_card.model_copy(deep=True) + agent_card.capabilities.extended_agent_card = True + extended_agent_card = AgentCard() + extended_agent_card.CopyFrom(agent_card) extended_agent_card.name = 'Extended Agent Card' # Setup signing on the server side - private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + private_key = ec.generate_private_key(ec.SECP256R1()) public_key = private_key.public_key() signer = create_agent_card_signer( signing_key=private_key, @@ -932,55 +782,71 @@ async def test_json_transport_get_signed_extended_card( }, ) - app_builder = A2AFastAPIApplication( - agent_card, - mock_request_handler, - extended_agent_card=extended_agent_card, - extended_card_modifier=lambda card, ctx: signer( - card - ), # Sign the extended card + async def get_extended_agent_card_mock_2(*args, **kwargs) -> AgentCard: + return signer(extended_agent_card) + + mock_request_handler.on_get_extended_agent_card.side_effect = ( + get_extended_agent_card_mock_2 # type: ignore[union-attr] + ) + + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_request_handler, rpc_url='/' + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + headers={VERSION_HEADER: PROTOCOL_VERSION_CURRENT}, ) - app = app_builder.build() - httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) transport = JsonRpcTransport( - httpx_client=httpx_client, agent_card=agent_card + httpx_client=httpx_client, + agent_card=agent_card, + url=agent_card.supported_interfaces[0].url, + ) + client = BaseClient( + card=agent_card, + config=ClientConfig(streaming=False), + transport=transport, + interceptors=[], ) - # Get the card, this will trigger verification in get_card signature_verifier = create_signature_verifier( create_key_provider(public_key), ['HS384', 'ES256'] ) - result = await transport.get_card(signature_verifier=signature_verifier) + # Get the card, this will trigger verification in get_extended_agent_card + result = await client.get_extended_agent_card( + GetExtendedAgentCardRequest(), + signature_verifier=signature_verifier, + ) assert result.name == extended_agent_card.name assert result.signatures is not None assert len(result.signatures) == 1 - assert transport.agent_card is not None - assert transport.agent_card.name == extended_agent_card.name - assert transport._needs_extended_card is False - if hasattr(transport, 'close'): - await transport.close() + await client.close() @pytest.mark.asyncio -async def test_json_transport_get_signed_base_and_extended_cards( +async def test_client_get_signed_base_and_extended_cards( jsonrpc_setup: TransportSetup, agent_card: AgentCard ) -> None: - """Tests fetching and verifying both base and extended cards via JSON-RPC when no card is initially provided. + """Tests fetching and verifying both base and extended cards at the client level when no card is initially provided. The client starts with no card. It first fetches the base card, which is signed. It then fetches the extended card, which is also signed. Both signatures are verified independently upon retrieval. """ mock_request_handler = jsonrpc_setup.handler - assert agent_card.signatures is None - agent_card.supports_authenticated_extended_card = True - extended_agent_card = agent_card.model_copy(deep=True) + assert len(agent_card.signatures) == 0 + agent_card.capabilities.extended_agent_card = True + extended_agent_card = AgentCard() + extended_agent_card.CopyFrom(agent_card) extended_agent_card.name = 'Extended Agent Card' # Setup signing on the server side - private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + private_key = ec.generate_private_key(ec.SECP256R1()) public_key = private_key.public_key() signer = create_agent_card_signer( signing_key=private_key, @@ -991,156 +857,481 @@ async def test_json_transport_get_signed_base_and_extended_cards( 'typ': 'JOSE', }, ) + signer(extended_agent_card) - app_builder = A2AFastAPIApplication( - agent_card, - mock_request_handler, - extended_agent_card=extended_agent_card, - card_modifier=signer, # Sign the base card - extended_card_modifier=lambda card, ctx: signer( - card - ), # Sign the extended card + # Use async def to ensure it returns an awaitable + async def get_extended_agent_card_mock_3(*args, **kwargs): + return extended_agent_card + + mock_request_handler.on_get_extended_agent_card.side_effect = ( + get_extended_agent_card_mock_3 # type: ignore[union-attr] ) - app = app_builder.build() - httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) - transport = JsonRpcTransport( - httpx_client=httpx_client, - url=agent_card.url, - agent_card=None, + async def async_signer(card: AgentCard) -> AgentCard: + return signer(card) + + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/', card_modifier=async_signer + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_request_handler, rpc_url='/' + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + headers={VERSION_HEADER: PROTOCOL_VERSION_CURRENT}, ) - # Get the card, this will trigger verification in get_card + agent_url = agent_card.supported_interfaces[0].url signature_verifier = create_signature_verifier( create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] ) - result = await transport.get_card(signature_verifier=signature_verifier) + + resolver = A2ACardResolver( + httpx_client=httpx_client, + base_url=agent_url, + ) + + # 1. Fetch base card + base_card = await resolver.get_agent_card( + relative_card_path='/', + signature_verifier=signature_verifier, + ) + + # 2. Create transport with base card + transport = JsonRpcTransport( + httpx_client=httpx_client, + agent_card=base_card, + url=agent_url, + ) + client = BaseClient( + card=base_card, + config=ClientConfig(streaming=False), + transport=transport, + interceptors=[], + ) + + # 3. Fetch extended card via client + result = await client.get_extended_agent_card( + GetExtendedAgentCardRequest(), + signature_verifier=signature_verifier, + ) assert result.name == extended_agent_card.name - assert result.signatures is not None assert len(result.signatures) == 1 - assert transport.agent_card is not None - assert transport.agent_card.name == extended_agent_card.name - assert transport._needs_extended_card is False - if hasattr(transport, 'close'): - await transport.close() + await client.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'error_cls', + [ + TaskNotFoundError, + TaskNotCancelableError, + PushNotificationNotSupportedError, + UnsupportedOperationError, + ContentTypeNotSupportedError, + InvalidAgentResponseError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + VersionNotSupportedError, + ], +) +async def test_client_handles_a2a_errors(transport_setups, error_cls) -> None: + """Integration test to verify error propagation from handler to client.""" + client = transport_setups.client + handler = transport_setups.handler + + # Mock the handler to raise the error + handler.on_get_task.side_effect = error_cls('Test error message') + + params = GetTaskRequest(id='some-id') + + # We expect the client to raise the same error_cls. + with pytest.raises(error_cls) as exc_info: + await client.get_task(request=params) + + assert 'Test error message' in str(exc_info.value) + + # Reset side_effect for other tests + handler.on_get_task.side_effect = None + + await client.close() @pytest.mark.asyncio -async def test_rest_transport_get_signed_card( - rest_setup: TransportSetup, agent_card: AgentCard +@pytest.mark.parametrize( + 'error_cls', + [ + TaskNotFoundError, + TaskNotCancelableError, + PushNotificationNotSupportedError, + UnsupportedOperationError, + ContentTypeNotSupportedError, + InvalidAgentResponseError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + VersionNotSupportedError, + ], +) +@pytest.mark.parametrize( + 'handler_attr, client_method, request_params', + [ + pytest.param( + 'on_message_send_stream', + 'send_message', + SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg-integration-test', + parts=[Part(text='Hello, integration test!')], + ) + ), + id='stream', + ), + pytest.param( + 'on_subscribe_to_task', + 'subscribe', + SubscribeToTaskRequest(id='some-id'), + id='subscribe', + ), + ], +) +async def test_client_handles_a2a_errors_streaming( + transport_setups, error_cls, handler_attr, client_method, request_params ) -> None: - """Tests fetching and verifying signed base and extended cards via REST. + """Integration test to verify error propagation from streaming handlers to client. - The client starts with no card. It first fetches the base card, which is - signed. It then fetches the extended card, which is also signed. Both signatures - are verified independently upon retrieval. + The handler raises an A2AError before yielding any events. All transports + must propagate this as the exact error_cls, not wrapped in an ExceptionGroup + or converted to a generic client error. """ - mock_request_handler = rest_setup.handler - agent_card.supports_authenticated_extended_card = True - extended_agent_card = agent_card.model_copy(deep=True) - extended_agent_card.name = 'Extended Agent Card' + client = transport_setups.client + handler = transport_setups.handler - # Setup signing on the server side - private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) - public_key = private_key.public_key() - signer = create_agent_card_signer( - signing_key=private_key, - protected_header={ - 'alg': 'ES256', - 'kid': 'testkey', - 'jku': None, - 'typ': 'JOSE', - }, - ) + async def mock_generator(*args, **kwargs): + raise error_cls('Test error message') + yield - app_builder = A2ARESTFastAPIApplication( - agent_card, - mock_request_handler, - extended_agent_card=extended_agent_card, - card_modifier=signer, # Sign the base card - extended_card_modifier=lambda card, ctx: signer( - card - ), # Sign the extended card - ) - app = app_builder.build() - httpx_client = httpx.AsyncClient(transport=httpx.ASGITransport(app=app)) + getattr(handler, handler_attr).side_effect = mock_generator - transport = RestTransport( - httpx_client=httpx_client, - url=agent_card.url, - agent_card=None, + with pytest.raises(error_cls) as exc_info: + async for _ in getattr(client, client_method)(request=request_params): + pass + + assert 'Test error message' in str(exc_info.value) + + getattr(handler, handler_attr).side_effect = None + + await client.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'error_cls,handler_attr,client_method,request_params', + [ + pytest.param( + UnsupportedOperationError, + 'on_subscribe_to_task', + 'subscribe', + SubscribeToTaskRequest(id='some-id'), + id='subscribe', + ), + ], +) +async def test_server_rejects_stream_on_validation_error( + transport_setups, error_cls, handler_attr, client_method, request_params +) -> None: + """Verify that the server returns an error directly and doesn't open a stream on validation error.""" + client = transport_setups.client + handler = transport_setups.handler + + async def mock_generator(*args, **kwargs): + raise error_cls('Validation failed') + yield + + getattr(handler, handler_attr).side_effect = mock_generator + + transport = client._transport + + if isinstance(transport, (RestTransport, JsonRpcTransport)): + # Spy on httpx client to check response headers + original_send = transport.httpx_client.send + response_headers = {} + + async def mock_send(*args, **kwargs): + resp = await original_send(*args, **kwargs) + response_headers['Content-Type'] = resp.headers.get('Content-Type') + return resp + + transport.httpx_client.send = mock_send + + try: + with pytest.raises(error_cls): + async for _ in getattr(client, client_method)( + request=request_params + ): + pass + finally: + transport.httpx_client.send = original_send + + # Verify that the response content type was NOT text/event-stream + assert not response_headers.get('Content-Type', '').startswith( + 'text/event-stream' + ) + else: + # For gRPC, we just verify it raises the error + with pytest.raises(error_cls): + async for _ in getattr(client, client_method)( + request=request_params + ): + pass + + getattr(handler, handler_attr).side_effect = None + await client.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'request_kwargs, expected_error_code', + [ + pytest.param( + {'content': 'not a json'}, + -32700, # Parse error + id='invalid-json', + ), + pytest.param( + { + 'json': { + 'jsonrpc': '2.0', + 'method': 'SendMessage', + 'params': {'message': 'should be an object'}, + 'id': 1, + } + }, + -32602, # Invalid params + id='wrong-params-type', + ), + ], +) +async def test_jsonrpc_malformed_payload( + jsonrpc_setup: TransportSetup, + request_kwargs: dict[str, Any], + expected_error_code: int, +) -> None: + """Integration test to verify that JSON-RPC malformed payloads don't return 500.""" + client_obj = jsonrpc_setup.client + assert isinstance(client_obj, BaseClient) + transport = client_obj._transport + assert isinstance(transport, JsonRpcTransport) + client = transport.httpx_client + url = transport.url + + response = await client.post(url, **request_kwargs) + assert response.status_code == 200 + assert response.json()['error']['code'] == expected_error_code + + await transport.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'method, path, request_kwargs', + [ + pytest.param( + 'POST', + '/message:send', + {'content': 'not a json'}, + id='invalid-json', + ), + pytest.param( + 'POST', + '/message:send', + {'json': {'message': 'should be an object'}}, + id='wrong-body-type', + ), + pytest.param( + 'GET', + '/tasks', + {'params': {'historyLength': 'not_an_int'}}, + id='wrong-query-param-type', + ), + ], +) +async def test_rest_malformed_payload( + rest_setup: TransportSetup, + method: str, + path: str, + request_kwargs: dict[str, Any], +) -> None: + """Integration test to verify that REST malformed payloads don't return 500.""" + client_obj = rest_setup.client + assert isinstance(client_obj, BaseClient) + transport = client_obj._transport + assert isinstance(transport, RestTransport) + client = transport.httpx_client + url = transport.url + + response = await client.request(method, f'{url}{path}', **request_kwargs) + assert response.status_code == 400 + + await transport.close() + + +@pytest.mark.asyncio +async def test_validate_version_unsupported(http_transport_setups) -> None: + """Integration test for @validate_version decorator.""" + client = http_transport_setups.client + + service_params = {'A2A-Version': '2.0.0'} + context = ClientCallContext(service_parameters=service_params) + + params = GetTaskRequest(id=GET_TASK_RESPONSE.id) + + with pytest.raises(VersionNotSupportedError): + await client.get_task(request=params, context=context) + + await client.close() + + +@pytest.mark.asyncio +async def test_validate_decorator_push_notifications_disabled( + error_handling_setups, agent_card: AgentCard +) -> None: + """Integration test for @validate decorator with push notifications disabled.""" + client = error_handling_setups.client + + real_handler = LegacyRequestHandler( + agent_executor=AsyncMock(), + task_store=AsyncMock(), + agent_card=agent_card, ) - # Get the card, this will trigger verification in get_card - signature_verifier = create_signature_verifier( - create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] + error_handling_setups.handler.on_create_task_push_notification_config.side_effect = real_handler.on_create_task_push_notification_config + + params = TaskPushNotificationConfig( + task_id='123', + id='pnc-123', + url='http://example.com', ) - result = await transport.get_card(signature_verifier=signature_verifier) - assert result.name == extended_agent_card.name - assert result.signatures is not None - assert len(result.signatures) == 1 - assert transport.agent_card is not None - assert transport.agent_card.name == extended_agent_card.name - assert transport._needs_extended_card is False - if hasattr(transport, 'close'): - await transport.close() + with pytest.raises(PushNotificationNotSupportedError): + await client.create_task_push_notification_config(request=params) + + await client.close() @pytest.mark.asyncio -async def test_grpc_transport_get_signed_card( - mock_request_handler: AsyncMock, agent_card: AgentCard +async def test_validate_streaming_disabled( + error_handling_setups, agent_card: AgentCard ) -> None: - """Tests fetching and verifying a signed AgentCard via gRPC.""" - # Setup signing on the server side - agent_card.supports_authenticated_extended_card = True + """Integration test for @validate decorator when streaming is disabled.""" + client = error_handling_setups.client + transport = client._transport - private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) - public_key = private_key.public_key() - signer = create_agent_card_signer( - signing_key=private_key, - protected_header={ - 'alg': 'ES256', - 'kid': 'testkey', - 'jku': None, - 'typ': 'JOSE', - }, + agent_card.capabilities.streaming = False + + real_handler = LegacyRequestHandler( + agent_executor=AsyncMock(), + task_store=AsyncMock(), + agent_card=agent_card, ) - server = grpc.aio.server() - port = server.add_insecure_port('[::]:0') - server_address = f'localhost:{port}' - agent_card.url = server_address + error_handling_setups.handler.on_message_send_stream.side_effect = ( + real_handler.on_message_send_stream + ) + error_handling_setups.handler.on_subscribe_to_task.side_effect = ( + real_handler.on_subscribe_to_task + ) - servicer = GrpcHandler( - agent_card, - mock_request_handler, - card_modifier=signer, + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + parts=[Part(text='hi')], + message_id='msg-123', + ) ) - a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) - await server.start() - transport = None # Initialize transport - try: + stream = transport.send_message_streaming(request=params) + + with pytest.raises(UnsupportedOperationError): + async for _ in stream: + pass - def channel_factory(address: str) -> Channel: - return grpc.aio.insecure_channel(address) + await transport.close() - channel = channel_factory(server_address) - transport = GrpcTransport(channel=channel, agent_card=agent_card) - transport.agent_card = None - assert transport._needs_extended_card is True - # Get the card, this will trigger verification in get_card - signature_verifier = create_signature_verifier( - create_key_provider(public_key), ['HS384', 'ES256', 'RS256'] - ) - result = await transport.get_card(signature_verifier=signature_verifier) - assert result.signatures is not None - assert len(result.signatures) == 1 - assert transport._needs_extended_card is False - finally: - if transport: - await transport.close() - await server.stop(0) # Gracefully stop the server +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'error_cls', + [ + TaskNotFoundError, + TaskNotCancelableError, + PushNotificationNotSupportedError, + UnsupportedOperationError, + ContentTypeNotSupportedError, + InvalidAgentResponseError, + ExtendedAgentCardNotConfiguredError, + ExtensionSupportRequiredError, + VersionNotSupportedError, + ], +) +@pytest.mark.parametrize( + 'handler_attr, client_method, request_params', + [ + pytest.param( + 'on_message_send_stream', + 'send_message', + SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg-midstream-test', + parts=[Part(text='Hello, mid-stream test!')], + ) + ), + id='stream', + ), + pytest.param( + 'on_subscribe_to_task', + 'subscribe', + SubscribeToTaskRequest(id='some-id'), + id='subscribe', + ), + ], +) +async def test_client_handles_mid_stream_a2a_errors( + transport_setups, + error_cls, + handler_attr, + client_method, + request_params, +) -> None: + """Integration test for mid-stream errors sent as SSE error events. + + The handler yields one event successfully, then raises an A2AError. + The client must receive the first event and then get the error as the + exact error_cls exception. This mirrors test_client_handles_a2a_errors_streaming + but verifies the error occurs *after* the stream has started producing events. + """ + client = transport_setups.client + handler = transport_setups.handler + + async def mock_generator(*args, **kwargs): + yield TASK_FROM_STREAM + raise error_cls('Mid-stream error') + + getattr(handler, handler_attr).side_effect = mock_generator + + received_events = [] + with pytest.raises(error_cls) as exc_info: + async for event in getattr(client, client_method)( + request=request_params + ): + received_events.append(event) # noqa: PERF401 + + assert 'Mid-stream error' in str(exc_info.value) + assert len(received_events) == 1 + + getattr(handler, handler_attr).side_effect = None + + await client.close() diff --git a/tests/integration/test_copying_observability.py b/tests/integration/test_copying_observability.py new file mode 100644 index 000000000..bc23b4696 --- /dev/null +++ b/tests/integration/test_copying_observability.py @@ -0,0 +1,190 @@ +import httpx +import pytest +from typing import NamedTuple + +from starlette.applications import Starlette + +from a2a.client.client import Client, ClientConfig +from a2a.client.client_factory import ClientFactory +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks import TaskUpdater +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Artifact, + GetTaskRequest, + Message, + Part, + Role, + SendMessageRequest, + TaskState, +) +from a2a.helpers.proto_helpers import new_task_from_user_message +from a2a.utils import TransportProtocol + + +class MockMutatingAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + assert context.task_id is not None + assert context.context_id is not None + task_updater = TaskUpdater( + event_queue, + context.task_id, + context.context_id, + ) + + user_input = context.get_user_input() + + if user_input == 'Init task': + # Explicitly save status change to ensure task exists with some state + task = new_task_from_user_message(context.message) + task.id = context.task_id + task.context_id = context.context_id + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + + await task_updater.update_status( + TaskState.TASK_STATE_WORKING, + message=task_updater.new_agent_message( + [Part(text='task working')] + ), + ) + else: + # Mutate the task WITHOUT saving it properly + assert context.current_task is not None + context.current_task.artifacts.append( + Artifact( + name='leaked-artifact', + parts=[Part(text='leaked artifact')], + ) + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + raise NotImplementedError('Cancellation is not supported') + + +@pytest.fixture +def agent_card() -> AgentCard: + return AgentCard( + name='Mutating Agent', + description='Real in-memory integration testing.', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=True, push_notifications=False + ), + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url='http://testserver', + ), + ], + ) + + +class ClientSetup(NamedTuple): + client: Client + task_store: InMemoryTaskStore + use_copying: bool + + +def setup_client(agent_card: AgentCard, use_copying: bool) -> ClientSetup: + task_store = InMemoryTaskStore(use_copying=use_copying) + handler = DefaultRequestHandler( + agent_executor=MockMutatingAgentExecutor(), + task_store=task_store, + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + extended_agent_card=agent_card, + ) + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), base_url='http://testserver' + ) + factory = ClientFactory( + config=ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) + ) + client = factory.create(agent_card) + return ClientSetup( + client=client, + task_store=task_store, + use_copying=use_copying, + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('use_copying', [True, False]) +async def test_mutation_observability(agent_card: AgentCard, use_copying: bool): + """Tests that task mutations are observable when copying is disabled. + + When copying is disabled, the agent mutates the task in-place and the + changes are observable by the client. When copying is enabled, the agent + mutates a copy of the task and the changes are not observable by the client. + + It is ok to remove the `use_copying` parameter from the system in the future + to make InMemoryTaskStore consistent with other task stores. + """ + client_setup = setup_client(agent_card, use_copying) + client = client_setup.client + + # 1. First message to create the task + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-mut-init', + parts=[Part(text='Init task')], + ) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send) + ) + ] + + event = events[-1] + assert event.HasField('status_update') + task_id = event.status_update.task_id + + # 2. Second message to mutate it + message_to_send_2 = Message( + role=Role.ROLE_USER, + message_id='msg-mut-do', + task_id=task_id, + parts=[Part(text='Update task without saving it')], + ) + _ = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send_2) + ) + ] + + # 3. Get task via client + retrieved_task = await client.get_task(request=GetTaskRequest(id=task_id)) + + # 4. Assert behavior based on `use_copying` + if use_copying: + # The un-saved artifact IS NOT leaked to the client + assert len(retrieved_task.artifacts) == 0 + else: + # The un-saved artifact IS leaked to the client + assert len(retrieved_task.artifacts) == 1 + assert retrieved_task.artifacts[0].name == 'leaked-artifact' diff --git a/tests/integration/test_end_to_end.py b/tests/integration/test_end_to_end.py new file mode 100644 index 000000000..dcd016b48 --- /dev/null +++ b/tests/integration/test_end_to_end.py @@ -0,0 +1,834 @@ +from collections.abc import AsyncGenerator +from typing import NamedTuple + +import grpc +import httpx +import pytest +import pytest_asyncio + +from starlette.applications import Starlette + +from a2a.client.base_client import BaseClient +from a2a.client.client import ClientCallContext, ClientConfig +from a2a.client.client_factory import ClientFactory +from a2a.client.service_parameters import ( + ServiceParametersFactory, + with_a2a_extensions, +) +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler, GrpcHandler +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.tasks import TaskUpdater +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentExtension, + AgentInterface, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + Message, + Part, + Role, + SendMessageConfiguration, + SendMessageRequest, + SubscribeToTaskRequest, + TaskState, + a2a_pb2_grpc, +) +from a2a.utils import TransportProtocol +from a2a.helpers.proto_helpers import new_task_from_user_message +from a2a.utils.errors import InvalidParamsError + + +SUPPORTED_EXTENSION_URIS = [ + 'https://example.com/ext/v1', + 'https://example.com/ext/v2', +] + + +def assert_message_matches(message, expected_role, expected_text): + assert message.role == expected_role + assert message.parts[0].text == expected_text + + +def assert_history_matches(history, expected_history): + assert len(history) == len(expected_history) + for msg, (expected_role, expected_text) in zip( + history, expected_history, strict=True + ): + assert_message_matches(msg, expected_role, expected_text) + + +def assert_artifacts_match(artifacts, expected_artifacts): + assert len(artifacts) == len(expected_artifacts) + for artifact, (expected_name, expected_text) in zip( + artifacts, expected_artifacts, strict=True + ): + assert artifact.name == expected_name + assert artifact.parts[0].text == expected_text + + +def assert_events_match(events, expected_events): + assert len(events) == len(expected_events) + for event, (expected_type, expected_val) in zip( + events, expected_events, strict=True + ): + assert event.HasField(expected_type) + if expected_type == 'task': + assert event.task.status.state == expected_val + elif expected_type == 'status_update': + assert event.status_update.status.state == expected_val + elif expected_type == 'artifact_update': + if expected_val is not None: + assert_artifacts_match( + [event.artifact_update.artifact], + expected_val, + ) + else: + raise ValueError(f'Unexpected event type: {expected_type}') + + +class MockAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + user_input = context.get_user_input() + + # Extensions echo: report the requested extensions back to the client + # via the Message.extensions field. + if user_input.startswith('Extensions:'): + await event_queue.enqueue_event( + Message( + role=Role.ROLE_AGENT, + message_id='ext-reply-1', + parts=[Part(text='extensions echoed')], + extensions=sorted(context.requested_extensions), + ) + ) + return + + # Direct message response (no task created). + if user_input.startswith('Message:'): + await event_queue.enqueue_event( + Message( + role=Role.ROLE_AGENT, + message_id='direct-reply-1', + parts=[Part(text=f'Direct reply to: {user_input}')], + ) + ) + return + + # Task-based response. + task = context.current_task + if not task: + task = new_task_from_user_message(context.message) + await event_queue.enqueue_event(task) + + task_updater = TaskUpdater( + event_queue, + task.id, + task.context_id, + ) + + await task_updater.update_status( + TaskState.TASK_STATE_WORKING, + message=task_updater.new_agent_message([Part(text='task working')]), + ) + + if user_input == 'Need input': + await task_updater.update_status( + TaskState.TASK_STATE_INPUT_REQUIRED, + message=task_updater.new_agent_message( + [Part(text='Please provide input')] + ), + ) + else: + await task_updater.add_artifact( + parts=[Part(text='artifact content')], name='test-artifact' + ) + await task_updater.update_status( + TaskState.TASK_STATE_COMPLETED, + message=task_updater.new_agent_message([Part(text='done')]), + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + raise NotImplementedError('Cancellation is not supported') + + +@pytest.fixture +def agent_card() -> AgentCard: + return AgentCard( + name='Integration Agent', + description='Real in-memory integration testing.', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=True, + push_notifications=False, + extensions=[ + AgentExtension( + uri=uri, + description=f'Test extension {uri}', + ) + for uri in SUPPORTED_EXTENSION_URIS + ], + ), + skills=[], + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.HTTP_JSON, + url='http://testserver', + ), + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url='http://testserver', + ), + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='localhost:50051', + ), + ], + ) + + +class ClientSetup(NamedTuple): + """Holds the client and task_store for a given test.""" + + client: BaseClient + task_store: InMemoryTaskStore + + +@pytest.fixture +def base_e2e_setup(agent_card): + task_store = InMemoryTaskStore() + handler = DefaultRequestHandler( + agent_executor=MockAgentExecutor(), + task_store=task_store, + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + ) + return task_store, handler + + +@pytest.fixture +def rest_setup(agent_card, base_e2e_setup) -> ClientSetup: + task_store, handler = base_e2e_setup + rest_routes = create_rest_routes(request_handler=handler) + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + app = Starlette(routes=[*rest_routes, *agent_card_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), base_url='http://testserver' + ) + factory = ClientFactory( + config=ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], + ) + ) + client = factory.create(agent_card) + return ClientSetup( + client=client, + task_store=task_store, + ) + + +@pytest.fixture +def jsonrpc_setup(agent_card, base_e2e_setup) -> ClientSetup: + task_store, handler = base_e2e_setup + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + httpx_client = httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), base_url='http://testserver' + ) + factory = ClientFactory( + config=ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) + ) + client = factory.create(agent_card) + return ClientSetup( + client=client, + task_store=task_store, + ) + + +@pytest_asyncio.fixture +async def grpc_setup( + agent_card: AgentCard, base_e2e_setup +) -> AsyncGenerator[ClientSetup, None]: + task_store, handler = base_e2e_setup + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + + grpc_agent_card = AgentCard() + grpc_agent_card.CopyFrom(agent_card) + + # Update the gRPC interface dynamically based on the assigned port + for interface in grpc_agent_card.supported_interfaces: + if interface.protocol_binding == TransportProtocol.GRPC: + interface.url = server_address + break + else: + raise ValueError('No gRPC interface found in agent card') + handler._agent_card = grpc_agent_card + servicer = GrpcHandler(handler) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + + factory = ClientFactory( + config=ClientConfig( + grpc_channel_factory=grpc.aio.insecure_channel, + supported_protocol_bindings=[TransportProtocol.GRPC], + ) + ) + client = factory.create(grpc_agent_card) + yield ClientSetup( + client=client, + task_store=task_store, + ) + + await client.close() + await server.stop(0) + + +@pytest.fixture( + params=[ + pytest.param('rest_setup', id='REST'), + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('grpc_setup', id='gRPC'), + ] +) +def transport_setups(request) -> ClientSetup: + """Parametrized fixture that runs tests against all supported transports.""" + return request.getfixturevalue(request.param) + + +@pytest.fixture( + params=[ + pytest.param('jsonrpc_setup', id='JSON-RPC'), + pytest.param('grpc_setup', id='gRPC'), + ] +) +def rpc_transport_setups(request) -> ClientSetup: + """Parametrized fixture for RPC transports only (excludes REST). + + REST encodes some required fields in URL paths, so empty-field validation + tests hit routing errors before reaching the handler. JSON-RPC and gRPC + send the full request message, allowing server-side validation to work. + """ + return request.getfixturevalue(request.param) + + +@pytest.mark.asyncio +async def test_end_to_end_send_message_blocking(transport_setups): + client = transport_setups.client + client._config.streaming = False + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-e2e-blocking', + parts=[Part(text='Run dummy agent!')], + ) + configuration = SendMessageConfiguration() + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest( + message=message_to_send, configuration=configuration + ) + ) + ] + assert len(events) == 1 + response = events[0] + assert response.task.id + assert response.task.status.state == TaskState.TASK_STATE_COMPLETED + assert_artifacts_match( + response.task.artifacts, + [('test-artifact', 'artifact content')], + ) + assert_history_matches( + response.task.history, + [ + (Role.ROLE_USER, 'Run dummy agent!'), + (Role.ROLE_AGENT, 'task working'), + ], + ) + + +@pytest.mark.asyncio +async def test_end_to_end_send_message_non_blocking(transport_setups): + client = transport_setups.client + client._config.streaming = False + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-e2e-non-blocking', + parts=[Part(text='Run dummy agent!')], + ) + configuration = SendMessageConfiguration(return_immediately=True) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest( + message=message_to_send, configuration=configuration + ) + ) + ] + assert len(events) == 1 + response = events[0] + assert response.task.id + assert response.task.status.state == TaskState.TASK_STATE_SUBMITTED + assert_history_matches( + response.task.history, + [ + (Role.ROLE_USER, 'Run dummy agent!'), + ], + ) + + +@pytest.mark.asyncio +async def test_end_to_end_send_message_streaming(transport_setups): + client = transport_setups.client + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-e2e-streaming', + parts=[Part(text='Run dummy agent!')], + ) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send) + ) + ] + + assert_events_match( + events, + [ + ('task', TaskState.TASK_STATE_SUBMITTED), + ('status_update', TaskState.TASK_STATE_WORKING), + ('artifact_update', [('test-artifact', 'artifact content')]), + ('status_update', TaskState.TASK_STATE_COMPLETED), + ], + ) + + task_id = events[0].task.id + task = await client.get_task(request=GetTaskRequest(id=task_id)) + assert_history_matches( + task.history, + [ + (Role.ROLE_USER, 'Run dummy agent!'), + (Role.ROLE_AGENT, 'task working'), + ], + ) + assert task.status.state == TaskState.TASK_STATE_COMPLETED + assert_message_matches(task.status.message, Role.ROLE_AGENT, 'done') + + +@pytest.mark.asyncio +async def test_end_to_end_get_task(transport_setups): + client = transport_setups.client + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-e2e-get', + parts=[Part(text='Test Get Task')], + ) + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send) + ) + ] + response = events[0] + task_id = response.task.id + + get_request = GetTaskRequest(id=task_id) + retrieved_task = await client.get_task(request=get_request) + + assert retrieved_task.id == task_id + assert retrieved_task.status.state in { + TaskState.TASK_STATE_SUBMITTED, + TaskState.TASK_STATE_WORKING, + TaskState.TASK_STATE_COMPLETED, + } + assert_history_matches( + retrieved_task.history, + [ + (Role.ROLE_USER, 'Test Get Task'), + (Role.ROLE_AGENT, 'task working'), + ], + ) + + +@pytest.mark.asyncio +async def test_end_to_end_list_tasks(transport_setups): + client = transport_setups.client + + total_items = 6 + page_size = 2 + + expected_task_ids = [] + for i in range(total_items): + # One event is enough to get the task ID + response = await anext( + client.send_message( + request=SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id=f'msg-e2e-list-{i}', + parts=[Part(text=f'Test List Tasks {i}')], + ) + ) + ) + ) + expected_task_ids.append(response.task.id) + + list_request = ListTasksRequest(page_size=page_size) + + actual_task_ids = [] + token = None + + while token != '': + if token: + list_request.page_token = token + + list_response = await client.list_tasks(request=list_request) + assert 0 < len(list_response.tasks) <= page_size + assert list_response.total_size == total_items + assert list_response.page_size == page_size + + actual_task_ids.extend([task.id for task in list_response.tasks]) + + for task in list_response.tasks: + assert len(task.history) >= 1 + assert task.history[0].role == Role.ROLE_USER + assert task.history[0].parts[0].text.startswith('Test List Tasks ') + + token = list_response.next_page_token + + assert len(actual_task_ids) == total_items + assert sorted(actual_task_ids) == sorted(expected_task_ids) + + +@pytest.mark.asyncio +async def test_end_to_end_input_required(transport_setups): + client = transport_setups.client + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-e2e-input-req-1', + parts=[Part(text='Need input')], + ) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send) + ) + ] + + assert_events_match( + events, + [ + ('task', TaskState.TASK_STATE_SUBMITTED), + ('status_update', TaskState.TASK_STATE_WORKING), + ('status_update', TaskState.TASK_STATE_INPUT_REQUIRED), + ], + ) + + task_id = events[0].task.id + task = await client.get_task(request=GetTaskRequest(id=task_id)) + + assert task.status.state == TaskState.TASK_STATE_INPUT_REQUIRED + assert_history_matches( + task.history, + [ + (Role.ROLE_USER, 'Need input'), + (Role.ROLE_AGENT, 'task working'), + ], + ) + assert_message_matches( + task.status.message, Role.ROLE_AGENT, 'Please provide input' + ) + + # Follow-up message + follow_up_message = Message( + task_id=task.id, + role=Role.ROLE_USER, + message_id='msg-e2e-input-req-2', + parts=[Part(text='Here is the input')], + ) + + follow_up_events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=follow_up_message) + ) + ] + + assert_events_match( + follow_up_events, + [ + ('status_update', TaskState.TASK_STATE_WORKING), + ('artifact_update', [('test-artifact', 'artifact content')]), + ('status_update', TaskState.TASK_STATE_COMPLETED), + ], + ) + + task = await client.get_task(request=GetTaskRequest(id=task.id)) + + assert task.status.state == TaskState.TASK_STATE_COMPLETED + assert_artifacts_match( + task.artifacts, + [('test-artifact', 'artifact content')], + ) + + assert_history_matches( + task.history, + [ + (Role.ROLE_USER, 'Need input'), + (Role.ROLE_AGENT, 'task working'), + (Role.ROLE_AGENT, 'Please provide input'), + (Role.ROLE_USER, 'Here is the input'), + (Role.ROLE_AGENT, 'task working'), + ], + ) + assert_message_matches(task.status.message, Role.ROLE_AGENT, 'done') + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'empty_request, expected_fields', + [ + ( + SendMessageRequest(), + {'message'}, + ), + ( + SendMessageRequest(message=Message()), + {'message.message_id', 'message.role', 'message.parts'}, + ), + ( + SendMessageRequest( + message=Message(message_id='m1', role=Role.ROLE_USER) + ), + {'message.parts'}, + ), + ], +) +async def test_end_to_end_send_message_validation_errors( + transport_setups, + empty_request: SendMessageRequest, + expected_fields: set[str], +) -> None: + client = transport_setups.client + + with pytest.raises(InvalidParamsError) as exc_info: + async for _ in client.send_message(request=empty_request): + pass + + errors = exc_info.value.data.get('errors', []) + assert {e['field'] for e in errors} == expected_fields + + await client.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'method, invalid_request, expected_fields', + [ + ( + 'get_task', + GetTaskRequest(), + {'id'}, + ), + ( + 'cancel_task', + CancelTaskRequest(), + {'id'}, + ), + ( + 'get_task_push_notification_config', + GetTaskPushNotificationConfigRequest(), + {'task_id', 'id'}, + ), + ( + 'list_task_push_notification_configs', + ListTaskPushNotificationConfigsRequest(), + {'task_id'}, + ), + ( + 'delete_task_push_notification_config', + DeleteTaskPushNotificationConfigRequest(), + {'task_id', 'id'}, + ), + ], +) +async def test_end_to_end_unary_validation_errors( + rpc_transport_setups, + method: str, + invalid_request, + expected_fields: set[str], +) -> None: + client = rpc_transport_setups.client + + with pytest.raises(InvalidParamsError) as exc_info: + await getattr(client, method)(request=invalid_request) + + errors = exc_info.value.data.get('errors', []) + assert {e['field'] for e in errors} == expected_fields + + await client.close() + + +@pytest.mark.asyncio +async def test_end_to_end_subscribe_validation_error( + rpc_transport_setups, +) -> None: + client = rpc_transport_setups.client + + with pytest.raises(InvalidParamsError) as exc_info: + async for _ in client.subscribe(request=SubscribeToTaskRequest()): + pass + + errors = exc_info.value.data.get('errors', []) + assert {e['field'] for e in errors} == {'id'} + + await client.close() + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'streaming', + [ + pytest.param(False, id='blocking'), + pytest.param(True, id='streaming'), + ], +) +async def test_end_to_end_direct_message(transport_setups, streaming): + """Test that an executor can return a direct Message without creating a Task.""" + client = transport_setups.client + client._config.streaming = streaming + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-direct', + parts=[Part(text='Message: Hello agent')], + ) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send) + ) + ] + + assert len(events) == 1 + response = events[0] + assert response.HasField('message') + assert not response.HasField('task') + assert_message_matches( + response.message, + Role.ROLE_AGENT, + 'Direct reply to: Message: Hello agent', + ) + + +@pytest.mark.asyncio +async def test_end_to_end_direct_message_return_immediately(transport_setups): + """Test that return_immediately still returns the Message for direct replies. + + When the executor responds with a direct Message, the response is + inherently immediate -- there is no async task to defer to. The client + should receive the Message regardless of the return_immediately flag. + """ + client = transport_setups.client + client._config.streaming = False + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-direct-return-immediately', + parts=[Part(text='Message: Quick question')], + ) + configuration = SendMessageConfiguration(return_immediately=True) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest( + message=message_to_send, configuration=configuration + ) + ) + ] + + assert len(events) == 1 + response = events[0] + assert response.HasField('message') + assert not response.HasField('task') + assert_message_matches( + response.message, + Role.ROLE_AGENT, + 'Direct reply to: Message: Quick question', + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'streaming', + [ + pytest.param(False, id='blocking'), + pytest.param(True, id='streaming'), + ], +) +async def test_end_to_end_extensions_propagation(transport_setups, streaming): + """Test that extensions sent by the client reach the agent executor.""" + client = transport_setups.client + client._config.streaming = streaming + + service_params = ServiceParametersFactory.create( + [with_a2a_extensions(SUPPORTED_EXTENSION_URIS)] + ) + context = ClientCallContext(service_parameters=service_params) + + message_to_send = Message( + role=Role.ROLE_USER, + message_id='msg-ext-propagation', + parts=[Part(text='Extensions: echo')], + ) + + events = [ + event + async for event in client.send_message( + request=SendMessageRequest(message=message_to_send), + context=context, + ) + ] + + assert len(events) == 1 + response = events[0] + assert response.HasField('message') + assert_message_matches( + response.message, Role.ROLE_AGENT, 'extensions echoed' + ) + assert set(response.message.extensions) == set(SUPPORTED_EXTENSION_URIS) diff --git a/tests/integration/test_samples_smoke.py b/tests/integration/test_samples_smoke.py new file mode 100644 index 000000000..fcb49a003 --- /dev/null +++ b/tests/integration/test_samples_smoke.py @@ -0,0 +1,134 @@ +"""End-to-end smoke test for `samples/hello_world_agent.py` and `samples/cli.py`. + +Boots the sample agent as a subprocess on free ports, then runs the sample CLI +against it once per supported transport, asserting the expected greeting reply +flows through. +""" + +from __future__ import annotations + +import asyncio +import socket +import sys + +from pathlib import Path +from typing import TYPE_CHECKING + +import httpx +import pytest +import pytest_asyncio + + +if TYPE_CHECKING: + from collections.abc import AsyncGenerator + + +REPO_ROOT = Path(__file__).resolve().parents[2] +SAMPLES_DIR = REPO_ROOT / 'samples' +AGENT_SCRIPT = SAMPLES_DIR / 'hello_world_agent.py' +CLI_SCRIPT = SAMPLES_DIR / 'cli.py' + +STARTUP_TIMEOUT_S = 30.0 +CLI_TIMEOUT_S = 30.0 +EXPECTED_REPLY = 'Hello World! Nice to meet you!' + + +def _free_port() -> int: + with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock: + sock.bind(('127.0.0.1', 0)) + return sock.getsockname()[1] + + +async def _wait_for_agent_card(url: str) -> None: + deadline = asyncio.get_running_loop().time() + STARTUP_TIMEOUT_S + async with httpx.AsyncClient(timeout=2.0) as client: + while asyncio.get_running_loop().time() < deadline: + try: + response = await client.get(url) + if response.status_code == 200: + return + except httpx.RequestError: + pass + await asyncio.sleep(0.2) + raise TimeoutError(f'Agent did not become ready at {url}') + + +@pytest_asyncio.fixture +async def running_sample_agent() -> AsyncGenerator[str, None]: + """Start `hello_world_agent.py` as a subprocess on free ports.""" + host = '127.0.0.1' + http_port = _free_port() + grpc_port = _free_port() + compat_grpc_port = _free_port() + base_url = f'http://{host}:{http_port}' + + proc = await asyncio.create_subprocess_exec( + sys.executable, + str(AGENT_SCRIPT), + '--host', + host, + '--port', + str(http_port), + '--grpc-port', + str(grpc_port), + '--compat-grpc-port', + str(compat_grpc_port), + cwd=str(REPO_ROOT), + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + + try: + await _wait_for_agent_card(f'{base_url}/.well-known/agent-card.json') + yield base_url + finally: + if proc.returncode is None: + proc.terminate() + try: + await asyncio.wait_for(proc.wait(), timeout=10.0) + except asyncio.TimeoutError: + proc.kill() + await proc.wait() + + +async def _run_cli(base_url: str, transport: str) -> str: + """Run `cli.py --transport `, send `hello`, return combined output.""" + proc = await asyncio.create_subprocess_exec( + sys.executable, + str(CLI_SCRIPT), + '--url', + base_url, + '--transport', + transport, + cwd=str(REPO_ROOT), + stdin=asyncio.subprocess.PIPE, + stdout=asyncio.subprocess.PIPE, + stderr=asyncio.subprocess.STDOUT, + ) + try: + stdout, _ = await asyncio.wait_for( + proc.communicate(b'hello\n/quit\n'), + timeout=CLI_TIMEOUT_S, + ) + except asyncio.TimeoutError: + proc.kill() + await proc.wait() + raise + output = stdout.decode('utf-8', errors='replace') + assert proc.returncode == 0, ( + f'CLI exited with {proc.returncode} for transport {transport!r}.\n' + f'Output:\n{output}' + ) + return output + + +@pytest.mark.asyncio +@pytest.mark.parametrize('transport', ['JSONRPC', 'HTTP+JSON', 'GRPC']) +async def test_cli_against_sample_agent( + running_sample_agent: str, transport: str +) -> None: + """The CLI should successfully exchange a greeting over each transport.""" + output = await _run_cli(running_sample_agent, transport) + + assert 'TASK_STATE_COMPLETED' in output, output + assert EXPECTED_REPLY in output, output diff --git a/tests/integration/test_scenarios.py b/tests/integration/test_scenarios.py new file mode 100644 index 000000000..6070a672f --- /dev/null +++ b/tests/integration/test_scenarios.py @@ -0,0 +1,2128 @@ +import asyncio +import collections +import contextlib +import logging + +from typing import Any + +import grpc +import pytest +import pytest_asyncio + +from a2a.auth.user import User +from a2a.client.client import ClientConfig +from a2a.client.client_factory import ClientFactory +from a2a.client.errors import A2AClientError +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.context import ServerCallContext +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import ( + DefaultRequestHandlerV2, + GrpcHandler, + GrpcServerCallContextBuilder, +) +from a2a.server.request_handlers.default_request_handler import ( + LegacyRequestHandler, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import a2a_pb2_grpc +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Artifact, + CancelTaskRequest, + GetTaskRequest, + ListTasksRequest, + Message, + Part, + Role, + SendMessageConfiguration, + SendMessageRequest, + SubscribeToTaskRequest, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) +from a2a.helpers.proto_helpers import new_task_from_user_message +from a2a.utils import TransportProtocol +from a2a.utils.errors import ( + InvalidParamsError, + TaskNotCancelableError, + TaskNotFoundError, + InvalidAgentResponseError, +) + + +logger = logging.getLogger(__name__) + + +async def wait_for_state( + client: Any, + task_id: str, + expected_states: set[TaskState.ValueType], + timeout: float = 1.0, +) -> None: + """Wait for the task to reach one of the expected states.""" + start_time = asyncio.get_event_loop().time() + while True: + task = await client.get_task(GetTaskRequest(id=task_id)) + if task.status.state in expected_states: + return + + if asyncio.get_event_loop().time() - start_time > timeout: + raise TimeoutError( + f'Task {task_id} did not reach expected states {expected_states} within {timeout}s. ' + f'Current state: {task.status.state}' + ) + await asyncio.sleep(0.01) + + +async def get_all_events(stream): + return [event async for event in stream] + + +class MockUser(User): + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return 'test-user' + + +class MockCallContextBuilder(GrpcServerCallContextBuilder): + def build(self, request: Any) -> ServerCallContext: + return ServerCallContext( + user=MockUser(), state={'headers': {'a2a-version': '1.0'}} + ) + + +def agent_card(): + return AgentCard( + name='Test Agent', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True), + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.GRPC, + url='http://testserver', + ) + ], + ) + + +def get_task_id(event): + if event.HasField('task'): + return event.task.id + if event.HasField('status_update'): + return event.status_update.task_id + assert False, f'Event {event} has no task_id' + + +def get_task_context_id(event): + if event.HasField('task'): + return event.task.context_id + if event.HasField('status_update'): + return event.status_update.context_id + assert False, f'Event {event} has no context_id' + + +def get_state(event): + if event.HasField('task'): + return event.task.status.state + return event.status_update.status.state + + +def validate_state(event, expected_state): + assert get_state(event) == expected_state + + +_test_servers = [] + + +@pytest_asyncio.fixture(autouse=True) +async def cleanup_test_servers(): + yield + for server in _test_servers: + await server.stop(None) + _test_servers.clear() + + +# TODO: Test different transport (e.g. HTTP_JSON hangs for some tests). +async def create_client(handler, agent_card, streaming=False): + server = grpc.aio.server() + port = server.add_insecure_port('[::]:0') + server_address = f'localhost:{port}' + + agent_card.supported_interfaces[0].url = server_address + agent_card.supported_interfaces[0].protocol_binding = TransportProtocol.GRPC + + servicer = GrpcHandler( + request_handler=handler, context_builder=MockCallContextBuilder() + ) + a2a_pb2_grpc.add_A2AServiceServicer_to_server(servicer, server) + await server.start() + _test_servers.append(server) + + factory = ClientFactory( + config=ClientConfig( + grpc_channel_factory=grpc.aio.insecure_channel, + supported_protocol_bindings=[TransportProtocol.GRPC], + streaming=streaming, + ) + ) + client = factory.create(agent_card) + client._server = server # Keep reference to prevent garbage collection + return client + + +def create_handler( + agent_executor, use_legacy, task_store=None, queue_manager=None +): + task_store = task_store or InMemoryTaskStore() + queue_manager = queue_manager or InMemoryQueueManager() + return ( + LegacyRequestHandler( + agent_executor, + task_store, + agent_card(), + queue_manager, + ) + if use_legacy + else DefaultRequestHandlerV2( + agent_executor, + task_store, + agent_card(), + queue_manager, + ) + ) + + +# Scenario 1: Cancellation of already terminal task +# This also covers test_scenario_7_cancel_terminal_task from test_handler_comparison +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_1_cancel_terminal_task(use_legacy, streaming): + class DummyAgentExecutor(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + task_store = InMemoryTaskStore() + handler = create_handler( + DummyAgentExecutor(), use_legacy, task_store=task_store + ) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + task_id = 'terminal-task' + await task_store.save( + Task( + id=task_id, status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED) + ), + ServerCallContext(user=MockUser()), + ) + with pytest.raises(TaskNotCancelableError): + await client.cancel_task(CancelTaskRequest(id=task_id)) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +async def test_scenario_4_simple_streaming(use_legacy): + class DummyAgentExecutor(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(DummyAgentExecutor(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=True + ) + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + events = [ + event + async for event in client.send_message(SendMessageRequest(message=msg)) + ] + task, status_update = events + assert task.HasField('task') + assert status_update.HasField('status_update') + + assert [get_state(event) for event in events] == [ + TaskState.TASK_STATE_WORKING, + TaskState.TASK_STATE_COMPLETED, + ] + + +# Scenario 5: Re-subscribing to a finished task +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +async def test_scenario_5_resubscribe_to_finished(use_legacy): + class DummyAgentExecutor(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(DummyAgentExecutor(), use_legacy) + client = await create_client(handler, agent_card=agent_card()) + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + (event,) = [event async for event in it] + task_id = event.task.id + + await wait_for_state( + client, task_id, expected_states={TaskState.TASK_STATE_COMPLETED} + ) + # TODO: Use different transport. + with pytest.raises( + NotImplementedError, + match='client and/or server do not support resubscription', + ): + async for _ in client.subscribe(SubscribeToTaskRequest(id=task_id)): + pass + + +# Scenario 6-8: Parity for Error cases +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenarios_simple_errors(use_legacy, streaming): + class DummyAgentExecutor(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_COMPLETED + await event_queue.enqueue_event(task) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(DummyAgentExecutor(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + with pytest.raises(TaskNotFoundError): + await client.get_task(GetTaskRequest(id='missing')) + + msg1 = Message( + task_id='missing', + message_id='missing-task', + role=Role.ROLE_USER, + parts=[Part(text='h')], + ) + with pytest.raises(TaskNotFoundError): + async for _ in client.send_message(SendMessageRequest(message=msg1)): + pass + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + (event,) = [event async for event in it] + + if streaming: + assert event.HasField('task') + task_id = event.task.id + validate_state(event, TaskState.TASK_STATE_COMPLETED) + else: + assert event.HasField('task') + task_id = event.task.id + assert event.task.status.state == TaskState.TASK_STATE_COMPLETED + + logger.info('Sending message to completed task %s', task_id) + msg2 = Message( + message_id='test-msg-2', + task_id=task_id, + role=Role.ROLE_USER, + parts=[Part(text='message to completed task')], + ) + # TODO: Is it correct error code ? + with pytest.raises(InvalidParamsError): + async for _ in client.send_message(SendMessageRequest(message=msg2)): + pass + + (task,) = (await client.list_tasks(ListTasksRequest())).tasks + assert task.status.state == TaskState.TASK_STATE_COMPLETED + (message,) = task.history + assert message.role == Role.ROLE_USER + (message_part,) = message.parts + assert message_part.text == 'hello' + + +# Scenario 9: Exception before any event. +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_9_error_before_blocking(use_legacy, streaming): + class ErrorBeforeAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + raise ValueError('TEST_ERROR_IN_EXECUTE') + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ErrorBeforeAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + # TODO: Is it correct error code ? + with pytest.raises(A2AClientError, match='TEST_ERROR_IN_EXECUTE'): + async for _ in client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration( + return_immediately=False + ), + ) + ): + pass + + if use_legacy: + # Legacy is not creating tasks for agent failures. + assert len((await client.list_tasks(ListTasksRequest())).tasks) == 0 + else: + (task,) = (await client.list_tasks(ListTasksRequest())).tasks + assert task.status.state == TaskState.TASK_STATE_FAILED + + +# Scenario 12/13: Exception after initial event +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_12_13_error_after_initial_event(use_legacy, streaming): + started_event = asyncio.Event() + continue_event = asyncio.Event() + + class ErrorAfterAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await continue_event.wait() + raise ValueError('TEST_ERROR_IN_EXECUTE') + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ErrorAfterAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + it = client.send_message(SendMessageRequest(message=msg)) + + tasks = [] + + if streaming: + res = await it.__anext__() + validate_state(res, TaskState.TASK_STATE_WORKING) + continue_event.set() + else: + + async def release_agent(): + await started_event.wait() + continue_event.set() + + tasks.append(asyncio.create_task(release_agent())) + + with pytest.raises(A2AClientError, match='TEST_ERROR_IN_EXECUTE'): + async for _ in it: + pass + + await asyncio.gather(*tasks) + + (task,) = (await client.list_tasks(ListTasksRequest())).tasks + if use_legacy: + # Legacy does not update task state on exception. + assert task.status.state == TaskState.TASK_STATE_WORKING + else: + assert task.status.state == TaskState.TASK_STATE_FAILED + + +# Scenario 14: Exception in Cancel +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_14_error_in_cancel(use_legacy, streaming): + started_event = asyncio.Event() + hang_event = asyncio.Event() + + class ErrorCancelAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await hang_event.wait() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + raise ValueError('TEST_ERROR_IN_CANCEL') + + handler = create_handler(ErrorCancelAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', + role=Role.ROLE_USER, + parts=[Part(text='hello')], + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + res = await it.__anext__() + task_id = res.task.id if res.HasField('task') else res.status_update.task_id + + await asyncio.wait_for(started_event.wait(), timeout=1.0) + + with pytest.raises(A2AClientError, match='TEST_ERROR_IN_CANCEL'): + await client.cancel_task(CancelTaskRequest(id=task_id)) + + (task,) = (await client.list_tasks(ListTasksRequest())).tasks + if use_legacy: + # Legacy does not update task state on exception. + assert task.status.state == TaskState.TASK_STATE_WORKING + else: + assert task.status.state == TaskState.TASK_STATE_FAILED + + +# Scenario 15: Subscribe to task that errors out +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +async def test_scenario_15_subscribe_error(use_legacy): + started_event = asyncio.Event() + continue_event = asyncio.Event() + + class ErrorAfterAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await continue_event.wait() + raise ValueError('TEST_ERROR_IN_EXECUTE') + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ErrorAfterAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=True + ) + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + it_start = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + res = await it_start.__anext__() + task_id = res.task.id if res.HasField('task') else res.status_update.task_id + + async def consume_events(): + async for _ in client.subscribe(SubscribeToTaskRequest(id=task_id)): + pass + + consume_task = asyncio.create_task(consume_events()) + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(asyncio.shield(consume_task), timeout=0.1) + + await asyncio.wait_for(started_event.wait(), timeout=1.0) + continue_event.set() + + if use_legacy: + # Legacy client hangs forever. + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(consume_task, timeout=0.1) + else: + with pytest.raises(A2AClientError, match='TEST_ERROR_IN_EXECUTE'): + await consume_task + + (task,) = (await client.list_tasks(ListTasksRequest())).tasks + if use_legacy: + # Legacy does not update task state on exception. + assert task.status.state == TaskState.TASK_STATE_WORKING + else: + assert task.status.state == TaskState.TASK_STATE_FAILED + + +# Scenario 16: Slow execution and return_immediately=True +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_16_slow_execution(use_legacy, streaming): + started_event = asyncio.Event() + hang_event = asyncio.Event() + + class SlowAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + started_event.set() + await hang_event.wait() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + queue_manager = InMemoryQueueManager() + handler = create_handler( + SlowAgent(), use_legacy, queue_manager=queue_manager + ) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', + role=Role.ROLE_USER, + parts=[Part(text='hello')], + ) + + async def send_message_and_get_first_response(): + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + return await asyncio.wait_for(it.__anext__(), timeout=0.1) + + # First response should not be there yet. + with pytest.raises(asyncio.TimeoutError): + await send_message_and_get_first_response() + + tasks = (await client.list_tasks(ListTasksRequest())).tasks + assert len(tasks) == 0 + + +# Scenario 17: Cancellation of a working task. +# @pytest.mark.skip +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_cancel_working_task_empty_cancel(use_legacy, streaming): + started_event = asyncio.Event() + hang_event = asyncio.Event() + + class DummyCancelAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await hang_event.wait() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + # TODO: this should be done automatically by the framework ? + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_CANCELED), + ) + ) + + handler = create_handler(DummyCancelAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + res = await it.__anext__() + task_id = res.task.id if res.HasField('task') else res.status_update.task_id + + await asyncio.wait_for(started_event.wait(), timeout=1.0) + + task_before = await client.get_task(GetTaskRequest(id=task_id)) + assert task_before.status.state == TaskState.TASK_STATE_WORKING + + cancel_res = await client.cancel_task(CancelTaskRequest(id=task_id)) + assert cancel_res.status.state == TaskState.TASK_STATE_CANCELED + + task_after = await client.get_task(GetTaskRequest(id=task_id)) + assert task_after.status.state == TaskState.TASK_STATE_CANCELED + + (task_from_list,) = (await client.list_tasks(ListTasksRequest())).tasks + assert task_from_list.status.state == TaskState.TASK_STATE_CANCELED + + +# Scenario 18: Complex streaming with multiple subscribers +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +async def test_scenario_18_streaming_subscribers(use_legacy): + started_event = asyncio.Event() + working_event = asyncio.Event() + completed_event = asyncio.Event() + + class ComplexAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await working_event.wait() + + await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + artifact=Artifact(artifact_id='test-art'), + ) + ) + await completed_event.wait() + + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ComplexAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=True + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + res = await it.__anext__() + task_id = res.task.id if res.HasField('task') else res.status_update.task_id + + await asyncio.wait_for(started_event.wait(), timeout=1.0) + + # create first subscriber + sub1 = client.subscribe(SubscribeToTaskRequest(id=task_id)) + + # first subscriber receives current task state (WORKING) + validate_state(await sub1.__anext__(), TaskState.TASK_STATE_WORKING) + + # create second subscriber + sub2 = client.subscribe(SubscribeToTaskRequest(id=task_id)) + + # second subscriber receives current task state (WORKING) + validate_state(await sub2.__anext__(), TaskState.TASK_STATE_WORKING) + + working_event.set() + + # validate what both subscribers observed (artifact) + res1_art = await sub1.__anext__() + assert res1_art.artifact_update.artifact.artifact_id == 'test-art' + + res2_art = await sub2.__anext__() + assert res2_art.artifact_update.artifact.artifact_id == 'test-art' + + completed_event.set() + + # validate what both subscribers observed (completed) + validate_state(await sub1.__anext__(), TaskState.TASK_STATE_COMPLETED) + validate_state(await sub2.__anext__(), TaskState.TASK_STATE_COMPLETED) + + # validate final task state with getTask + final_task = await client.get_task(GetTaskRequest(id=task_id)) + assert final_task.status.state == TaskState.TASK_STATE_COMPLETED + + (artifact,) = final_task.artifacts + assert artifact.artifact_id == 'test-art' + + (message,) = final_task.history + assert message.parts[0].text == 'hello' + + +# Scenario 19: Parallel executions for the same task should not happen simultaneously. +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_19_no_parallel_executions(use_legacy, streaming): + started_event = asyncio.Event() + continue_event = asyncio.Event() + executions_count = 0 + + class CountingAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + nonlocal executions_count + executions_count += 1 + + if executions_count > 1: + await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + artifact=Artifact(artifact_id='SECOND_EXECUTION'), + ) + ) + return + + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + started_event.set() + await continue_event.wait() + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(CountingAgent(), use_legacy) + client1 = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + client2 = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg1 = Message( + message_id='test-msg-1', + role=Role.ROLE_USER, + parts=[Part(text='hello 1')], + ) + + # First client sends initial message + it1 = client1.send_message( + SendMessageRequest( + message=msg1, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + task1 = asyncio.create_task(it1.__anext__()) + + # Wait for the first execution to reach the WORKING state + await asyncio.wait_for(started_event.wait(), timeout=1.0) + assert executions_count == 1 + + # Extract task_id from the first call using list_tasks + (task,) = (await client1.list_tasks(ListTasksRequest())).tasks + task_id = task.id + + msg2 = Message( + message_id='test-msg-2', + task_id=task_id, + role=Role.ROLE_USER, + parts=[Part(text='hello 2')], + ) + + # Second client sends a message to the same task + it2 = client2.send_message( + SendMessageRequest( + message=msg2, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + task2 = asyncio.create_task(it2.__anext__()) + + if use_legacy: + # Legacy handler executes the second request in parallel. + await task2 + assert executions_count == 2 + else: + # V2 handler queues the second request. + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(asyncio.shield(task2), timeout=0.1) + assert executions_count == 1 + + # Unblock AgentExecutor + continue_event.set() + + # Verify that both calls for clients finished. + if use_legacy and not streaming: + # Legacy handler fails on first execution. + with pytest.raises(A2AClientError, match='NoTaskQueue'): + await task1 + else: + await task1 + + try: + await task2 + except StopAsyncIteration: + # TODO: Test is flaky. Debug it. + return + + # Consume remaining events if any + async def consume(it): + async for _ in it: + pass + + await asyncio.gather(consume(it1), consume(it2)) + assert executions_count == 2 + + # Validate final task state. + final_task = await client1.get_task(GetTaskRequest(id=task_id)) + + if use_legacy: + # Legacy handler fails to complete the task. + assert final_task.status.state == TaskState.TASK_STATE_WORKING + else: + assert final_task.status.state == TaskState.TASK_STATE_COMPLETED + + # TODO: What is expected state of messages and artifacts? + + +# Scenario: Validate return_immediately flag behaviour. +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_return_immediately(use_legacy, streaming): + class ImmediateAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ImmediateAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='hello')] + ) + + # Test non-blocking return. + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + states = [get_state(event) async for event in it] + + if streaming: + assert states == [ + TaskState.TASK_STATE_WORKING, + TaskState.TASK_STATE_COMPLETED, + ] + else: + assert states == [TaskState.TASK_STATE_WORKING] + + +# Scenario: Test TASK_STATE_INPUT_REQUIRED. +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_resumption_from_interrupted(use_legacy, streaming): + class ResumingAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + message = context.message + if message and message.parts and message.parts[0].text == 'start': + task = new_task_from_user_message(message) + task.status.state = TaskState.TASK_STATE_INPUT_REQUIRED + await event_queue.enqueue_event(task) + elif ( + message + and message.parts + and message.parts[0].text == 'here is input' + ): + task = new_task_from_user_message(message) + task.status.state = TaskState.TASK_STATE_COMPLETED + await event_queue.enqueue_event(task) + else: + raise ValueError('Unexpected message') + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ResumingAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + # First send message to get it into input required state + msg1 = Message( + message_id='msg-start', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg1, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + events1 = [event async for event in it] + assert [get_state(event) for event in events1] == [ + TaskState.TASK_STATE_INPUT_REQUIRED, + ] + task_id = events1[0].status_update.task_id + context_id = events1[0].status_update.context_id + + # Now send another message to resume + msg2 = Message( + task_id=task_id, + context_id=context_id, + message_id='msg-resume', + role=Role.ROLE_USER, + parts=[Part(text='here is input')], + ) + + it2 = client.send_message( + SendMessageRequest( + message=msg2, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + assert [get_state(event) async for event in it2] == [ + TaskState.TASK_STATE_COMPLETED, + ] + + +# Scenario: Auth required and side channel unblocking +# Migrated from: test_workflow_auth_required_side_channel in test_handler_comparison +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_auth_required_side_channel(use_legacy, streaming): + side_channel_event = asyncio.Event() + + class AuthAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_AUTH_REQUIRED), + ) + ) + + await side_channel_event.wait() + + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(AuthAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + if streaming: + event1 = await asyncio.wait_for(it.__anext__(), timeout=1.0) + assert get_state(event1) == TaskState.TASK_STATE_WORKING + + event2 = await asyncio.wait_for(it.__anext__(), timeout=1.0) + assert get_state(event2) == TaskState.TASK_STATE_AUTH_REQUIRED + + task_id = event2.status_update.task_id + + side_channel_event.set() + + # Remaining event. + (event3,) = [event async for event in it] + assert get_state(event3) == TaskState.TASK_STATE_COMPLETED + else: + (event,) = [event async for event in it] + assert get_state(event) == TaskState.TASK_STATE_AUTH_REQUIRED + task_id = event.task.id + + side_channel_event.set() + + await wait_for_state( + client, task_id, expected_states={TaskState.TASK_STATE_COMPLETED} + ) + + +# Scenario: Auth required and in channel unblocking +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_auth_required_in_channel(use_legacy, streaming): + class AuthAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + message = context.message + if message and message.parts and message.parts[0].text == 'start': + task = new_task_from_user_message(message) + task.status.state = TaskState.TASK_STATE_AUTH_REQUIRED + await event_queue.enqueue_event(task) + elif ( + message + and message.parts + and message.parts[0].text == 'credentials' + ): + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + else: + raise ValueError(f'Unexpected message {message}') + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(AuthAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg1 = Message( + message_id='msg-start', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg1, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + events1 = [event async for event in it] + assert [get_state(event) for event in events1] == [ + TaskState.TASK_STATE_AUTH_REQUIRED, + ] + task_id = get_task_id(events1[0]) + context_id = get_task_context_id(events1[0]) + + # Now send another message with credentials + msg2 = Message( + task_id=task_id, + context_id=context_id, + message_id='msg-creds', + role=Role.ROLE_USER, + parts=[Part(text='credentials')], + ) + + it2 = client.send_message( + SendMessageRequest( + message=msg2, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + + assert [get_state(event) async for event in it2] == [ + TaskState.TASK_STATE_COMPLETED, + ] + + +# Scenario: Parallel subscribe attach detach +# Migrated from: test_parallel_subscribe_attach_detach in test_handler_comparison +@pytest.mark.timeout(5.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +async def test_scenario_parallel_subscribe_attach_detach(use_legacy): # noqa: PLR0915 + events = collections.defaultdict(asyncio.Event) + + class EmitAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + + phases = [ + ('trigger_phase_1', 'artifact_1'), + ('trigger_phase_2', 'artifact_2'), + ('trigger_phase_3', 'artifact_3'), + ('trigger_phase_4', 'artifact_4'), + ] + + for trigger_name, artifact_id in phases: + await events[trigger_name].wait() + await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + artifact=Artifact( + artifact_id=artifact_id, + parts=[Part(text=artifact_id)], + ), + ) + ) + + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(EmitAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=True + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=True), + ) + ) + + res = await it.__anext__() + task_id = res.task.id if res.HasField('task') else res.status_update.task_id + + async def monitor_artifacts(): + try: + async for event in client.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + if event.HasField('artifact_update'): + artifact_id = event.artifact_update.artifact.artifact_id + if artifact_id.startswith('artifact_'): + phase_num = artifact_id.split('_')[1] + events[f'emitted_phase_{phase_num}'].set() + except asyncio.CancelledError: + pass + + monitor_task = asyncio.create_task(monitor_artifacts()) + + async def subscribe_and_collect(artifacts_to_collect: int | None = None): + ready_event = asyncio.Event() + + async def collect(): + collected = [] + artifacts_seen = 0 + try: + async for event in client.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + collected.append(event) + ready_event.set() + if event.HasField('artifact_update'): + artifacts_seen += 1 + if ( + artifacts_to_collect is not None + and artifacts_seen >= artifacts_to_collect + ): + break + except asyncio.CancelledError: + pass + return collected + + task = asyncio.create_task(collect()) + await ready_event.wait() + return task + + sub1_task = await subscribe_and_collect() + + events['trigger_phase_1'].set() + await events['emitted_phase_1'].wait() + + sub2_task = await subscribe_and_collect(artifacts_to_collect=1) + sub3_task = await subscribe_and_collect(artifacts_to_collect=2) + + events['trigger_phase_2'].set() + await events['emitted_phase_2'].wait() + + events['trigger_phase_3'].set() + await events['emitted_phase_3'].wait() + + sub4_task = await subscribe_and_collect() + + events['trigger_phase_4'].set() + await events['emitted_phase_4'].wait() + + def get_artifact_updates(evs): + return [ + [p.text for p in sr.artifact_update.artifact.parts] + for sr in evs + if sr.HasField('artifact_update') + ] + + assert get_artifact_updates(await sub1_task) == [ + ['artifact_1'], + ['artifact_2'], + ['artifact_3'], + ['artifact_4'], + ] + + assert get_artifact_updates(await sub2_task) == [ + ['artifact_2'], + ] + assert get_artifact_updates(await sub3_task) == [ + ['artifact_2'], + ['artifact_3'], + ] + assert get_artifact_updates(await sub4_task) == [ + ['artifact_4'], + ] + + monitor_task.cancel() + + +# Return message directly. +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +@pytest.mark.parametrize( + 'return_immediately', + [False, True], + ids=['no_return_immediately', 'return_immediately'], +) +async def test_scenario_publish_message( + use_legacy, streaming, return_immediately +): + class MessageAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + await event_queue.enqueue_event( + Message( + task_id=context.task_id, + context_id=context.context_id, + message_id='msg-1', + role=Role.ROLE_AGENT, + parts=[Part(text='response text')], + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(MessageAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration( + return_immediately=return_immediately + ), + ) + ) + events = [event async for event in it] + + (event,) = events + assert event.HasField('message') + assert event.message.parts[0].text == 'response text' + + tasks = (await client.list_tasks(ListTasksRequest())).tasks + assert len(tasks) == 0 + + +# Scenario: Publish ArtifactUpdateEvent +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_publish_artifact(use_legacy, streaming): + class ArtifactAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + artifact=Artifact( + artifact_id='art-1', parts=[Part(text='artifact data')] + ), + ) + ) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(ArtifactAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + events = [event async for event in it] + + if streaming: + last_event = events[-1] + assert get_state(last_event) == TaskState.TASK_STATE_COMPLETED + + artifact_events = [e for e in events if e.HasField('artifact_update')] + assert len(artifact_events) > 0, ( + 'Bug: Streaming should return the artifact update event' + ) + assert ( + artifact_events[0].artifact_update.artifact.artifact_id == 'art-1' + ) + else: + last_event = events[-1] + assert last_event.HasField('task') + assert last_event.task.status.state == TaskState.TASK_STATE_COMPLETED + + assert len(last_event.task.artifacts) > 0, ( + 'Bug: Task should include the published artifact' + ) + assert last_event.task.artifacts[0].artifact_id == 'art-1' + + +# Scenario: Enqueue Task twice +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_enqueue_task_twice(caplog, use_legacy, streaming): + class DoubleTaskAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task1 = Task( + id=context.task_id, + context_id=context.context_id, + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + message=Message(parts=[Part(text='First task')]), + ), + ) + await event_queue.enqueue_event(task1) + + # This is undefined behavior, but it should not crash or hang. + task2 = Task( + id=context.task_id, + context_id=context.context_id, + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + message=Message(parts=[Part(text='Second task')]), + ), + ) + await event_queue.enqueue_event(task2) + + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(DoubleTaskAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + _ = [event async for event in it] + + (final_task,) = (await client.list_tasks(ListTasksRequest())).tasks + + if use_legacy: + assert [part.text for part in final_task.history[0].parts] == [ + 'Second task' + ] + else: + assert [part.text for part in final_task.history[0].parts] == [ + 'First task' + ] + + # Validate that new version logs with error exactly once 'Ignoring task replacement' + error_logs = [ + record.message + for record in caplog.records + if record.levelname == 'ERROR' + and 'Ignoring task replacement' in record.message + ] + + assert len(error_logs) == 1 + + +# Scenario: Task restoration - terminal state +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +@pytest.mark.parametrize( + 'subscribe_first', + [False, True], + ids=['no_subscribe_first', 'subscribe_first'], +) +async def test_restore_task_terminal_state( + use_legacy, streaming, subscribe_first +): + class TerminalAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_COMPLETED + await event_queue.enqueue_event(task) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + task_store = InMemoryTaskStore() + handler1 = create_handler( + TerminalAgent(), use_legacy, task_store=task_store + ) + client1 = await create_client( + handler1, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg-1', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + it1 = client1.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + events1 = [event async for event in it1] + task_id = get_task_id(events1[-1]) + + await wait_for_state( + client1, task_id, expected_states={TaskState.TASK_STATE_COMPLETED} + ) + + # Restore task in a new handler (simulating server restart) + handler2 = create_handler( + TerminalAgent(), use_legacy, task_store=task_store + ) + client2 = await create_client( + handler2, agent_card=agent_card(), streaming=streaming + ) + + restored_task = await client2.get_task(GetTaskRequest(id=task_id)) + assert restored_task.status.state == TaskState.TASK_STATE_COMPLETED + + if subscribe_first and streaming: + with pytest.raises( + Exception, + match=r'terminal state', + ): + async for _ in client2.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + pass + + msg2 = Message( + task_id=task_id, + message_id='test-msg-2', + role=Role.ROLE_USER, + parts=[Part(text='message to completed task')], + ) + + with pytest.raises(Exception, match=r'terminal state'): + async for _ in client2.send_message(SendMessageRequest(message=msg2)): + pass + + if streaming: + with pytest.raises( + Exception, + match=r'terminal state', + ): + async for _ in client2.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + pass + + +# Scenario: Task restoration - user input required state +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +@pytest.mark.parametrize( + 'subscribe_mode', + ['none', 'drop', 'listen'], + ids=['no_sub', 'sub_drop', 'sub_listen'], +) +async def test_restore_task_input_required_state( + use_legacy, streaming, subscribe_mode +): + class InputAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + message = context.message + if message and message.parts and message.parts[0].text == 'start': + task = new_task_from_user_message(message) + task.status.state = TaskState.TASK_STATE_INPUT_REQUIRED + await event_queue.enqueue_event(task) + elif message and message.parts and message.parts[0].text == 'input': + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + task_store = InMemoryTaskStore() + handler1 = create_handler(InputAgent(), use_legacy, task_store=task_store) + client1 = await create_client( + handler1, agent_card=agent_card(), streaming=streaming + ) + + msg1 = Message( + message_id='test-msg-1', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + it1 = client1.send_message( + SendMessageRequest( + message=msg1, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + events1 = [event async for event in it1] + + task_id = get_task_id(events1[-1]) + context_id = get_task_context_id(events1[-1]) + + await wait_for_state( + client1, task_id, expected_states={TaskState.TASK_STATE_INPUT_REQUIRED} + ) + + # Restore task in a new handler (simulating server restart) + handler2 = create_handler(InputAgent(), use_legacy, task_store=task_store) + client2 = await create_client( + handler2, agent_card=agent_card(), streaming=streaming + ) + + restored_task = await client2.get_task(GetTaskRequest(id=task_id)) + assert restored_task.status.state == TaskState.TASK_STATE_INPUT_REQUIRED + + # Subscription logic based on mode + listen_task = None + if streaming: + if subscribe_mode == 'drop': + # Subscribing and dropping immediately (cancelling the generator) + async for _ in client2.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + break + elif subscribe_mode == 'listen': + sub_started_event = asyncio.Event() + + async def listen_to_end(): + res = [] + async for ev in client2.subscribe( + SubscribeToTaskRequest(id=task_id) + ): + res.append(ev) + sub_started_event.set() + return res + + listen_task = asyncio.create_task(listen_to_end()) + # Wait for subscription to establish and yield the initial task event + await asyncio.wait_for(sub_started_event.wait(), timeout=1.0) + + msg2 = Message( + task_id=task_id, + context_id=context_id, + message_id='test-msg-2', + role=Role.ROLE_USER, + parts=[Part(text='input')], + ) + + it2 = client2.send_message( + SendMessageRequest( + message=msg2, + configuration=SendMessageConfiguration(return_immediately=False), + ) + ) + events2 = [event async for event in it2] + + if streaming: + assert ( + events2[-1].status_update.status.state + == TaskState.TASK_STATE_COMPLETED + ) + else: + assert events2[-1].task.status.state == TaskState.TASK_STATE_COMPLETED + + if listen_task: + if use_legacy and streaming: + # Error: Legacy handler does not properly manage subscriptions for restored tasks + with pytest.raises(TaskNotFoundError): + await listen_task + else: + listen_events = await listen_task + # The first event is the initial task state (INPUT_REQUIRED), the last should be COMPLETED + assert ( + get_state(listen_events[-1]) == TaskState.TASK_STATE_COMPLETED + ) + + final_task = await client2.get_task(GetTaskRequest(id=task_id)) + assert final_task.status.state == TaskState.TASK_STATE_COMPLETED + + +# Scenario 20: Create initial task with new_task +@pytest.mark.timeout(2.0) +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +@pytest.mark.parametrize('initial_task_type', ['new_task', 'status_update']) +async def test_scenario_initial_task_types( + use_legacy, streaming, initial_task_type +): + started_event = asyncio.Event() + continue_event = asyncio.Event() + + class InitialTaskAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + if initial_task_type == 'new_task': + # Create with new_task + task = new_task_from_user_message(context.message) + task.status.state = TaskState.TASK_STATE_WORKING + await event_queue.enqueue_event(task) + else: + # Create with status update (illegal in v2) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ) + + started_event.set() + await continue_event.wait() + + await event_queue.enqueue_event( + TaskArtifactUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + artifact=Artifact( + artifact_id='art-1', parts=[Part(text='artifact data')] + ), + ) + ) + + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=context.task_id, + context_id=context.context_id, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(InitialTaskAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message( + SendMessageRequest( + message=msg, + configuration=SendMessageConfiguration( + return_immediately=streaming + ), + ) + ) + + if streaming: + if initial_task_type == 'status_update' and not use_legacy: + with pytest.raises( + InvalidAgentResponseError, + match='Agent should enqueue Task before TaskStatusUpdateEvent event', + ): + await it.__anext__() + + # End of the test. + return + + res = await it.__anext__() + if initial_task_type == 'status_update' and use_legacy: + # First message has to be a Task. + assert res.HasField('status_update') + + # End of the test. + return + + assert res.HasField('task') + task_id = get_task_id(res) + + await asyncio.wait_for(started_event.wait(), timeout=1.0) + + # Start subscription + sub = client.subscribe(SubscribeToTaskRequest(id=task_id)) + + # first subscriber receives current task state (WORKING) + first_event = await sub.__anext__() + assert first_event.HasField('task') + + continue_event.set() + + events = [first_event] + [event async for event in sub] + else: + # blocking + async def release_agent(): + await started_event.wait() + continue_event.set() + + release_task = asyncio.create_task(release_agent()) + if initial_task_type == 'status_update' and not use_legacy: + with pytest.raises( + InvalidAgentResponseError, + match='Agent should enqueue Task before TaskStatusUpdateEvent event', + ): + events = [event async for event in it] + # End of the test + return + else: + events = [event async for event in it] + await release_task + + if streaming: + task, artifact_update, status_update = events + assert task.HasField('task') + validate_state(task, TaskState.TASK_STATE_WORKING) + assert artifact_update.artifact_update.artifact.artifact_id == 'art-1' + assert status_update.HasField('status_update') + validate_state(status_update, TaskState.TASK_STATE_COMPLETED) + else: + (task,) = events + assert task.HasField('task') + validate_state(task, TaskState.TASK_STATE_COMPLETED) + (artifact,) = task.task.artifacts + assert artifact.artifact_id == 'art-1' + task_id = task.task.id + + (final_task_from_list,) = ( + await client.list_tasks(ListTasksRequest(include_artifacts=True)) + ).tasks + assert len(final_task_from_list.artifacts) > 0 + assert final_task_from_list.artifacts[0].artifact_id == 'art-1' + + final_task = await client.get_task(GetTaskRequest(id=task_id)) + assert final_task.status.state == TaskState.TASK_STATE_COMPLETED + assert len(final_task.artifacts) > 0 + assert final_task.artifacts[0].artifact_id == 'art-1' + + +# Scenario 23: Invalid Agent Response - Task followed by Message +@pytest.mark.asyncio +@pytest.mark.parametrize('use_legacy', [False, True], ids=['v2', 'legacy']) +@pytest.mark.parametrize( + 'streaming', [False, True], ids=['blocking', 'streaming'] +) +async def test_scenario_23_invalid_response_task_message(use_legacy, streaming): + class TaskMessageAgent(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ): + await event_queue.enqueue_event( + new_task_from_user_message(context.message) + ) + await event_queue.enqueue_event( + Message(message_id='m1', parts=[Part(text='m1')]) + ) + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ): + pass + + handler = create_handler(TaskMessageAgent(), use_legacy) + client = await create_client( + handler, agent_card=agent_card(), streaming=streaming + ) + + msg = Message( + message_id='test-msg', role=Role.ROLE_USER, parts=[Part(text='start')] + ) + + it = client.send_message(SendMessageRequest(message=msg)) + + if use_legacy: + # Legacy: no error. + async for _ in it: + pass + else: + with pytest.raises( + InvalidAgentResponseError, + match='Received Message object in task mode', + ): + async for _ in it: + pass diff --git a/tests/integration/test_stream_generator_cleanup.py b/tests/integration/test_stream_generator_cleanup.py new file mode 100644 index 000000000..f26f62c6f --- /dev/null +++ b/tests/integration/test_stream_generator_cleanup.py @@ -0,0 +1,134 @@ +"""Test that streaming SSE responses clean up without athrow() errors. + +Reproduces https://github.com/a2aproject/a2a-python/issues/911 — +``RuntimeError: athrow(): asynchronous generator is already running`` +during event-loop shutdown after consuming a streaming response. +""" + +import asyncio +import gc + +from typing import Any +from uuid import uuid4 + +import httpx +import pytest + +from starlette.applications import Starlette + +from a2a.client.base_client import BaseClient +from a2a.client.client import ClientConfig +from a2a.client.client_factory import ClientFactory +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Message, + Part, + Role, + SendMessageRequest, +) +from a2a.utils import TransportProtocol + + +class _MessageExecutor(AgentExecutor): + """Responds with a single Message event.""" + + async def execute(self, ctx: RequestContext, eq: EventQueue) -> None: + await eq.enqueue_event( + Message( + role=Role.ROLE_AGENT, + message_id=str(uuid4()), + parts=[Part(text='Hello')], + context_id=ctx.context_id, + task_id=ctx.task_id, + ) + ) + + async def cancel(self, ctx: RequestContext, eq: EventQueue) -> None: + pass + + +@pytest.fixture +def client(): + """Creates a JSON-RPC client backed by an in-process ASGI server.""" + card = AgentCard( + name='T', + description='T', + version='1', + capabilities=AgentCapabilities(streaming=True), + default_input_modes=['text/plain'], + default_output_modes=['text/plain'], + supported_interfaces=[ + AgentInterface( + protocol_binding=TransportProtocol.JSONRPC, + url='http://test', + ), + ], + ) + handler = DefaultRequestHandler( + agent_executor=_MessageExecutor(), + task_store=InMemoryTaskStore(), + agent_card=card, + queue_manager=InMemoryQueueManager(), + ) + app = Starlette( + routes=[ + *create_agent_card_routes(agent_card=card, card_url='/card'), + *create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ), + ] + ) + return ClientFactory( + config=ClientConfig( + httpx_client=httpx.AsyncClient( + transport=httpx.ASGITransport(app=app), + base_url='http://test', + ) + ) + ).create(card) + + +@pytest.mark.asyncio +async def test_stream_message_no_athrow(client: BaseClient) -> None: + """Consuming a streamed Message must not leave broken async generators.""" + errors: list[dict[str, Any]] = [] + loop = asyncio.get_event_loop() + orig = loop.get_exception_handler() + loop.set_exception_handler(lambda _l, ctx: errors.append(ctx)) + + try: + msg = Message( + role=Role.ROLE_USER, + message_id=f'msg-{uuid4()}', + parts=[Part(text='hi')], + ) + events = [ + e + async for e in client.send_message( + request=SendMessageRequest(message=msg) + ) + ] + assert events + assert events[0].HasField('message') + + gc.collect() + await loop.shutdown_asyncgens() + + bad = [ + e + for e in errors + if 'asynchronous generator' in str(e.get('message', '')) + ] + assert not bad, '\n'.join(str(e.get('message', '')) for e in bad) + finally: + loop.set_exception_handler(orig) + await client.close() diff --git a/tests/integration/test_tenant.py b/tests/integration/test_tenant.py new file mode 100644 index 000000000..6b489270b --- /dev/null +++ b/tests/integration/test_tenant.py @@ -0,0 +1,250 @@ +import pytest +from unittest.mock import AsyncMock, patch, MagicMock +import httpx +from httpx import ASGITransport, AsyncClient + +from a2a.types.a2a_pb2 import ( + AgentCard, + AgentInterface, + SendMessageRequest, + Message, + GetTaskRequest, + AgentCapabilities, + ListTasksRequest, + ListTasksResponse, + Task, +) +from a2a.client.transports import RestTransport, JsonRpcTransport, GrpcTransport +from a2a.client.transports.tenant_decorator import TenantTransportDecorator +from a2a.client import ClientConfig, ClientFactory +from a2a.utils.constants import TransportProtocol + +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from starlette.applications import Starlette +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.context import ServerCallContext + + +class TestTenantDecorator: + @pytest.fixture + def agent_card(self): + return AgentCard( + supported_interfaces=[ + AgentInterface( + url='http://example.com/rest', + protocol_binding=TransportProtocol.HTTP_JSON, + tenant='tenant-1', + ), + AgentInterface( + url='http://example.com/jsonrpc', + protocol_binding=TransportProtocol.JSONRPC, + tenant='tenant-2', + ), + AgentInterface( + url='http://example.com/grpc', + protocol_binding=TransportProtocol.GRPC, + tenant='tenant-3', + ), + ], + capabilities=AgentCapabilities(streaming=True), + ) + + @pytest.mark.asyncio + async def test_tenant_decorator_rest(self, agent_card): + mock_httpx = AsyncMock(spec=httpx.AsyncClient) + mock_httpx.build_request.return_value = MagicMock() + mock_httpx.send.return_value = MagicMock( + status_code=200, json=lambda: {'message': {}} + ) + + config = ClientConfig( + httpx_client=mock_httpx, + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], + ) + factory = ClientFactory(config) + client = factory.create(agent_card) + + assert isinstance(client._transport, TenantTransportDecorator) + assert client._transport._tenant == 'tenant-1' + + # Test SendMessage (POST) - Use transport directly to avoid streaming complexity in mock + request = SendMessageRequest(message=Message(parts=[{'text': 'hi'}])) + await client._transport.send_message(request) + + # Check that tenant was populated in request + assert request.tenant == 'tenant-1' + + # Check that path was prepended in the underlying transport + mock_httpx.build_request.assert_called() + send_call = next( + c + for c in mock_httpx.build_request.call_args_list + if 'message:send' in c.args[1] + ) + args, kwargs = send_call + assert args[1] == 'http://example.com/rest/tenant-1/message:send' + assert 'tenant' in kwargs['json'] + + @pytest.mark.asyncio + async def test_tenant_decorator_jsonrpc(self, agent_card): + mock_httpx = AsyncMock(spec=httpx.AsyncClient) + mock_httpx.build_request.return_value = MagicMock() + mock_httpx.send.return_value = MagicMock( + status_code=200, + json=lambda: { + 'result': {'message': {}}, + 'id': '1', + 'jsonrpc': '2.0', + }, + ) + + config = ClientConfig( + httpx_client=mock_httpx, + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) + factory = ClientFactory(config) + client = factory.create(agent_card) + + assert isinstance(client._transport, TenantTransportDecorator) + assert client._transport._tenant == 'tenant-2' + + request = SendMessageRequest(message=Message(parts=[{'text': 'hi'}])) + await client._transport.send_message(request) + + mock_httpx.build_request.assert_called() + _, kwargs = mock_httpx.build_request.call_args + assert kwargs['json']['params']['tenant'] == 'tenant-2' + + @pytest.mark.asyncio + async def test_tenant_decorator_grpc(self, agent_card): + mock_channel = MagicMock() + config = ClientConfig( + grpc_channel_factory=lambda url: mock_channel, + supported_protocol_bindings=[TransportProtocol.GRPC], + ) + + with patch('a2a.types.a2a_pb2_grpc.A2AServiceStub') as mock_stub_class: + mock_stub = mock_stub_class.return_value + mock_stub.SendMessage = AsyncMock(return_value={'message': {}}) + + factory = ClientFactory(config) + client = factory.create(agent_card) + + assert isinstance(client._transport, TenantTransportDecorator) + assert client._transport._tenant == 'tenant-3' + + await client._transport.send_message( + SendMessageRequest(message=Message(parts=[{'text': 'hi'}])) + ) + + call_args = mock_stub.SendMessage.call_args + assert call_args[0][0].tenant == 'tenant-3' + + @pytest.mark.asyncio + async def test_tenant_decorator_explicit_override(self, agent_card): + mock_httpx = AsyncMock(spec=httpx.AsyncClient) + mock_httpx.build_request.return_value = MagicMock() + mock_httpx.send.return_value = MagicMock( + status_code=200, json=lambda: {'message': {}} + ) + + config = ClientConfig( + httpx_client=mock_httpx, + supported_protocol_bindings=[TransportProtocol.HTTP_JSON], + ) + factory = ClientFactory(config) + client = factory.create(agent_card) + + request = SendMessageRequest( + message=Message(parts=[{'text': 'hi'}]), tenant='explicit-tenant' + ) + await client._transport.send_message(request) + + assert request.tenant == 'explicit-tenant' + + send_call = next( + c + for c in mock_httpx.build_request.call_args_list + if 'message:send' in c.args[1] + ) + args, _ = send_call + assert args[1] == 'http://example.com/rest/explicit-tenant/message:send' + + +class TestJSONRPCTenantIntegration: + @pytest.fixture + def mock_handler(self): + handler = AsyncMock(spec=RequestHandler) + handler.on_list_tasks.return_value = ListTasksResponse( + tasks=[Task(id='task-1')] + ) + return handler + + @pytest.fixture + def jsonrpc_agent_card(self): + return AgentCard( + supported_interfaces=[ + AgentInterface( + url='http://testserver/jsonrpc', + protocol_binding=TransportProtocol.JSONRPC, + tenant='my-test-tenant', + ), + ], + capabilities=AgentCapabilities( + streaming=False, + push_notifications=False, + ), + ) + + @pytest.fixture + def server_app(self, jsonrpc_agent_card, mock_handler): + agent_card_routes = create_agent_card_routes( + agent_card=jsonrpc_agent_card, card_url='/' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_handler, + rpc_url='/jsonrpc', + ) + app = Starlette(routes=[*agent_card_routes, *jsonrpc_routes]) + return app + + @pytest.mark.asyncio + async def test_jsonrpc_tenant_context_population( + self, server_app, mock_handler, jsonrpc_agent_card + ): + """ + Integration test to verify that a tenant configured in the client + is correctly propagated to the ServerCallContext in the server + via the JSON-RPC transport. + """ + # 1. Setup the client using the server app as the transport + # We use ASGITransport so httpx calls go directly to the Starlette app + transport = ASGITransport(app=server_app) + async with AsyncClient( + transport=transport, base_url='http://testserver' + ) as httpx_client: + # Create the A2A client properly configured + config = ClientConfig( + httpx_client=httpx_client, + supported_protocol_bindings=[TransportProtocol.JSONRPC], + ) + factory = ClientFactory(config) + client = factory.create(jsonrpc_agent_card) + + # 2. Make the call (list_tasks) + response = await client.list_tasks(ListTasksRequest()) + + # 3. Verify response + assert len(response.tasks) == 1 + assert response.tasks[0].id == 'task-1' + + # 4. Verify ServerCallContext on the server side + mock_handler.on_list_tasks.assert_called_once() + call_args = mock_handler.on_list_tasks.call_args + + # call_args[0] are positional args: (request, context) + # Check call_args signature in jsonrpc_handler.py: await self.handler.list_tasks(request_obj, context) + + server_context = call_args[0][1] + assert isinstance(server_context, ServerCallContext) + assert server_context.tenant == 'my-test-tenant' diff --git a/tests/integration/test_version_header.py b/tests/integration/test_version_header.py new file mode 100644 index 000000000..046f4d4cc --- /dev/null +++ b/tests/integration/test_version_header.py @@ -0,0 +1,205 @@ +import pytest + +from fastapi import FastAPI +from starlette.testclient import TestClient + +from a2a.server.agent_execution import AgentExecutor, RequestContext +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes +from a2a.server.events import EventQueue +from a2a.server.events.in_memory_queue_manager import InMemoryQueueManager +from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.tasks.inmemory_push_notification_config_store import ( + InMemoryPushNotificationConfigStore, +) +from a2a.server.tasks.inmemory_task_store import InMemoryTaskStore +from a2a.types.a2a_pb2 import AgentCapabilities, AgentCard, Task +from a2a.utils.constants import VERSION_HEADER + + +class DummyAgentExecutor(AgentExecutor): + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + pass + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + pass + + +@pytest.fixture +def test_app(): + agent_card = AgentCard( + name='Test Agent', + version='1.0.0', + capabilities=AgentCapabilities(streaming=True), + ) + handler = DefaultRequestHandler( + agent_executor=DummyAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=agent_card, + queue_manager=InMemoryQueueManager(), + push_config_store=InMemoryPushNotificationConfigStore(), + ) + + async def mock_on_message_send(*args, **kwargs): + task = Task(id='task-123') + task.status.message.message_id = 'msg-123' + return task + + async def mock_on_message_send_stream(*args, **kwargs): + task = Task(id='task-123') + task.status.message.message_id = 'msg-123' + yield task + + handler.on_message_send = mock_on_message_send + handler.on_message_send_stream = mock_on_message_send_stream + + app = FastAPI() + agent_card_routes = create_agent_card_routes( + agent_card=agent_card, card_url='/' + ) + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, rpc_url='/jsonrpc', enable_v0_3_compat=True + ) + app.routes.extend(agent_card_routes) + app.routes.extend(jsonrpc_routes) + + rest_routes = create_rest_routes( + request_handler=handler, path_prefix='/rest', enable_v0_3_compat=True + ) + app.routes.extend(rest_routes) + return app + + +@pytest.fixture +def client(test_app): + return TestClient(test_app, raise_server_exceptions=False) + + +@pytest.mark.parametrize('transport', ['rest', 'jsonrpc']) +@pytest.mark.parametrize('endpoint_ver', ['0.3', '1.0']) +@pytest.mark.parametrize('is_streaming', [False, True]) +@pytest.mark.parametrize( + 'header_val, should_succeed', + [ + (None, '0.3'), + ('0.3', '0.3'), + ('1.0', '1.0'), + ('1.2', '1.0'), + ('2', 'none'), + ('INVALID', 'none'), + ], +) +def test_version_header_integration( + client, transport, endpoint_ver, is_streaming, header_val, should_succeed +): + headers = {} + if header_val is not None: + headers[VERSION_HEADER] = header_val + + expect_success = endpoint_ver == should_succeed + + if transport == 'rest': + if endpoint_ver == '0.3': + url = ( + '/rest/v1/message:stream' + if is_streaming + else '/rest/v1/message:send' + ) + else: + url = ( + '/rest/message:stream' if is_streaming else '/rest/message:send' + ) + + payload = { + 'message': { + 'messageId': 'msg1', + 'role': 'ROLE_USER' if endpoint_ver == '1.0' else 'user', + 'parts': [{'text': 'hello'}] if endpoint_ver == '1.0' else None, + 'content': [{'text': 'hello'}] + if endpoint_ver == '0.3' + else None, + } + } + if endpoint_ver == '0.3': + del payload['message']['parts'] + else: + del payload['message']['content'] + + if is_streaming: + headers['Accept'] = 'text/event-stream' + with client.stream( + 'POST', url, json=payload, headers=headers + ) as response: + response.read() + + if expect_success: + assert response.status_code == 200, response.text + else: + assert response.status_code == 400, response.text + else: + response = client.post(url, json=payload, headers=headers) + if expect_success: + assert response.status_code == 200, response.text + else: + assert response.status_code == 400, response.text + + else: + url = '/jsonrpc' + if endpoint_ver == '0.3': + payload = { + 'jsonrpc': '2.0', + 'id': '1', + 'method': 'message/stream' if is_streaming else 'message/send', + 'params': { + 'message': { + 'messageId': 'msg1', + 'role': 'user', + 'parts': [{'text': 'hello'}], + } + }, + } + else: + payload = { + 'jsonrpc': '2.0', + 'id': '1', + 'method': 'SendStreamingMessage' + if is_streaming + else 'SendMessage', + 'params': { + 'message': { + 'messageId': 'msg1', + 'role': 'ROLE_USER', + 'parts': [{'text': 'hello'}], + } + }, + } + + if is_streaming: + headers['Accept'] = 'text/event-stream' + with client.stream( + 'POST', url, json=payload, headers=headers + ) as response: + response.read() + + if expect_success: + assert response.status_code == 200, response.text + assert ( + 'result' in response.text or 'task' in response.text + ), response.text + else: + assert response.status_code == 200 + assert 'error' in response.text.lower(), response.text + else: + response = client.post(url, json=payload, headers=headers) + assert response.status_code == 200, response.text + resp_data = response.json() + if expect_success: + assert 'result' in resp_data, resp_data + else: + assert 'error' in resp_data, resp_data + expected_code = -32603 if endpoint_ver == '0.3' else -32009 + assert resp_data['error']['code'] == expected_code diff --git a/tests/migrations/test_a2a_db_cli.py b/tests/migrations/test_a2a_db_cli.py new file mode 100644 index 000000000..0d55aaa41 --- /dev/null +++ b/tests/migrations/test_a2a_db_cli.py @@ -0,0 +1,142 @@ +import os +import argparse +from unittest.mock import MagicMock, patch +import pytest +from a2a.a2a_db_cli import run_migrations + + +@pytest.fixture +def mock_alembic_command(): + with ( + patch('alembic.command.upgrade') as mock_upgrade, + patch('alembic.command.downgrade') as mock_downgrade, + ): + yield mock_upgrade, mock_downgrade + + +@pytest.fixture +def mock_alembic_config(): + with patch('a2a.a2a_db_cli.Config') as mock_config: + yield mock_config + + +def test_cli_upgrade_offline(mock_alembic_command, mock_alembic_config): + mock_upgrade, _ = mock_alembic_command + custom_owner = 'test-owner' + tasks_table = 'my_tasks' + push_table = 'my_push' + + # Simulate: a2a-db upgrade head --sql --add_columns_owner_last_updated-default-ownertest-owner --tasks-table my_tasks --push-notification-configs-table my_push -v + test_args = [ + 'a2a-db', + 'upgrade', + 'head', + '--sql', + '--add_columns_owner_last_updated-default-owner', + custom_owner, + '--tasks-table', + tasks_table, + '--push-notification-configs-table', + push_table, + '-v', + ] + with patch('sys.argv', test_args): + with patch.dict(os.environ, {'DATABASE_URL': 'sqlite:///test.db'}): + run_migrations() + + # Verify upgrade parameters + args, kwargs = mock_upgrade.call_args + assert kwargs['sql'] is True + assert args[1] == 'head' + + # Verify options were set in config instance + # Note: Using assert_any_call because multiple options are set + mock_alembic_config.return_value.set_main_option.assert_any_call( + 'add_columns_owner_last_updated_default_owner', custom_owner + ) + mock_alembic_config.return_value.set_main_option.assert_any_call( + 'tasks_table', tasks_table + ) + mock_alembic_config.return_value.set_main_option.assert_any_call( + 'push_notification_configs_table', push_table + ) + mock_alembic_config.return_value.set_main_option.assert_any_call( + 'verbose', 'true' + ) + + +def test_cli_downgrade_offline(mock_alembic_command, mock_alembic_config): + _, mock_downgrade = mock_alembic_command + tasks_table = 'old_tasks' + + # Simulate: a2a-db downgrade base --sql --tasks-table old_tasks + test_args = [ + 'a2a-db', + 'downgrade', + 'base', + '--sql', + '--tasks-table', + tasks_table, + ] + with patch('sys.argv', test_args): + with patch.dict(os.environ, {'DATABASE_URL': 'sqlite:///test.db'}): + run_migrations() + + args, kwargs = mock_downgrade.call_args + assert kwargs['sql'] is True + assert args[1] == 'base' + + # Verify tables option + mock_alembic_config.return_value.set_main_option.assert_any_call( + 'tasks_table', tasks_table + ) + + +def test_cli_default_upgrade(mock_alembic_command, mock_alembic_config): + mock_upgrade, _ = mock_alembic_command + + # Simulate: a2a-db (no args) + test_args = ['a2a-db'] + with patch('sys.argv', test_args): + with patch.dict(os.environ, {'DATABASE_URL': 'sqlite:///test.db'}): + run_migrations() + + # Should default to upgrade head + mock_upgrade.assert_called_once() + args, kwargs = mock_upgrade.call_args + assert args[1] == 'head' + assert kwargs['sql'] is False + + +def test_cli_database_url_flag(mock_alembic_command, mock_alembic_config): + mock_upgrade, _ = mock_alembic_command + custom_db = 'sqlite:///custom_cli.db' + + # Simulate: a2a-db --database-url sqlite:///custom_cli.db + test_args = ['a2a-db', '--database-url', custom_db] + with patch('sys.argv', test_args): + with patch.dict(os.environ, {}, clear=True): + run_migrations() + # Verify the CLI tool set the environment variable + assert os.environ['DATABASE_URL'] == custom_db + + mock_upgrade.assert_called() + + +def test_cli_owner_with_downgrade_error( + mock_alembic_command, mock_alembic_config +): + # This should trigger parser.error(). Flag --add_columns_owner_last_updated-default-owner is not allowed with downgrade + test_args = [ + 'a2a-db', + 'downgrade', + 'base', + '--add_columns_owner_last_updated-default-owner', + 'some-owner', + ] + + with patch('sys.argv', test_args): + with patch.dict(os.environ, {'DATABASE_URL': 'sqlite:///test.db'}): + # argparse calls sys.exit on error + with pytest.raises(SystemExit): + run_migrations() diff --git a/tests/migrations/test_env.py b/tests/migrations/test_env.py new file mode 100644 index 000000000..0439077b9 --- /dev/null +++ b/tests/migrations/test_env.py @@ -0,0 +1,137 @@ +import asyncio +import importlib +import logging +import os +import sys +from unittest.mock import MagicMock, patch + +import pytest + + +@pytest.fixture +def mock_alembic_setup(): + """Fixture to mock alembic context and config for safe import of env.py.""" + with patch('alembic.context') as mock_context: + mock_config = MagicMock() + mock_context.config = mock_config + # Basic setup to avoid crashes on import + mock_config.config_file_name = 'alembic.ini' + mock_config.get_section.return_value = {} + + # We need to make sure 'a2a.migrations.env' is not in sys.modules + # initially so we can control its execution + if 'a2a.migrations.env' in sys.modules: + del sys.modules['a2a.migrations.env'] + + yield mock_context, mock_config + + +def test_env_py_missing_db_url(mock_alembic_setup): + """Test that env.py raises RuntimeError when DATABASE_URL is missing.""" + mock_context, mock_config = mock_alembic_setup + + with patch.dict(os.environ, {}, clear=True): + with pytest.raises( + RuntimeError, match='DATABASE_URL environment variable is not set' + ): + # Using standard import/reload ensures coverage tracking + import a2a.migrations.env as env + + importlib.reload(env) + + +def test_env_py_offline_mode(mock_alembic_setup): + """Test env.py logic in offline mode.""" + mock_context, mock_config = mock_alembic_setup + db_url = 'sqlite+aiosqlite:///test_cov_offline.db' + + mock_config.config_file_name = None # Skip fileConfig + mock_context.is_offline_mode.return_value = True + + # Mock get_main_option to return db_url for 'sqlalchemy.url' + def get_opt(key, default=None): + if key == 'sqlalchemy.url': + return db_url + return default + + mock_config.get_main_option.side_effect = get_opt + + with patch.dict(os.environ, {'DATABASE_URL': db_url}): + import a2a.migrations.env as env + + importlib.reload(env) + + # Verify sqlalchemy.url was set from env var + mock_config.set_main_option.assert_any_call('sqlalchemy.url', db_url) + + # Verify context.configure was called for offline mode + mock_context.configure.assert_called() + # Check if url was passed to configure + args, kwargs = mock_context.configure.call_args + assert kwargs['url'] == db_url + + +@patch('alembic.context.run_migrations') +@patch('sqlalchemy.ext.asyncio.async_engine_from_config') +@patch('asyncio.run') +def test_env_py_online_mode( + mock_asyncio_run, + mock_async_engine, + mock_run_migrations, + mock_alembic_setup, +): + """Test env.py logic in online mode.""" + mock_context, mock_config = mock_alembic_setup + db_url = 'sqlite+aiosqlite:///test_cov_online.db' + + mock_config.config_file_name = None + mock_context.is_offline_mode.return_value = False + + # Prevent "coroutine never awaited" warning + def close_coro(coro): + if asyncio.iscoroutine(coro): + coro.close() + + mock_asyncio_run.side_effect = close_coro + + with patch.dict(os.environ, {'DATABASE_URL': db_url}): + import a2a.migrations.env as env + + importlib.reload(env) + + # Verify sqlalchemy.url was set + mock_config.set_main_option.assert_any_call('sqlalchemy.url', db_url) + + # Verify asyncio.run was called to start online migrations + mock_asyncio_run.assert_called() + + +def test_env_py_verbose_logging(mock_alembic_setup): + """Test that env.py enables verbose logging when 'verbose' option is set.""" + mock_context, mock_config = mock_alembic_setup + db_url = 'sqlite+aiosqlite:///test_cov_verbose.db' + + # Use a real side_effect to simulate config.get_main_option + def get_opt(key, default=None): + if key == 'verbose': + return 'true' + if key == 'sqlalchemy.url': + return db_url + return default + + mock_config.get_main_option.side_effect = get_opt + mock_config.config_file_name = None + mock_context.is_offline_mode.return_value = True + + with patch('logging.getLogger') as mock_get_logger: + mock_logger = MagicMock() + mock_get_logger.return_value = mock_logger + + with patch.dict(os.environ, {'DATABASE_URL': db_url}): + import a2a.migrations.env as env + + importlib.reload(env) + + # Check if sqlalchemy.engine logger level was set to INFO + mock_get_logger.assert_called_with('sqlalchemy.engine') + mock_logger.setLevel.assert_called_with(logging.INFO) diff --git a/tests/migrations/versions/test_migration_6419d2d130f6.py b/tests/migrations/versions/test_migration_6419d2d130f6.py new file mode 100644 index 000000000..e7011969c --- /dev/null +++ b/tests/migrations/versions/test_migration_6419d2d130f6.py @@ -0,0 +1,308 @@ +import importlib +import logging +import os +import sqlite3 +import tempfile +from typing import Generator +from unittest.mock import patch + +import pytest + +from a2a.a2a_db_cli import run_migrations + +# Explicitly import the migration module to ensure it is tracked by the coverage tool +# when Alembic loads it dynamically. +try: + importlib.import_module( + 'a2a.migrations.versions.6419d2d130f6_add_columns_owner_last_updated' + ) +except (ImportError, AttributeError): + # This might fail if Alembic context is not initialized, which is fine for coverage purposes + pass + + +@pytest.fixture(autouse=True) +def mock_logging_config(): + """Mock logging configuration function. + + This prevents tests from changing global logging state + and interfering with other tests (like telemetry tests). + """ + with patch('logging.basicConfig'), patch('logging.config.fileConfig'): + yield + + +@pytest.fixture +def temp_db() -> Generator[str, None, None]: + """Create a temporary SQLite database for testing.""" + fd, path = tempfile.mkstemp(suffix='.db') + os.close(fd) + yield path + if os.path.exists(path): + os.remove(path) + + +def _setup_initial_schema(db_path: str) -> None: + """Setup initial schema without the new columns.""" + conn = sqlite3.connect(db_path) + cursor = conn.cursor() + cursor.execute(""" + CREATE TABLE tasks ( + id VARCHAR(36) PRIMARY KEY, + context_id VARCHAR(36) NOT NULL, + kind VARCHAR(16) NOT NULL, + status TEXT, + artifacts TEXT, + history TEXT, + metadata TEXT + ) + """) + cursor.execute(""" + CREATE TABLE push_notification_configs ( + task_id VARCHAR(36), + config_id VARCHAR(255), + config_data BLOB NOT NULL, + PRIMARY KEY (task_id, config_id) + ) + """) + conn.commit() + conn.close() + + +def test_migration_6419d2d130f6_full_cycle( + temp_db: str, capsys: pytest.CaptureFixture[str] +) -> None: + """Test the full upgrade/downgrade cycle for migration 6419d2d130f6.""" + db_url = f'sqlite+aiosqlite:///{temp_db}' + + # 1. Setup initial schema without the new columns + _setup_initial_schema(temp_db) + + # 2. Run Upgrade via direct call with a custom owner + custom_owner = 'test_owner_123' + + test_args = [ + 'a2a-db', + '--database-url', + db_url, + '--add_columns_owner_last_updated-default-owner', + custom_owner, + 'upgrade', + '6419d2d130f6', + ] + with patch('sys.argv', test_args): + run_migrations() + + # 3. Verify columns and index exist + conn = sqlite3.connect(temp_db) + cursor = conn.cursor() + + # Check tasks table + cursor.execute('PRAGMA table_info(tasks)') + tasks_columns = {row[1]: row for row in cursor.fetchall()} + assert 'owner' in tasks_columns + assert 'last_updated' in tasks_columns + assert tasks_columns['last_updated'][2] == 'DATETIME' + + # Check default value for owner in tasks + # row[4] is dflt_value in PRAGMA table_info + assert tasks_columns['owner'][4] == f"'{custom_owner}'" + + # Check index on tasks + cursor.execute('PRAGMA index_list(tasks)') + tasks_indexes = {row[1] for row in cursor.fetchall()} + assert 'idx_tasks_owner_last_updated' in tasks_indexes + + # Check push_notification_configs table + cursor.execute('PRAGMA table_info(push_notification_configs)') + pnc_columns = {row[1]: row for row in cursor.fetchall()} + assert 'owner' in pnc_columns + assert ( + 'last_updated' not in pnc_columns + ) # Only for tables with 'kind' column + + conn.close() + + # 4. Run Downgrade via direct call + test_args = ['a2a-db', '--database-url', db_url, 'downgrade', 'base'] + with patch('sys.argv', test_args): + run_migrations() + + # 5. Verify columns are gone + conn = sqlite3.connect(temp_db) + cursor = conn.cursor() + + # Check tasks table + cursor.execute('PRAGMA table_info(tasks)') + tasks_columns_post = {row[1] for row in cursor.fetchall()} + assert 'owner' not in tasks_columns_post + assert 'last_updated' not in tasks_columns_post + + # Check index on tasks + cursor.execute('PRAGMA index_list(tasks)') + tasks_indexes_post = {row[1] for row in cursor.fetchall()} + assert 'idx_tasks_owner_last_updated' not in tasks_indexes_post + + # Check push_notification_configs table + cursor.execute('PRAGMA table_info(push_notification_configs)') + pnc_columns_post = {row[1] for row in cursor.fetchall()} + assert 'owner' not in pnc_columns_post + + conn.close() + + +def test_migration_6419d2d130f6_custom_tables( + temp_db: str, capsys: pytest.CaptureFixture[str] +) -> None: + """Test the migration with custom table names.""" + db_url = f'sqlite+aiosqlite:///{temp_db}' + custom_tasks = 'custom_tasks' + custom_push = 'custom_push' + + # 1. Setup initial schema with custom names + conn = sqlite3.connect(temp_db) + cursor = conn.cursor() + cursor.execute( + f'CREATE TABLE {custom_tasks} (id VARCHAR(36) PRIMARY KEY, kind VARCHAR(16))' + ) + cursor.execute( + f'CREATE TABLE {custom_push} (task_id VARCHAR(36), PRIMARY KEY (task_id))' + ) + conn.commit() + conn.close() + + # 2. Run Upgrade via direct call with custom table flags + test_args = [ + 'a2a-db', + '--database-url', + db_url, + '--tasks-table', + custom_tasks, + '--push-notification-configs-table', + custom_push, + 'upgrade', + '6419d2d130f6', + ] + with patch('sys.argv', test_args): + run_migrations() + + # 3. Verify columns exist in custom tables + conn = sqlite3.connect(temp_db) + cursor = conn.cursor() + + cursor.execute(f'PRAGMA table_info({custom_tasks})') + columns = {row[1] for row in cursor.fetchall()} + assert 'owner' in columns + assert 'last_updated' in columns + + # Check index on custom tasks table + cursor.execute(f'PRAGMA index_list({custom_tasks})') + indexes = {row[1] for row in cursor.fetchall()} + assert f'idx_{custom_tasks}_owner_last_updated' in indexes + + cursor.execute(f'PRAGMA table_info({custom_push})') + columns = {row[1] for row in cursor.fetchall()} + assert 'owner' in columns + + conn.close() + + +def test_migration_6419d2d130f6_missing_tables( + temp_db: str, caplog: pytest.LogCaptureFixture +) -> None: + """Test that the migration handles missing tables gracefully.""" + db_url = f'sqlite+aiosqlite:///{temp_db}' + + # Run upgrade on empty database + test_args = [ + 'a2a-db', + '--database-url', + db_url, + 'upgrade', + '6419d2d130f6', + ] + with patch('sys.argv', test_args), caplog.at_level(logging.WARNING): + run_migrations() + + assert "Table 'tasks' does not exist" in caplog.text + + +def test_migration_6419d2d130f6_idempotency( + temp_db: str, capsys: pytest.CaptureFixture[str] +) -> None: + """Test that the migration is idempotent (can be run multiple times).""" + db_url = f'sqlite+aiosqlite:///{temp_db}' + + # 1. Setup initial schema + _setup_initial_schema(temp_db) + + # 2. Run Upgrade first time + test_args = [ + 'a2a-db', + '--database-url', + db_url, + 'upgrade', + '6419d2d130f6', + ] + with patch('sys.argv', test_args): + run_migrations() + + # 3. Run Upgrade second time - should not fail even if columns already exist + with patch('sys.argv', test_args): + run_migrations() + + +def test_migration_6419d2d130f6_offline( + temp_db: str, capsys: pytest.CaptureFixture[str] +) -> None: + """Test that offline mode generates the expected SQL without modifying the database.""" + db_url = f'sqlite+aiosqlite:///{temp_db}' + + # 1. Setup initial schema + _setup_initial_schema(temp_db) + + # 2. Run upgrade in offline mode + test_args = [ + 'a2a-db', + '--database-url', + db_url, + '--sql', + 'upgrade', + '6419d2d130f6', + ] + with patch('sys.argv', test_args): + run_migrations() + + captured = capsys.readouterr() + # Verify SQL output contains key migration statements + assert 'ALTER TABLE tasks ADD COLUMN owner' in captured.out + assert 'ALTER TABLE tasks ADD COLUMN last_updated' in captured.out + assert 'CREATE INDEX idx_tasks_owner_last_updated' in captured.out + assert 'CREATE TABLE a2a_alembic_version' in captured.out + assert ( + 'ALTER TABLE push_notification_configs ADD COLUMN owner' in captured.out + ) + + # 3. Verify the database was NOT actually changed (since it is offline mode) + conn = sqlite3.connect(temp_db) + cursor = conn.cursor() + + # Verify tables exist + cursor.execute("SELECT name FROM sqlite_schema WHERE type='table'") + tables = {row[0] for row in cursor.fetchall()} + assert 'tasks' in tables + assert 'push_notification_configs' in tables + assert 'a2a_alembic_version' not in tables + + # Verify columns were NOT added to tasks + cursor.execute('PRAGMA table_info(tasks)') + columns = {row[1] for row in cursor.fetchall()} + assert 'owner' not in columns + assert 'last_updated' not in columns + + # Verify columns were NOT added to push_notification_configs + cursor.execute('PRAGMA table_info(push_notification_configs)') + columns = {row[1] for row in cursor.fetchall()} + assert 'owner' not in columns + + conn.close() diff --git a/tests/server/agent_execution/__init__.py b/tests/server/agent_execution/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/server/agent_execution/test_active_task.py b/tests/server/agent_execution/test_active_task.py new file mode 100644 index 000000000..6e477186b --- /dev/null +++ b/tests/server/agent_execution/test_active_task.py @@ -0,0 +1,893 @@ +import asyncio +import logging + +from unittest.mock import AsyncMock, Mock, patch + +import pytest +import pytest_asyncio + +from a2a.server.agent_execution.active_task import ActiveTask +from a2a.server.agent_execution.agent_executor import AgentExecutor +from a2a.server.agent_execution.context import RequestContext +from a2a.server.context import ServerCallContext +from a2a.server.events.event_queue_v2 import EventQueueSource as EventQueue +from a2a.server.tasks.push_notification_sender import PushNotificationSender +from a2a.server.tasks.task_manager import TaskManager +from a2a.types.a2a_pb2 import ( + Message, + Task, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, + Role, + Part, +) +from a2a.utils.errors import InvalidParamsError + + +logger = logging.getLogger(__name__) + + +class TestActiveTask: + """Tests for the ActiveTask class.""" + + @pytest.fixture + def agent_executor(self) -> Mock: + return Mock(spec=AgentExecutor) + + @pytest.fixture + def task_manager(self) -> Mock: + tm = Mock(spec=TaskManager) + tm.process = AsyncMock(side_effect=lambda x: x) + tm.get_task = AsyncMock(return_value=None) + tm.context_id = 'test-context-id' + tm._init_task_obj = Mock(return_value=Task(id='test-task-id')) + tm.save_task_event = AsyncMock() + return tm + + @pytest_asyncio.fixture + async def event_queue(self) -> EventQueue: + return EventQueue() + + @pytest.fixture + def push_sender(self) -> Mock: + ps = Mock(spec=PushNotificationSender) + ps.send_notification = AsyncMock() + return ps + + @pytest.fixture + def request_context(self) -> Mock: + return Mock(spec=RequestContext) + + @pytest_asyncio.fixture + async def active_task( + self, + agent_executor: Mock, + task_manager: Mock, + push_sender: Mock, + ) -> ActiveTask: + return ActiveTask( + agent_executor=agent_executor, + task_id='test-task-id', + task_manager=task_manager, + push_sender=push_sender, + ) + + @pytest.mark.asyncio + async def test_active_task_already_started( + self, active_task: ActiveTask, request_context: Mock + ) -> None: + """Test starting a task that is already started.""" + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + # Enqueuing and starting again should not raise errors + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + assert active_task._producer_task is not None + + @pytest.mark.asyncio + async def test_active_task_cancel( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test canceling an ActiveTask.""" + stop_event = asyncio.Event() + + async def execute_mock(req, q): + await stop_event.wait() + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + agent_executor.cancel = AsyncMock() + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + ] * 10 + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Give it a moment to start + await asyncio.sleep(0.1) + + await active_task.cancel(request_context) + + agent_executor.cancel.assert_called_once() + stop_event.set() + + @pytest.mark.asyncio + async def test_active_task_interrupted_auth( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test task interruption due to AUTH_REQUIRED.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_AUTH_REQUIRED), + ) + + async def execute_mock(req, q): + await q.enqueue_event( + TaskStatusUpdateEvent( + task_id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_AUTH_REQUIRED), + ) + ) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [ + e async for e in active_task.subscribe(request=request_context) + ] + + result = events[0] if events else None + assert ( + getattr(result, 'id', getattr(result, 'task_id', None)) + == 'test-task-id' + ) + assert result.status.state == TaskState.TASK_STATE_AUTH_REQUIRED + + @pytest.mark.asyncio + async def test_active_task_interrupted_input( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test task interruption due to INPUT_REQUIRED.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_INPUT_REQUIRED), + ) + + async def execute_mock(req, q): + await q.enqueue_event( + Task( + id='test-task-id', + status=TaskStatus( + state=TaskState.TASK_STATE_INPUT_REQUIRED + ), + ) + ) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [ + e async for e in active_task.subscribe(request=request_context) + ] + + result = events[-1] if events else None + assert result.id == 'test-task-id' + assert result.status.state == TaskState.TASK_STATE_INPUT_REQUIRED + + @pytest.mark.asyncio + async def test_active_task_producer_failure( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test ActiveTask behavior when the producer fails.""" + agent_executor.execute = AsyncMock( + side_effect=ValueError('Producer crashed') + ) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # We need to wait a bit for the producer to fail and set the exception + for _ in range(10): + try: + async for _ in active_task.subscribe(): + pass + except ValueError: + return + await asyncio.sleep(0.05) + + pytest.fail('Producer failure was not raised') + + @pytest.mark.asyncio + async def test_active_task_push_notification( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + push_sender: Mock, + task_manager: Mock, + ) -> None: + """Test push notification sending.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + + async def execute_mock(req, q): + await q.enqueue_event(task_obj) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + async for _ in active_task.subscribe(request=request_context): + pass + + push_sender.send_notification.assert_called() + + @pytest.mark.asyncio + async def test_active_task_consumer_failure( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test behavior when the consumer task fails.""" + # Mock dequeue_event to raise exception + active_task._event_queue_agent.dequeue_event = AsyncMock( + side_effect=RuntimeError('Consumer crash') + ) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # We need to wait for the consumer to fail + for _ in range(10): + try: + async for _ in active_task.subscribe(): + pass + except RuntimeError as e: + if str(e) == 'Consumer crash': + return + await asyncio.sleep(0.05) + + pytest.fail('Consumer failure was not raised') + + @pytest.mark.asyncio + async def test_active_task_subscribe_exception_handling( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test exception handling in subscribe.""" + agent_executor.execute = AsyncMock( + side_effect=ValueError('Producer failure') + ) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Give it a moment to fail + for _ in range(10): + if active_task._exception: + break + await asyncio.sleep(0.05) + + with pytest.raises(ValueError, match='Producer failure'): + async for _ in active_task.subscribe(): + pass + + @pytest.mark.asyncio + async def test_active_task_cancel_not_started( + self, active_task: ActiveTask, request_context: Mock + ) -> None: + """Test canceling a task that was never started.""" + # TODO: Implement this test + + @pytest.mark.asyncio + async def test_active_task_cancel_already_finished( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test canceling a task that is already finished.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + + async def execute_mock(req, q): + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + async for _ in active_task.subscribe(request=request_context): + pass + + await active_task._is_finished.wait() + + # Now it is finished + await active_task.cancel(request_context) + + # agent_executor.cancel should NOT be called + agent_executor.cancel.assert_not_called() + + @pytest.mark.asyncio + async def test_active_task_subscribe_cancelled_during_wait( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when it is cancelled while waiting for events.""" + + async def slow_execute(req, q): + await asyncio.sleep(10) + + agent_executor.execute = AsyncMock(side_effect=slow_execute) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + it = active_task.subscribe() + it_obj = it.__aiter__() + + # This task will be waiting inside the loop in subscribe() + task = asyncio.create_task(it_obj.__anext__()) + await asyncio.sleep(0.2) + + task.cancel() + + # In python 3.10+ cancelling an async generator next() might raise StopAsyncIteration + # if the generator handles the cancellation by closing. + with pytest.raises((asyncio.CancelledError, StopAsyncIteration)): + await task + + await it.aclose() + + @pytest.mark.asyncio + async def test_active_task_subscribe_queue_shutdown( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when the queue is shut down.""" + + async def long_execute(*args, **kwargs): + await asyncio.sleep(10) + + agent_executor.execute = AsyncMock(side_effect=long_execute) + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + tapped = await active_task._event_queue_subscribers.tap() + + with patch.object( + active_task._event_queue_subscribers, 'tap', return_value=tapped + ): + # Close the queue while subscribe is waiting + async def close_later(): + await asyncio.sleep(0.2) + await tapped.close() + + _ = asyncio.create_task(close_later()) + + async for _ in active_task.subscribe(): + pass + + # Should finish normally after QueueShutDown + + @pytest.mark.asyncio + async def test_active_task_subscribe_yield_then_shutdown( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when an event is yielded and then the queue is shut down.""" + msg = Message(message_id='m1') + + async def execute_mock(req, q): + await q.enqueue_event(msg) + await asyncio.sleep(0.5) + # Finish producer + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [event async for event in active_task.subscribe()] + assert len(events) == 1 + assert events[0] == msg + + @pytest.mark.asyncio + async def test_active_task_task_sets_result_first( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test that enqueuing a Task sets result_available when no result yet.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ) + + async def execute_mock(req, q): + # No result available yet + await q.enqueue_event(task_obj) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [ + e async for e in active_task.subscribe(request=request_context) + ] + + result = events[-1] if events else None + assert result == task_obj + + @pytest.mark.asyncio + async def test_active_task_subscribe_cancelled_during_yield( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe cancellation while yielding (GeneratorExit).""" + msg = Message(message_id='m1') + + async def execute_mock(req, q): + await q.enqueue_event(msg) + await asyncio.sleep(10) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + it = active_task.subscribe() + async for event in it: + assert event == msg + # Cancel while we have the event (inside the loop) + await it.aclose() + break + + @pytest.mark.asyncio + async def test_active_task_cancel_when_already_closed( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test cancel when the event queue is already closed.""" + + async def execute_mock(req, q): + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.return_value = Task(id='test') + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Forced queue close. + await active_task._event_queue_agent.close() + await active_task._event_queue_subscribers.close() + + # Now cancel the task itself. + await active_task.cancel(request_context) + # wait() was removed, no need to wait here. + + # Cancel again should not do anything. + await active_task.cancel(request_context) + # wait() was removed, no need to wait here. + + @pytest.mark.asyncio + async def test_active_task_subscribe_dequeue_failure( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when dequeue_event fails on the tapped queue.""" + + async def slow_execute(req, q): + await asyncio.sleep(10) + + agent_executor.execute = AsyncMock(side_effect=slow_execute) + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + mock_tapped_queue = Mock(spec=EventQueue) + mock_tapped_queue.dequeue_event = AsyncMock( + side_effect=RuntimeError('Tapped queue crash') + ) + mock_tapped_queue.close = AsyncMock() + + with ( + patch.object( + active_task._event_queue_subscribers, + 'tap', + return_value=mock_tapped_queue, + ), + pytest.raises(RuntimeError, match='Tapped queue crash'), + ): + async for _ in active_task.subscribe(): + pass + + mock_tapped_queue.close.assert_called_once() + + @pytest.mark.asyncio + async def test_active_task_consumer_interrupted_multiple_times( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + task_manager: Mock, + ) -> None: + """Test consumer receiving multiple interrupting events.""" + task_obj = Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_AUTH_REQUIRED), + ) + + async def execute_mock(req, q): + await q.enqueue_event( + TaskStatusUpdateEvent( + task_id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_AUTH_REQUIRED), + ) + ) + await q.enqueue_event( + TaskStatusUpdateEvent( + task_id='test-task-id', + status=TaskStatus( + state=TaskState.TASK_STATE_INPUT_REQUIRED + ), + ) + ) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task.side_effect = [ + Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ] + [task_obj] * 10 + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [ + e async for e in active_task.subscribe(request=request_context) + ] + + result = events[0] if events else None + assert result.status.state == TaskState.TASK_STATE_AUTH_REQUIRED + + @pytest.mark.asyncio + async def test_active_task_subscribe_immediate_finish( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when the task finishes immediately.""" + + async def execute_mock(req, q): + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Wait for it to finish + await active_task._is_finished.wait() + + with pytest.raises( + InvalidParamsError, match=r'Task .* is already completed' + ): + async for _ in active_task.subscribe(): + pass + + @pytest.mark.asyncio + async def test_active_task_start_producer_immediate_error( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test start when producer fails immediately.""" + agent_executor.execute = AsyncMock( + side_effect=ValueError('Quick failure') + ) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Consumer should also finish + with pytest.raises(ValueError, match='Quick failure'): + async for _ in active_task.subscribe(): + pass + + @pytest.mark.asyncio + async def test_active_task_subscribe_finished_during_wait( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test subscribe when the task finishes while waiting for an event.""" + + async def slow_execute(req, q): + # Do nothing and just finish + await asyncio.sleep(0.5) + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=slow_execute) + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + async def consume(): + async for _ in active_task.subscribe(): + pass + + task = asyncio.create_task(consume()) + await asyncio.sleep(0.2) + + # Task is still running, subscribe is waiting. + # Now it finishes. + await asyncio.sleep(0.5) + await task # Should finish normally + + @pytest.mark.asyncio + async def test_active_task_maybe_cleanup_not_finished( + self, + agent_executor: Mock, + task_manager: Mock, + push_sender: Mock, + ) -> None: + """Test that cleanup is not called if task is not finished.""" + on_cleanup = Mock() + active_task = ActiveTask( + agent_executor=agent_executor, + task_id='test-task-id', + task_manager=task_manager, + push_sender=push_sender, + on_cleanup=on_cleanup, + ) + + # Explicitly call private _maybe_cleanup to verify it respects finished state + await active_task._maybe_cleanup() + on_cleanup.assert_not_called() + + @pytest.mark.asyncio + async def test_active_task_subscribe_exception_already_set( + self, active_task: ActiveTask + ) -> None: + """Test subscribe when exception is already set.""" + active_task._exception = ValueError('Pre-existing error') + with pytest.raises(ValueError, match='Pre-existing error'): + async for _ in active_task.subscribe(): + pass + + @pytest.mark.asyncio + async def test_active_task_subscribe_inner_exception( + self, + active_task: ActiveTask, + agent_executor: Mock, + request_context: Mock, + ) -> None: + """Test the generic exception block in subscribe.""" + + async def slow_execute(req, q): + await asyncio.sleep(10) + + agent_executor.execute = AsyncMock(side_effect=slow_execute) + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + mock_tapped_queue = Mock(spec=EventQueue) + # dequeue_event returns a task that fails + mock_tapped_queue.dequeue_event = AsyncMock( + side_effect=Exception('Inner error') + ) + mock_tapped_queue.close = AsyncMock() + + with ( + patch.object( + active_task._event_queue_subscribers, + 'tap', + return_value=mock_tapped_queue, + ), + pytest.raises(Exception, match='Inner error'), + ): + async for _ in active_task.subscribe(): + pass + + +@pytest.mark.asyncio +async def test_active_task_subscribe_include_initial_task(): + agent_executor = Mock() + task_manager = Mock() + request_context = Mock(spec=RequestContext) + + active_task = ActiveTask( + agent_executor=agent_executor, + task_id='test-task-id', + task_manager=task_manager, + push_sender=Mock(), + ) + + initial_task = Task( + id='test-task-id', status=TaskStatus(state=TaskState.TASK_STATE_WORKING) + ) + + async def execute_mock(req, q): + active_task._request_queue.shutdown(immediate=True) + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + task_manager.get_task = AsyncMock(return_value=initial_task) + task_manager.save_task_event = AsyncMock() + + await active_task.enqueue_request(request_context) + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + events = [e async for e in active_task.subscribe(include_initial_task=True)] + + # Verify that the first yielded event is the initial task + assert len(events) >= 1 + assert events[0] == initial_task + + +@pytest.mark.timeout(1) +@pytest.mark.asyncio +async def test_active_task_subscribe_request_parameter(): + agent_executor = Mock() + task_manager = Mock() + request_context = Mock(spec=RequestContext) + + active_task = ActiveTask( + agent_executor=agent_executor, + task_id='test-task-id', + task_manager=task_manager, + push_sender=Mock(), + ) + + async def execute_mock(req, q): + # We simulate the task finishing successfully, so it will emit _RequestCompleted + pass + + agent_executor.execute = AsyncMock(side_effect=execute_mock) + agent_executor.cancel = AsyncMock() + task_manager.get_task = AsyncMock( + return_value=Task( + id='test-task-id', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ) + task_manager.save_task_event = AsyncMock() + task_manager.process = AsyncMock(side_effect=lambda x: x) + + await active_task.start( + call_context=ServerCallContext(), create_task_if_missing=True + ) + + # Pass request_context directly to subscribe without enqueuing manually + events = [e async for e in active_task.subscribe(request=request_context)] + + # Should complete without error, and yield no events (just _RequestCompleted which is hidden) + assert len(events) == 0 + + await active_task.cancel(request_context) diff --git a/tests/server/agent_execution/test_context.py b/tests/server/agent_execution/test_context.py index 979978add..dce780f58 100644 --- a/tests/server/agent_execution/test_context.py +++ b/tests/server/agent_execution/test_context.py @@ -7,12 +7,12 @@ from a2a.server.agent_execution import RequestContext from a2a.server.context import ServerCallContext from a2a.server.id_generator import IDGenerator -from a2a.types import ( +from a2a.types.a2a_pb2 import ( Message, - MessageSendParams, + SendMessageRequest, Task, ) -from a2a.utils.errors import ServerError +from a2a.utils.errors import InvalidParamsError class TestRequestContext: @@ -25,8 +25,8 @@ def mock_message(self) -> Mock: @pytest.fixture def mock_params(self, mock_message: Mock) -> Mock: - """Fixture for a mock MessageSendParams.""" - return Mock(spec=MessageSendParams, message=mock_message) + """Fixture for a mock SendMessageRequest.""" + return Mock(spec=SendMessageRequest, message=mock_message) @pytest.fixture def mock_task(self) -> Mock: @@ -35,7 +35,7 @@ def mock_task(self) -> Mock: def test_init_without_params(self) -> None: """Test initialization without parameters.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert context.message is None assert context.task_id is None assert context.context_id is None @@ -51,7 +51,7 @@ def test_init_with_params_no_ids(self, mock_params: Mock) -> None: uuid.UUID('00000000-0000-0000-0000-000000000002'), ], ): - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) assert context.message == mock_params.message assert context.task_id == '00000000-0000-0000-0000-000000000001' @@ -68,7 +68,9 @@ def test_init_with_params_no_ids(self, mock_params: Mock) -> None: def test_init_with_task_id(self, mock_params: Mock) -> None: """Test initialization with task ID provided.""" task_id = 'task-123' - context = RequestContext(request=mock_params, task_id=task_id) + context = RequestContext( + ServerCallContext(), request=mock_params, task_id=task_id + ) assert context.task_id == task_id assert mock_params.message.task_id == task_id @@ -76,7 +78,9 @@ def test_init_with_task_id(self, mock_params: Mock) -> None: def test_init_with_context_id(self, mock_params: Mock) -> None: """Test initialization with context ID provided.""" context_id = 'context-456' - context = RequestContext(request=mock_params, context_id=context_id) + context = RequestContext( + ServerCallContext(), request=mock_params, context_id=context_id + ) assert context.context_id == context_id assert mock_params.message.context_id == context_id @@ -86,7 +90,10 @@ def test_init_with_both_ids(self, mock_params: Mock) -> None: task_id = 'task-123' context_id = 'context-456' context = RequestContext( - request=mock_params, task_id=task_id, context_id=context_id + ServerCallContext(), + request=mock_params, + task_id=task_id, + context_id=context_id, ) assert context.task_id == task_id @@ -96,18 +103,20 @@ def test_init_with_both_ids(self, mock_params: Mock) -> None: def test_init_with_task(self, mock_params: Mock, mock_task: Mock) -> None: """Test initialization with a task object.""" - context = RequestContext(request=mock_params, task=mock_task) + context = RequestContext( + ServerCallContext(), request=mock_params, task=mock_task + ) assert context.current_task == mock_task def test_get_user_input_no_params(self) -> None: """Test get_user_input with no params returns empty string.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert context.get_user_input() == '' def test_attach_related_task(self, mock_task: Mock) -> None: """Test attach_related_task adds a task to related_tasks.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert len(context.related_tasks) == 0 context.attach_related_task(mock_task) @@ -122,7 +131,7 @@ def test_attach_related_task(self, mock_task: Mock) -> None: def test_current_task_property(self, mock_task: Mock) -> None: """Test current_task getter and setter.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert context.current_task is None context.current_task = mock_task @@ -135,7 +144,7 @@ def test_current_task_property(self, mock_task: Mock) -> None: def test_check_or_generate_task_id_no_params(self) -> None: """Test _check_or_generate_task_id with no params does nothing.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) context._check_or_generate_task_id() assert context.task_id is None @@ -146,7 +155,7 @@ def test_check_or_generate_task_id_with_existing_task_id( existing_id = 'existing-task-id' mock_params.message.task_id = existing_id - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) # The method is called during initialization assert context.task_id == existing_id @@ -160,7 +169,9 @@ def test_check_or_generate_task_id_with_custom_id_generator( id_generator.generate.return_value = 'custom-task-id' context = RequestContext( - request=mock_params, task_id_generator=id_generator + ServerCallContext(), + request=mock_params, + task_id_generator=id_generator, ) # The method is called during initialization @@ -168,7 +179,7 @@ def test_check_or_generate_task_id_with_custom_id_generator( def test_check_or_generate_context_id_no_params(self) -> None: """Test _check_or_generate_context_id with no params does nothing.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) context._check_or_generate_context_id() assert context.context_id is None @@ -179,7 +190,7 @@ def test_check_or_generate_context_id_with_existing_context_id( existing_id = 'existing-context-id' mock_params.message.context_id = existing_id - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) # The method is called during initialization assert context.context_id == existing_id @@ -193,7 +204,9 @@ def test_check_or_generate_context_id_with_custom_id_generator( id_generator.generate.return_value = 'custom-context-id' context = RequestContext( - request=mock_params, context_id_generator=id_generator + ServerCallContext(), + request=mock_params, + context_id_generator=id_generator, ) # The method is called during initialization @@ -203,11 +216,14 @@ def test_init_raises_error_on_task_id_mismatch( self, mock_params: Mock, mock_task: Mock ) -> None: """Test that an error is raised if provided task_id mismatches task.id.""" - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InvalidParamsError) as exc_info: RequestContext( - request=mock_params, task_id='wrong-task-id', task=mock_task + ServerCallContext(), + request=mock_params, + task_id='wrong-task-id', + task=mock_task, ) - assert 'bad task id' in str(exc_info.value.error.message) + assert 'bad task id' in exc_info.value.message def test_init_raises_error_on_context_id_mismatch( self, mock_params: Mock, mock_task: Mock @@ -216,43 +232,46 @@ def test_init_raises_error_on_context_id_mismatch( # Set a valid task_id to avoid that error mock_params.message.task_id = mock_task.id - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InvalidParamsError) as exc_info: RequestContext( + ServerCallContext(), request=mock_params, task_id=mock_task.id, context_id='wrong-context-id', task=mock_task, ) - assert 'bad context id' in str(exc_info.value.error.message) + assert 'bad context id' in exc_info.value.message def test_with_related_tasks_provided(self, mock_task: Mock) -> None: """Test initialization with related tasks provided.""" related_tasks = [mock_task, Mock(spec=Task)] - context = RequestContext(related_tasks=related_tasks) + context = RequestContext( + ServerCallContext(), related_tasks=related_tasks + ) # type: ignore[arg-type] assert context.related_tasks == related_tasks assert len(context.related_tasks) == 2 def test_message_property_without_params(self) -> None: """Test message property returns None when no params are provided.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert context.message is None def test_message_property_with_params(self, mock_params: Mock) -> None: """Test message property returns the message from params.""" - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) assert context.message == mock_params.message def test_metadata_property_without_content(self) -> None: """Test metadata property returns empty dict when no content are provided.""" - context = RequestContext() + context = RequestContext(ServerCallContext()) assert context.metadata == {} def test_metadata_property_with_content(self, mock_params: Mock) -> None: """Test metadata property returns the metadata from params.""" mock_params.metadata = {'key': 'value'} - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) assert context.metadata == {'key': 'value'} def test_init_with_existing_ids_in_message( @@ -262,7 +281,7 @@ def test_init_with_existing_ids_in_message( mock_message.task_id = 'existing-task-id' mock_message.context_id = 'existing-context-id' - context = RequestContext(request=mock_params) + context = RequestContext(ServerCallContext(), request=mock_params) assert context.task_id == 'existing-task-id' assert context.context_id == 'existing-context-id' @@ -275,7 +294,10 @@ def test_init_with_task_id_and_existing_task_id_match( mock_params.message.task_id = mock_task.id context = RequestContext( - request=mock_params, task_id=mock_task.id, task=mock_task + ServerCallContext(), + request=mock_params, + task_id=mock_task.id, + task=mock_task, ) assert context.task_id == mock_task.id @@ -289,6 +311,7 @@ def test_init_with_context_id_and_existing_context_id_match( mock_params.message.context_id = mock_task.context_id context = RequestContext( + ServerCallContext(), request=mock_params, task_id=mock_task.id, context_id=mock_task.context_id, @@ -299,14 +322,8 @@ def test_init_with_context_id_and_existing_context_id_match( assert context.current_task == mock_task def test_extension_handling(self) -> None: - """Test extension handling in RequestContext.""" + """Test that requested_extensions is exposed via RequestContext.""" call_context = ServerCallContext(requested_extensions={'foo', 'bar'}) context = RequestContext(call_context=call_context) assert context.requested_extensions == {'foo', 'bar'} - - context.add_activated_extension('foo') - assert call_context.activated_extensions == {'foo'} - - context.add_activated_extension('baz') - assert call_context.activated_extensions == {'foo', 'baz'} diff --git a/tests/server/agent_execution/test_simple_request_context_builder.py b/tests/server/agent_execution/test_simple_request_context_builder.py index c1cbcf051..ef374e364 100644 --- a/tests/server/agent_execution/test_simple_request_context_builder.py +++ b/tests/server/agent_execution/test_simple_request_context_builder.py @@ -12,16 +12,14 @@ from a2a.server.context import ServerCallContext from a2a.server.id_generator import IDGenerator from a2a.server.tasks.task_store import TaskStore -from a2a.types import ( +from a2a.types.a2a_pb2 import ( Message, - MessageSendParams, Part, - # ServerCallContext, # Removed from a2a.types Role, + SendMessageRequest, Task, TaskState, TaskStatus, - TextPart, ) @@ -29,13 +27,13 @@ def create_sample_message( content: str = 'test message', msg_id: str = 'msg1', - role: Role = Role.user, + role: Role = Role.ROLE_USER, reference_task_ids: list[str] | None = None, ) -> Message: return Message( message_id=msg_id, role=role, - parts=[Part(root=TextPart(text=content))], + parts=[Part(text=content)], reference_task_ids=reference_task_ids if reference_task_ids else [], ) @@ -43,7 +41,7 @@ def create_sample_message( # Helper to create a simple task def create_sample_task( task_id: str = 'task1', - status_state: TaskState = TaskState.submitted, + status_state: TaskState = TaskState.TASK_STATE_SUBMITTED, context_id: str = 'ctx1', ) -> Task: return Task( @@ -86,16 +84,14 @@ async def test_build_basic_context_no_populate(self) -> None: task_store=self.mock_task_store, ) - params = MessageSendParams(message=create_sample_message()) + params = SendMessageRequest(message=create_sample_message()) task_id = 'test_task_id_1' context_id = 'test_context_id_1' current_task = create_sample_task( task_id=task_id, context_id=context_id ) # Pass a valid User instance, e.g., UnauthenticatedUser or a mock spec'd as User - server_call_context = ServerCallContext( - user=UnauthenticatedUser(), auth_token='dummy_token' - ) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) request_context = await builder.build( params=params, @@ -131,10 +127,12 @@ async def test_build_populate_true_with_reference_task_ids(self) -> None: mock_ref_task1 = create_sample_task(task_id=ref_task_id1) mock_ref_task3 = create_sample_task(task_id=ref_task_id3) + server_call_context = ServerCallContext(user=UnauthenticatedUser()) + # Configure task_store.get mock # Note: AsyncMock side_effect needs to handle multiple calls if they have different args. # A simple way is a list of return values, or a function. - async def get_side_effect(task_id): + async def get_side_effect(task_id, server_call_context): if task_id == ref_task_id1: return mock_ref_task1 if task_id == ref_task_id3: @@ -143,12 +141,11 @@ async def get_side_effect(task_id): self.mock_task_store.get = AsyncMock(side_effect=get_side_effect) - params = MessageSendParams( + params = SendMessageRequest( message=create_sample_message( reference_task_ids=[ref_task_id1, ref_task_id2, ref_task_id3] ) ) - server_call_context = ServerCallContext(user=UnauthenticatedUser()) request_context = await builder.build( params=params, @@ -159,9 +156,15 @@ async def get_side_effect(task_id): ) self.assertEqual(self.mock_task_store.get.call_count, 3) - self.mock_task_store.get.assert_any_call(ref_task_id1) - self.mock_task_store.get.assert_any_call(ref_task_id2) - self.mock_task_store.get.assert_any_call(ref_task_id3) + self.mock_task_store.get.assert_any_call( + ref_task_id1, server_call_context + ) + self.mock_task_store.get.assert_any_call( + ref_task_id2, server_call_context + ) + self.mock_task_store.get.assert_any_call( + ref_task_id3, server_call_context + ) self.assertIsNotNone(request_context.related_tasks) self.assertEqual( @@ -194,7 +197,7 @@ async def test_build_populate_true_reference_ids_empty_or_none( server_call_context = ServerCallContext(user=UnauthenticatedUser()) # Test with empty list - params_empty_refs = MessageSendParams( + params_empty_refs = SendMessageRequest( message=create_sample_message(reference_task_ids=[]) ) request_context_empty = await builder.build( @@ -211,14 +214,17 @@ async def test_build_populate_true_reference_ids_empty_or_none( self.mock_task_store.get.reset_mock() # Reset for next call - # Test with referenceTaskIds=None (Pydantic model might default it to empty list or handle it) + # Test with reference_task_ids=None (Pydantic model might default it to empty list or handle it) # create_sample_message defaults to [] if None is passed, so this tests the same as above. # To explicitly test None in Message, we'd have to bypass Pydantic default or modify helper. # For now, this covers the "no IDs to process" case. msg_with_no_refs = Message( - message_id='m2', role=Role.user, parts=[], referenceTaskIds=None + message_id='m2', + role=Role.ROLE_USER, + parts=[], + reference_task_ids=None, ) - params_none_refs = MessageSendParams(message=msg_with_no_refs) + params_none_refs = SendMessageRequest(message=msg_with_no_refs) request_context_none = await builder.build( params=params_none_refs, task_id='t2', @@ -238,7 +244,7 @@ async def test_build_populate_true_task_store_none(self) -> None: should_populate_referred_tasks=True, task_store=None, # Explicitly None ) - params = MessageSendParams( + params = SendMessageRequest( message=create_sample_message(reference_task_ids=['ref1']) ) server_call_context = ServerCallContext(user=UnauthenticatedUser()) @@ -259,7 +265,7 @@ async def test_build_populate_false_with_reference_task_ids(self) -> None: should_populate_referred_tasks=False, task_store=self.mock_task_store, ) - params = MessageSendParams( + params = SendMessageRequest( message=create_sample_message( reference_task_ids=['ref_task_should_not_be_fetched'] ) @@ -288,7 +294,7 @@ async def test_build_with_custom_id_generators(self) -> None: task_id_generator=mock_task_id_generator, context_id_generator=mock_context_id_generator, ) - params = MessageSendParams(message=create_sample_message()) + params = SendMessageRequest(message=create_sample_message()) server_call_context = ServerCallContext(user=UnauthenticatedUser()) request_context = await builder.build( @@ -316,7 +322,7 @@ async def test_build_with_provided_ids_and_custom_id_generators( task_id_generator=mock_task_id_generator, context_id_generator=mock_context_id_generator, ) - params = MessageSendParams(message=create_sample_message()) + params = SendMessageRequest(message=create_sample_message()) server_call_context = ServerCallContext(user=UnauthenticatedUser()) provided_task_id = 'provided_task_id' diff --git a/tests/server/apps/jsonrpc/test_fastapi_app.py b/tests/server/apps/jsonrpc/test_fastapi_app.py deleted file mode 100644 index ddb68691f..000000000 --- a/tests/server/apps/jsonrpc/test_fastapi_app.py +++ /dev/null @@ -1,80 +0,0 @@ -from typing import Any -from unittest.mock import MagicMock - -import pytest - -from a2a.server.apps.jsonrpc import fastapi_app -from a2a.server.apps.jsonrpc.fastapi_app import A2AFastAPIApplication -from a2a.server.request_handlers.request_handler import ( - RequestHandler, # For mock spec -) -from a2a.types import AgentCard # For mock spec - - -# --- A2AFastAPIApplication Tests --- - - -class TestA2AFastAPIApplicationOptionalDeps: - # Running tests in this class requires the optional dependency fastapi to be - # present in the test environment. - - @pytest.fixture(scope='class', autouse=True) - def ensure_pkg_fastapi_is_present(self): - try: - import fastapi as _fastapi # noqa: F401 - except ImportError: - pytest.fail( - f'Running tests in {self.__class__.__name__} requires' - ' the optional dependency fastapi to be present in the test' - ' environment. Run `uv sync --dev ...` before running the test' - ' suite.' - ) - - @pytest.fixture(scope='class') - def mock_app_params(self) -> dict: - # Mock http_handler - mock_handler = MagicMock(spec=RequestHandler) - # Mock agent_card with essential attributes accessed in __init__ - mock_agent_card = MagicMock(spec=AgentCard) - # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed - # in __init__ - mock_agent_card.url = 'http://example.com' - # Ensure 'supports_authenticated_extended_card' attribute exists - mock_agent_card.supports_authenticated_extended_card = False - return {'agent_card': mock_agent_card, 'http_handler': mock_handler} - - @pytest.fixture(scope='class') - def mark_pkg_fastapi_not_installed(self): - pkg_fastapi_installed_flag = fastapi_app._package_fastapi_installed - fastapi_app._package_fastapi_installed = False - yield - fastapi_app._package_fastapi_installed = pkg_fastapi_installed_flag - - def test_create_a2a_fastapi_app_with_present_deps_succeeds( - self, mock_app_params: dict - ): - try: - _app = A2AFastAPIApplication(**mock_app_params) - except ImportError: - pytest.fail( - 'With the fastapi package present, creating a' - ' A2AFastAPIApplication instance should not raise ImportError' - ) - - def test_create_a2a_fastapi_app_with_missing_deps_raises_importerror( - self, - mock_app_params: dict, - mark_pkg_fastapi_not_installed: Any, - ): - with pytest.raises( - ImportError, - match=( - 'The `fastapi` package is required to use the' - ' `A2AFastAPIApplication`' - ), - ): - _app = A2AFastAPIApplication(**mock_app_params) - - -if __name__ == '__main__': - pytest.main([__file__]) diff --git a/tests/server/apps/jsonrpc/test_jsonrpc_app.py b/tests/server/apps/jsonrpc/test_jsonrpc_app.py deleted file mode 100644 index 36309872e..000000000 --- a/tests/server/apps/jsonrpc/test_jsonrpc_app.py +++ /dev/null @@ -1,380 +0,0 @@ -from typing import Any -from unittest.mock import AsyncMock, MagicMock - -import pytest - -from starlette.testclient import TestClient - - -# Attempt to import StarletteBaseUser, fallback to MagicMock if not available -try: - from starlette.authentication import BaseUser as StarletteBaseUser -except ImportError: - StarletteBaseUser = MagicMock() # type: ignore - -from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.server.apps.jsonrpc import ( - jsonrpc_app, # Keep this import for optional deps test -) -from a2a.server.apps.jsonrpc.jsonrpc_app import ( - JSONRPCApplication, - StarletteUserProxy, -) -from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication -from a2a.server.context import ServerCallContext -from a2a.server.request_handlers.request_handler import ( - RequestHandler, -) # For mock spec -from a2a.types import ( - AgentCard, - Message, - MessageSendParams, - Part, - Role, - SendMessageRequest, - SendMessageResponse, - SendMessageSuccessResponse, - TextPart, -) - - -# --- StarletteUserProxy Tests --- - - -class TestStarletteUserProxy: - def test_starlette_user_proxy_is_authenticated_true(self): - starlette_user_mock = MagicMock(spec=StarletteBaseUser) - starlette_user_mock.is_authenticated = True - proxy = StarletteUserProxy(starlette_user_mock) - assert proxy.is_authenticated is True - - def test_starlette_user_proxy_is_authenticated_false(self): - starlette_user_mock = MagicMock(spec=StarletteBaseUser) - starlette_user_mock.is_authenticated = False - proxy = StarletteUserProxy(starlette_user_mock) - assert proxy.is_authenticated is False - - def test_starlette_user_proxy_user_name(self): - starlette_user_mock = MagicMock(spec=StarletteBaseUser) - starlette_user_mock.display_name = 'Test User DisplayName' - proxy = StarletteUserProxy(starlette_user_mock) - assert proxy.user_name == 'Test User DisplayName' - - def test_starlette_user_proxy_user_name_raises_attribute_error(self): - """ - Tests that if the underlying starlette user object is missing the - display_name attribute, the proxy currently raises an AttributeError. - """ - starlette_user_mock = MagicMock(spec=StarletteBaseUser) - # Ensure display_name is not present on the mock to trigger AttributeError - del starlette_user_mock.display_name - - proxy = StarletteUserProxy(starlette_user_mock) - with pytest.raises(AttributeError, match='display_name'): - _ = proxy.user_name - - -# --- JSONRPCApplication Tests (Selected) --- - - -class TestJSONRPCApplicationSetup: # Renamed to avoid conflict - def test_jsonrpc_app_build_method_abstract_raises_typeerror( - self, - ): # Renamed test - mock_handler = MagicMock(spec=RequestHandler) - # Mock agent_card with essential attributes accessed in JSONRPCApplication.__init__ - mock_agent_card = MagicMock(spec=AgentCard) - # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed in __init__ - mock_agent_card.url = 'http://mockurl.com' - # Ensure 'supportsAuthenticatedExtendedCard' attribute exists - mock_agent_card.supports_authenticated_extended_card = False - - # This will fail at definition time if an abstract method is not implemented - with pytest.raises( - TypeError, - match=".*abstract class IncompleteJSONRPCApp .* abstract method '?build'?", - ): - - class IncompleteJSONRPCApp(JSONRPCApplication): - # Intentionally not implementing 'build' - def some_other_method(self): - pass - - IncompleteJSONRPCApp( - agent_card=mock_agent_card, http_handler=mock_handler - ) - - -class TestJSONRPCApplicationOptionalDeps: - # Running tests in this class requires optional dependencies starlette and - # sse-starlette to be present in the test environment. - - @pytest.fixture(scope='class', autouse=True) - def ensure_pkg_starlette_is_present(self): - try: - import sse_starlette as _sse_starlette # noqa: F401 - import starlette as _starlette # noqa: F401 - except ImportError: - pytest.fail( - f'Running tests in {self.__class__.__name__} requires' - ' optional dependencies starlette and sse-starlette to be' - ' present in the test environment. Run `uv sync --dev ...`' - ' before running the test suite.' - ) - - @pytest.fixture(scope='class') - def mock_app_params(self) -> dict: - # Mock http_handler - mock_handler = MagicMock(spec=RequestHandler) - # Mock agent_card with essential attributes accessed in __init__ - mock_agent_card = MagicMock(spec=AgentCard) - # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed - # in __init__ - mock_agent_card.url = 'http://example.com' - # Ensure 'supportsAuthenticatedExtendedCard' attribute exists - mock_agent_card.supports_authenticated_extended_card = False - return {'agent_card': mock_agent_card, 'http_handler': mock_handler} - - @pytest.fixture(scope='class') - def mark_pkg_starlette_not_installed(self): - pkg_starlette_installed_flag = jsonrpc_app._package_starlette_installed - jsonrpc_app._package_starlette_installed = False - yield - jsonrpc_app._package_starlette_installed = pkg_starlette_installed_flag - - def test_create_jsonrpc_based_app_with_present_deps_succeeds( - self, mock_app_params: dict - ): - class DummyJSONRPCApp(JSONRPCApplication): - def build( - self, - agent_card_url='/.well-known/agent.json', - rpc_url='/', - **kwargs, - ): - return object() - - try: - _app = DummyJSONRPCApp(**mock_app_params) - except ImportError: - pytest.fail( - 'With packages starlette and see-starlette present, creating a' - ' JSONRPCApplication-based instance should not raise' - ' ImportError' - ) - - def test_create_jsonrpc_based_app_with_missing_deps_raises_importerror( - self, mock_app_params: dict, mark_pkg_starlette_not_installed: Any - ): - class DummyJSONRPCApp(JSONRPCApplication): - def build( - self, - agent_card_url='/.well-known/agent.json', - rpc_url='/', - **kwargs, - ): - return object() - - with pytest.raises( - ImportError, - match=( - 'Packages `starlette` and `sse-starlette` are required to use' - ' the `JSONRPCApplication`' - ), - ): - _app = DummyJSONRPCApp(**mock_app_params) - - -class TestJSONRPCExtensions: - @pytest.fixture - def mock_handler(self): - handler = AsyncMock(spec=RequestHandler) - handler.on_message_send.return_value = SendMessageResponse( - root=SendMessageSuccessResponse( - id='1', - result=Message( - message_id='test', - role=Role.agent, - parts=[Part(TextPart(text='response message'))], - ), - ) - ) - return handler - - @pytest.fixture - def test_app(self, mock_handler): - mock_agent_card = MagicMock(spec=AgentCard) - mock_agent_card.url = 'http://mockurl.com' - mock_agent_card.supports_authenticated_extended_card = False - - return A2AStarletteApplication( - agent_card=mock_agent_card, http_handler=mock_handler - ) - - @pytest.fixture - def client(self, test_app): - return TestClient(test_app.build()) - - def test_request_with_single_extension(self, client, mock_handler): - headers = {HTTP_EXTENSION_HEADER: 'foo'} - response = client.post( - '/', - headers=headers, - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - mock_handler.on_message_send.assert_called_once() - call_context = mock_handler.on_message_send.call_args[0][1] - assert isinstance(call_context, ServerCallContext) - assert call_context.requested_extensions == {'foo'} - - def test_request_with_comma_separated_extensions( - self, client, mock_handler - ): - headers = {HTTP_EXTENSION_HEADER: 'foo, bar'} - response = client.post( - '/', - headers=headers, - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - mock_handler.on_message_send.assert_called_once() - call_context = mock_handler.on_message_send.call_args[0][1] - assert call_context.requested_extensions == {'foo', 'bar'} - - def test_request_with_comma_separated_extensions_no_space( - self, client, mock_handler - ): - headers = [ - (HTTP_EXTENSION_HEADER, 'foo, bar'), - (HTTP_EXTENSION_HEADER, 'baz'), - ] - response = client.post( - '/', - headers=headers, - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - mock_handler.on_message_send.assert_called_once() - call_context = mock_handler.on_message_send.call_args[0][1] - assert call_context.requested_extensions == {'foo', 'bar', 'baz'} - - def test_method_added_to_call_context_state(self, client, mock_handler): - response = client.post( - '/', - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - mock_handler.on_message_send.assert_called_once() - call_context = mock_handler.on_message_send.call_args[0][1] - assert call_context.state['method'] == 'message/send' - - def test_request_with_multiple_extension_headers( - self, client, mock_handler - ): - headers = [ - (HTTP_EXTENSION_HEADER, 'foo'), - (HTTP_EXTENSION_HEADER, 'bar'), - ] - response = client.post( - '/', - headers=headers, - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - mock_handler.on_message_send.assert_called_once() - call_context = mock_handler.on_message_send.call_args[0][1] - assert call_context.requested_extensions == {'foo', 'bar'} - - def test_response_with_activated_extensions(self, client, mock_handler): - def side_effect(request, context: ServerCallContext): - context.activated_extensions.add('foo') - context.activated_extensions.add('baz') - return SendMessageResponse( - root=SendMessageSuccessResponse( - id='1', - result=Message( - message_id='test', - role=Role.agent, - parts=[Part(TextPart(text='response message'))], - ), - ) - ) - - mock_handler.on_message_send.side_effect = side_effect - - response = client.post( - '/', - json=SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - message_id='1', - role=Role.user, - parts=[Part(TextPart(text='hi'))], - ) - ), - ).model_dump(), - ) - response.raise_for_status() - - assert response.status_code == 200 - assert HTTP_EXTENSION_HEADER in response.headers - assert set(response.headers[HTTP_EXTENSION_HEADER].split(', ')) == { - 'foo', - 'baz', - } - - -if __name__ == '__main__': - pytest.main([__file__]) diff --git a/tests/server/apps/jsonrpc/test_serialization.py b/tests/server/apps/jsonrpc/test_serialization.py deleted file mode 100644 index f67780461..000000000 --- a/tests/server/apps/jsonrpc/test_serialization.py +++ /dev/null @@ -1,228 +0,0 @@ -from unittest import mock - -import pytest - -from fastapi import FastAPI -from pydantic import ValidationError -from starlette.testclient import TestClient - -from a2a.server.apps import A2AFastAPIApplication, A2AStarletteApplication -from a2a.types import ( - APIKeySecurityScheme, - AgentCapabilities, - AgentCard, - In, - InvalidRequestError, - JSONParseError, - Message, - Part, - Role, - SecurityScheme, - TextPart, -) - - -@pytest.fixture -def agent_card_with_api_key(): - """Provides an AgentCard with an APIKeySecurityScheme for testing serialization.""" - # This data uses the alias 'in', which is correct for creating the model. - api_key_scheme_data = { - 'type': 'apiKey', - 'name': 'X-API-KEY', - 'in': 'header', - } - api_key_scheme = APIKeySecurityScheme.model_validate(api_key_scheme_data) - - return AgentCard( - name='APIKeyAgent', - description='An agent that uses API Key auth.', - url='http://example.com/apikey-agent', - version='1.0.0', - capabilities=AgentCapabilities(), - default_input_modes=['text/plain'], - default_output_modes=['text/plain'], - skills=[], - security_schemes={'api_key_auth': SecurityScheme(root=api_key_scheme)}, - security=[{'api_key_auth': []}], - ) - - -def test_starlette_agent_card_with_api_key_scheme_alias( - agent_card_with_api_key: AgentCard, -): - """ - Tests that the A2AStarletteApplication endpoint correctly serializes aliased fields. - - This verifies the fix for `APIKeySecurityScheme.in_` being serialized as `in_` instead of `in`. - """ - handler = mock.AsyncMock() - app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) - client = TestClient(app_instance.build()) - - response = client.get('/.well-known/agent-card.json') - assert response.status_code == 200 - response_data = response.json() - - security_scheme_json = response_data['securitySchemes']['api_key_auth'] - assert 'in' in security_scheme_json - assert security_scheme_json['in'] == 'header' - assert 'in_' not in security_scheme_json - - try: - parsed_card = AgentCard.model_validate(response_data) - parsed_scheme_wrapper = parsed_card.security_schemes['api_key_auth'] - assert isinstance(parsed_scheme_wrapper.root, APIKeySecurityScheme) - assert parsed_scheme_wrapper.root.in_ == In.header - except ValidationError as e: - pytest.fail( - f"AgentCard.model_validate failed on the server's response: {e}" - ) - - -def test_fastapi_agent_card_with_api_key_scheme_alias( - agent_card_with_api_key: AgentCard, -): - """ - Tests that the A2AFastAPIApplication endpoint correctly serializes aliased fields. - - This verifies the fix for `APIKeySecurityScheme.in_` being serialized as `in_` instead of `in`. - """ - handler = mock.AsyncMock() - app_instance = A2AFastAPIApplication(agent_card_with_api_key, handler) - client = TestClient(app_instance.build()) - - response = client.get('/.well-known/agent-card.json') - assert response.status_code == 200 - response_data = response.json() - - security_scheme_json = response_data['securitySchemes']['api_key_auth'] - assert 'in' in security_scheme_json - assert 'in_' not in security_scheme_json - assert security_scheme_json['in'] == 'header' - - -def test_handle_invalid_json(agent_card_with_api_key: AgentCard): - """Test handling of malformed JSON.""" - handler = mock.AsyncMock() - app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) - client = TestClient(app_instance.build()) - - response = client.post( - '/', - content='{ "jsonrpc": "2.0", "method": "test", "id": 1, "params": { "key": "value" }', - ) - assert response.status_code == 200 - data = response.json() - assert data['error']['code'] == JSONParseError().code - - -def test_handle_oversized_payload(agent_card_with_api_key: AgentCard): - """Test handling of oversized JSON payloads.""" - handler = mock.AsyncMock() - app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) - client = TestClient(app_instance.build()) - - large_string = 'a' * 11 * 1_000_000 # 11MB string - payload = { - 'jsonrpc': '2.0', - 'method': 'test', - 'id': 1, - 'params': {'data': large_string}, - } - - response = client.post('/', json=payload) - assert response.status_code == 200 - data = response.json() - assert data['error']['code'] == InvalidRequestError().code - - -@pytest.mark.parametrize( - 'max_content_length', - [ - None, - 11 * 1024 * 1024, - 30 * 1024 * 1024, - ], -) -def test_handle_oversized_payload_with_max_content_length( - agent_card_with_api_key: AgentCard, - max_content_length: int | None, -): - """Test handling of JSON payloads with sizes within custom max_content_length.""" - handler = mock.AsyncMock() - app_instance = A2AStarletteApplication( - agent_card_with_api_key, handler, max_content_length=max_content_length - ) - client = TestClient(app_instance.build()) - - large_string = 'a' * 11 * 1_000_000 # 11MB string - payload = { - 'jsonrpc': '2.0', - 'method': 'test', - 'id': 1, - 'params': {'data': large_string}, - } - - response = client.post('/', json=payload) - assert response.status_code == 200 - data = response.json() - # When max_content_length is set, requests up to that size should not be - # rejected due to payload size. The request might fail for other reasons, - # but it shouldn't be an InvalidRequestError related to the content length. - assert data['error']['code'] != InvalidRequestError().code - - -def test_handle_unicode_characters(agent_card_with_api_key: AgentCard): - """Test handling of unicode characters in JSON payload.""" - handler = mock.AsyncMock() - app_instance = A2AStarletteApplication(agent_card_with_api_key, handler) - client = TestClient(app_instance.build()) - - unicode_text = 'こんにちは世界' # "Hello world" in Japanese - unicode_payload = { - 'jsonrpc': '2.0', - 'method': 'message/send', - 'id': 'unicode_test', - 'params': { - 'message': { - 'role': 'user', - 'parts': [{'kind': 'text', 'text': unicode_text}], - 'message_id': 'msg-unicode', - } - }, - } - - # Mock a handler for this method - handler.on_message_send.return_value = Message( - role=Role.agent, - parts=[Part(root=TextPart(text=f'Received: {unicode_text}'))], - message_id='response-unicode', - ) - - response = client.post('/', json=unicode_payload) - - # We are not testing the handler logic here, just that the server can correctly - # deserialize the unicode payload without errors. A 200 response with any valid - # JSON-RPC response indicates success. - assert response.status_code == 200 - data = response.json() - assert 'error' not in data or data['error'] is None - assert data['result']['parts'][0]['text'] == f'Received: {unicode_text}' - - -def test_fastapi_sub_application(agent_card_with_api_key: AgentCard): - """ - Tests that the A2AFastAPIApplication endpoint correctly passes the url in sub-application. - """ - handler = mock.AsyncMock() - sub_app_instance = A2AFastAPIApplication(agent_card_with_api_key, handler) - app_instance = FastAPI() - app_instance.mount('/a2a', sub_app_instance.build()) - client = TestClient(app_instance) - - response = client.get('/a2a/openapi.json') - assert response.status_code == 200 - response_data = response.json() - - assert 'servers' in response_data - assert response_data['servers'] == [{'url': '/a2a'}] diff --git a/tests/server/apps/jsonrpc/test_starlette_app.py b/tests/server/apps/jsonrpc/test_starlette_app.py deleted file mode 100644 index 6a1472c8c..000000000 --- a/tests/server/apps/jsonrpc/test_starlette_app.py +++ /dev/null @@ -1,82 +0,0 @@ -from typing import Any -from unittest.mock import MagicMock - -import pytest - -from a2a.server.apps.jsonrpc import starlette_app -from a2a.server.apps.jsonrpc.starlette_app import A2AStarletteApplication -from a2a.server.request_handlers.request_handler import ( - RequestHandler, # For mock spec -) -from a2a.types import AgentCard # For mock spec - - -# --- A2AStarletteApplication Tests --- - - -class TestA2AStarletteApplicationOptionalDeps: - # Running tests in this class requires optional dependencies starlette and - # sse-starlette to be present in the test environment. - - @pytest.fixture(scope='class', autouse=True) - def ensure_pkg_starlette_is_present(self): - try: - import sse_starlette as _sse_starlette # noqa: F401 - import starlette as _starlette # noqa: F401 - except ImportError: - pytest.fail( - f'Running tests in {self.__class__.__name__} requires' - ' optional dependencies starlette and sse-starlette to be' - ' present in the test environment. Run `uv sync --dev ...`' - ' before running the test suite.' - ) - - @pytest.fixture(scope='class') - def mock_app_params(self) -> dict: - # Mock http_handler - mock_handler = MagicMock(spec=RequestHandler) - # Mock agent_card with essential attributes accessed in __init__ - mock_agent_card = MagicMock(spec=AgentCard) - # Ensure 'url' attribute exists on the mock_agent_card, as it's accessed - # in __init__ - mock_agent_card.url = 'http://example.com' - # Ensure 'supports_authenticated_extended_card' attribute exists - mock_agent_card.supports_authenticated_extended_card = False - return {'agent_card': mock_agent_card, 'http_handler': mock_handler} - - @pytest.fixture(scope='class') - def mark_pkg_starlette_not_installed(self): - pkg_starlette_installed_flag = ( - starlette_app._package_starlette_installed - ) - starlette_app._package_starlette_installed = False - yield - starlette_app._package_starlette_installed = ( - pkg_starlette_installed_flag - ) - - def test_create_a2a_starlette_app_with_present_deps_succeeds( - self, mock_app_params: dict - ): - try: - _app = A2AStarletteApplication(**mock_app_params) - except ImportError: - pytest.fail( - 'With packages starlette and see-starlette present, creating an' - ' A2AStarletteApplication instance should not raise ImportError' - ) - - def test_create_a2a_starlette_app_with_missing_deps_raises_importerror( - self, - mock_app_params: dict, - mark_pkg_starlette_not_installed: Any, - ): - with pytest.raises( - ImportError, - match='Packages `starlette` and `sse-starlette` are required', - ): - _app = A2AStarletteApplication(**mock_app_params) - - -if __name__ == '__main__': - pytest.main([__file__]) diff --git a/tests/server/apps/rest/test_rest_fastapi_app.py b/tests/server/apps/rest/test_rest_fastapi_app.py deleted file mode 100644 index 9ea8c9686..000000000 --- a/tests/server/apps/rest/test_rest_fastapi_app.py +++ /dev/null @@ -1,403 +0,0 @@ -import logging - -from typing import Any -from unittest.mock import MagicMock - -import pytest - -from fastapi import FastAPI -from google.protobuf import json_format -from httpx import ASGITransport, AsyncClient - -from a2a.grpc import a2a_pb2 -from a2a.server.apps.rest import fastapi_app, rest_adapter -from a2a.server.apps.rest.fastapi_app import A2ARESTFastAPIApplication -from a2a.server.apps.rest.rest_adapter import RESTAdapter -from a2a.server.request_handlers.request_handler import RequestHandler -from a2a.types import ( - AgentCard, - Message, - Part, - Role, - Task, - TaskState, - TaskStatus, - TextPart, -) - - -logger = logging.getLogger(__name__) - - -@pytest.fixture -async def agent_card() -> AgentCard: - mock_agent_card = MagicMock(spec=AgentCard) - mock_agent_card.url = 'http://mockurl.com' - mock_agent_card.supports_authenticated_extended_card = False - - # Mock the capabilities object with streaming disabled - mock_capabilities = MagicMock() - mock_capabilities.streaming = False - mock_agent_card.capabilities = mock_capabilities - - return mock_agent_card - - -@pytest.fixture -async def streaming_agent_card() -> AgentCard: - """Agent card that supports streaming for testing streaming endpoints.""" - mock_agent_card = MagicMock(spec=AgentCard) - mock_agent_card.url = 'http://mockurl.com' - mock_agent_card.supports_authenticated_extended_card = False - - # Mock the capabilities object with streaming enabled - mock_capabilities = MagicMock() - mock_capabilities.streaming = True - mock_agent_card.capabilities = mock_capabilities - - return mock_agent_card - - -@pytest.fixture -async def request_handler() -> RequestHandler: - return MagicMock(spec=RequestHandler) - - -@pytest.fixture -async def streaming_app( - streaming_agent_card: AgentCard, request_handler: RequestHandler -) -> FastAPI: - """Builds the FastAPI application for testing streaming endpoints.""" - - return A2ARESTFastAPIApplication( - streaming_agent_card, request_handler - ).build(agent_card_url='/well-known/agent-card.json', rpc_url='') - - -@pytest.fixture -async def streaming_client(streaming_app: FastAPI) -> AsyncClient: - """HTTP client for the streaming FastAPI application.""" - return AsyncClient( - transport=ASGITransport(app=streaming_app), base_url='http://test' - ) - - -@pytest.fixture -async def app( - agent_card: AgentCard, request_handler: RequestHandler -) -> FastAPI: - """Builds the FastAPI application for testing.""" - - return A2ARESTFastAPIApplication(agent_card, request_handler).build( - agent_card_url='/well-known/agent.json', rpc_url='' - ) - - -@pytest.fixture -async def client(app: FastAPI) -> AsyncClient: - return AsyncClient( - transport=ASGITransport(app=app), base_url='http://testapp' - ) - - -@pytest.fixture -def mark_pkg_starlette_not_installed(): - pkg_starlette_installed_flag = rest_adapter._package_starlette_installed - rest_adapter._package_starlette_installed = False - yield - rest_adapter._package_starlette_installed = pkg_starlette_installed_flag - - -@pytest.fixture -def mark_pkg_fastapi_not_installed(): - pkg_fastapi_installed_flag = fastapi_app._package_fastapi_installed - fastapi_app._package_fastapi_installed = False - yield - fastapi_app._package_fastapi_installed = pkg_fastapi_installed_flag - - -@pytest.mark.anyio -async def test_create_rest_adapter_with_present_deps_succeeds( - agent_card: AgentCard, request_handler: RequestHandler -): - try: - _app = RESTAdapter(agent_card, request_handler) - except ImportError: - pytest.fail( - 'With packages starlette and see-starlette present, creating an' - ' RESTAdapter instance should not raise ImportError' - ) - - -@pytest.mark.anyio -async def test_create_rest_adapter_with_missing_deps_raises_importerror( - agent_card: AgentCard, - request_handler: RequestHandler, - mark_pkg_starlette_not_installed: Any, -): - with pytest.raises( - ImportError, - match=( - 'Packages `starlette` and `sse-starlette` are required to use' - ' the `RESTAdapter`.' - ), - ): - _app = RESTAdapter(agent_card, request_handler) - - -@pytest.mark.anyio -async def test_create_a2a_rest_fastapi_app_with_present_deps_succeeds( - agent_card: AgentCard, request_handler: RequestHandler -): - try: - _app = A2ARESTFastAPIApplication(agent_card, request_handler).build( - agent_card_url='/well-known/agent.json', rpc_url='' - ) - except ImportError: - pytest.fail( - 'With the fastapi package present, creating a' - ' A2ARESTFastAPIApplication instance should not raise ImportError' - ) - - -@pytest.mark.anyio -async def test_create_a2a_rest_fastapi_app_with_missing_deps_raises_importerror( - agent_card: AgentCard, - request_handler: RequestHandler, - mark_pkg_fastapi_not_installed: Any, -): - with pytest.raises( - ImportError, - match=( - 'The `fastapi` package is required to use the' - ' `A2ARESTFastAPIApplication`' - ), - ): - _app = A2ARESTFastAPIApplication(agent_card, request_handler).build( - agent_card_url='/well-known/agent.json', rpc_url='' - ) - - -@pytest.mark.anyio -async def test_send_message_success_message( - client: AsyncClient, request_handler: MagicMock -) -> None: - expected_response = a2a_pb2.SendMessageResponse( - msg=a2a_pb2.Message( - message_id='test', - role=a2a_pb2.Role.ROLE_AGENT, - content=[ - a2a_pb2.Part(text='response message'), - ], - ), - ) - request_handler.on_message_send.return_value = Message( - message_id='test', - role=Role.agent, - parts=[Part(TextPart(text='response message'))], - ) - - request = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message(), - configuration=a2a_pb2.SendMessageConfiguration(), - ) - # To see log output, run pytest with '--log-cli=true --log-cli-level=INFO' - response = await client.post( - '/v1/message:send', json=json_format.MessageToDict(request) - ) - # request should always be successful - response.raise_for_status() - - actual_response = a2a_pb2.SendMessageResponse() - json_format.Parse(response.text, actual_response) - assert expected_response == actual_response - - -@pytest.mark.anyio -async def test_send_message_success_task( - client: AsyncClient, request_handler: MagicMock -) -> None: - expected_response = a2a_pb2.SendMessageResponse( - task=a2a_pb2.Task( - id='test_task_id', - context_id='test_context_id', - status=a2a_pb2.TaskStatus( - state=a2a_pb2.TaskState.TASK_STATE_COMPLETED, - update=a2a_pb2.Message( - message_id='test', - role=a2a_pb2.ROLE_AGENT, - content=[ - a2a_pb2.Part(text='response task message'), - ], - ), - ), - ), - ) - request_handler.on_message_send.return_value = Task( - id='test_task_id', - context_id='test_context_id', - status=TaskStatus( - state=TaskState.completed, - message=Message( - message_id='test', - role=Role.agent, - parts=[Part(TextPart(text='response task message'))], - ), - ), - ) - - request = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message(), - configuration=a2a_pb2.SendMessageConfiguration(), - ) - # To see log output, run pytest with '--log-cli=true --log-cli-level=INFO' - response = await client.post( - '/v1/message:send', json=json_format.MessageToDict(request) - ) - # request should always be successful - response.raise_for_status() - - actual_response = a2a_pb2.SendMessageResponse() - json_format.Parse(response.text, actual_response) - assert expected_response == actual_response - - -@pytest.mark.anyio -async def test_streaming_message_request_body_consumption( - streaming_client: AsyncClient, request_handler: MagicMock -) -> None: - """Test that streaming endpoint properly handles request body consumption. - - This test verifies the fix for the deadlock issue where request.body() - was being consumed inside the EventSourceResponse context, causing - the application to hang indefinitely. - """ - - # Mock the async generator response from the request handler - async def mock_stream_response(): - """Mock streaming response generator.""" - yield Message( - message_id='stream_msg_1', - role=Role.agent, - parts=[Part(TextPart(text='First streaming response'))], - ) - yield Message( - message_id='stream_msg_2', - role=Role.agent, - parts=[Part(TextPart(text='Second streaming response'))], - ) - - request_handler.on_message_send_stream.return_value = mock_stream_response() - - # Create a valid streaming request - request = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message( - message_id='test_stream_msg', - role=a2a_pb2.ROLE_USER, - content=[a2a_pb2.Part(text='Test streaming message')], - ), - configuration=a2a_pb2.SendMessageConfiguration(), - ) - - # This should not hang indefinitely (previously it would due to the deadlock) - response = await streaming_client.post( - '/v1/message:stream', - json=json_format.MessageToDict(request), - headers={'Accept': 'text/event-stream'}, - timeout=10.0, # Reasonable timeout to prevent hanging in tests - ) - - # The response should be successful - response.raise_for_status() - assert response.status_code == 200 - assert 'text/event-stream' in response.headers.get('content-type', '') - - # Verify that the request handler was called - request_handler.on_message_send_stream.assert_called_once() - - -@pytest.mark.anyio -async def test_streaming_endpoint_with_invalid_content_type( - streaming_client: AsyncClient, request_handler: MagicMock -) -> None: - """Test streaming endpoint behavior with invalid content type.""" - - async def mock_stream_response(): - yield Message( - message_id='stream_msg_1', - role=Role.agent, - parts=[Part(TextPart(text='Response'))], - ) - - request_handler.on_message_send_stream.return_value = mock_stream_response() - - request = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message( - message_id='test_stream_msg', - role=a2a_pb2.ROLE_USER, - content=[a2a_pb2.Part(text='Test message')], - ), - configuration=a2a_pb2.SendMessageConfiguration(), - ) - - # Send request without proper event-stream headers - response = await streaming_client.post( - '/v1/message:stream', - json=json_format.MessageToDict(request), - timeout=10.0, - ) - - # Should still succeed (the adapter handles content-type internally) - response.raise_for_status() - assert response.status_code == 200 - - -@pytest.mark.anyio -async def test_send_message_rejected_task( - client: AsyncClient, request_handler: MagicMock -) -> None: - expected_response = a2a_pb2.SendMessageResponse( - task=a2a_pb2.Task( - id='test_task_id', - context_id='test_context_id', - status=a2a_pb2.TaskStatus( - state=a2a_pb2.TaskState.TASK_STATE_REJECTED, - update=a2a_pb2.Message( - message_id='test', - role=a2a_pb2.ROLE_AGENT, - content=[ - a2a_pb2.Part(text="I don't want to work"), - ], - ), - ), - ), - ) - request_handler.on_message_send.return_value = Task( - id='test_task_id', - context_id='test_context_id', - status=TaskStatus( - state=TaskState.rejected, - message=Message( - message_id='test', - role=Role.agent, - parts=[Part(TextPart(text="I don't want to work"))], - ), - ), - ) - request = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message(), - configuration=a2a_pb2.SendMessageConfiguration(), - ) - - response = await client.post( - '/v1/message:send', json=json_format.MessageToDict(request) - ) - - response.raise_for_status() - actual_response = a2a_pb2.SendMessageResponse() - json_format.Parse(response.text, actual_response) - assert expected_response == actual_response - - -if __name__ == '__main__': - pytest.main([__file__]) diff --git a/tests/server/events/__init__.py b/tests/server/events/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/server/events/test_event_consumer.py b/tests/server/events/test_event_consumer.py index d306418ec..d7d20768b 100644 --- a/tests/server/events/test_event_consumer.py +++ b/tests/server/events/test_event_consumer.py @@ -7,46 +7,53 @@ from pydantic import ValidationError -from a2a.server.events.event_consumer import EventConsumer, QueueClosed -from a2a.server.events.event_queue import EventQueue +from a2a.server.events.event_consumer import EventConsumer +from a2a.server.events.event_queue import QueueShutDown +from a2a.server.events.event_queue import EventQueue, EventQueueLegacy +from a2a.server.jsonrpc_models import JSONRPCError from a2a.types import ( - A2AError, - Artifact, InternalError, - JSONRPCError, +) +from a2a.types.a2a_pb2 import ( + Artifact, Message, Part, + Role, Task, TaskArtifactUpdateEvent, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) -from a2a.utils.errors import ServerError -MINIMAL_TASK: dict[str, Any] = { - 'id': '123', - 'context_id': 'session-xyz', - 'status': {'state': 'submitted'}, - 'kind': 'task', -} +def create_sample_message(message_id: str = '111') -> Message: + """Create a sample Message proto object.""" + return Message( + message_id=message_id, + role=Role.ROLE_AGENT, + parts=[Part(text='test message')], + ) + -MESSAGE_PAYLOAD: dict[str, Any] = { - 'role': 'agent', - 'parts': [{'text': 'test message'}], - 'message_id': '111', -} +def create_sample_task( + task_id: str = '123', context_id: str = 'session-xyz' +) -> Task: + """Create a sample Task proto object.""" + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) @pytest.fixture def mock_event_queue(): - return AsyncMock(spec=EventQueue) + return AsyncMock(spec=EventQueueLegacy) @pytest.fixture -def event_consumer(mock_event_queue: EventQueue): +def event_consumer(mock_event_queue: EventQueueLegacy): return EventConsumer(queue=mock_event_queue) @@ -58,87 +65,22 @@ def test_init_logs_debug_message(mock_event_queue: EventQueue): mock_logger.debug.assert_called_once_with('EventConsumer initialized') -@pytest.mark.asyncio -async def test_consume_one_task_event( - event_consumer: MagicMock, - mock_event_queue: MagicMock, -): - task_event = Task(**MINIMAL_TASK) - mock_event_queue.dequeue_event.return_value = task_event - result = await event_consumer.consume_one() - assert result == task_event - mock_event_queue.task_done.assert_called_once() - - -@pytest.mark.asyncio -async def test_consume_one_message_event( - event_consumer: MagicMock, - mock_event_queue: MagicMock, -): - message_event = Message(**MESSAGE_PAYLOAD) - mock_event_queue.dequeue_event.return_value = message_event - result = await event_consumer.consume_one() - assert result == message_event - mock_event_queue.task_done.assert_called_once() - - -@pytest.mark.asyncio -async def test_consume_one_a2a_error_event( - event_consumer: MagicMock, - mock_event_queue: MagicMock, -): - error_event = A2AError(InternalError()) - mock_event_queue.dequeue_event.return_value = error_event - result = await event_consumer.consume_one() - assert result == error_event - mock_event_queue.task_done.assert_called_once() - - -@pytest.mark.asyncio -async def test_consume_one_jsonrpc_error_event( - event_consumer: MagicMock, - mock_event_queue: MagicMock, -): - error_event = JSONRPCError(code=123, message='Some Error') - mock_event_queue.dequeue_event.return_value = error_event - result = await event_consumer.consume_one() - assert result == error_event - mock_event_queue.task_done.assert_called_once() - - -@pytest.mark.asyncio -async def test_consume_one_queue_empty( - event_consumer: MagicMock, - mock_event_queue: MagicMock, -): - mock_event_queue.dequeue_event.side_effect = asyncio.QueueEmpty - try: - result = await event_consumer.consume_one() - assert result is not None - except ServerError: - pass - mock_event_queue.task_done.assert_not_called() - - @pytest.mark.asyncio async def test_consume_all_multiple_events( event_consumer: MagicMock, mock_event_queue: MagicMock, ): events: list[Any] = [ - Task(**MINIMAL_TASK), + create_sample_task(), TaskArtifactUpdateEvent( task_id='task_123', context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), + artifact=Artifact(artifact_id='11', parts=[Part(text='text')]), ), TaskStatusUpdateEvent( task_id='task_123', context_id='session-xyz', - status=TaskStatus(state=TaskState.working), - final=True, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ), ] cursor = 0 @@ -149,7 +91,8 @@ async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event - return None + mock_event_queue.is_closed.return_value = True + raise asyncio.QueueEmpty() mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -168,20 +111,17 @@ async def test_consume_until_message( mock_event_queue: MagicMock, ): events: list[Any] = [ - Task(**MINIMAL_TASK), + create_sample_task(), TaskArtifactUpdateEvent( task_id='task_123', context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), + artifact=Artifact(artifact_id='11', parts=[Part(text='text')]), ), - Message(**MESSAGE_PAYLOAD), + create_sample_message(), TaskStatusUpdateEvent( task_id='task_123', context_id='session-xyz', - status=TaskStatus(state=TaskState.working), - final=True, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ), ] cursor = 0 @@ -192,7 +132,8 @@ async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event - return None + mock_event_queue.is_closed.return_value = True + raise asyncio.QueueEmpty() mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -211,8 +152,10 @@ async def test_consume_message_events( mock_event_queue: MagicMock, ): events = [ - Message(**MESSAGE_PAYLOAD), - Message(**MESSAGE_PAYLOAD, final=True), + create_sample_message(), + create_sample_message( + message_id='222' + ), # Another message (final doesn't exist in proto) ] cursor = 0 @@ -222,7 +165,8 @@ async def mock_dequeue() -> Any: event = events[cursor] cursor += 1 return event - return None + mock_event_queue.is_closed.return_value = True + raise asyncio.QueueEmpty() mock_event_queue.dequeue_event = mock_dequeue consumed_events: list[Any] = [] @@ -251,9 +195,9 @@ async def test_consume_all_raises_stored_exception( async def test_consume_all_stops_on_queue_closed_and_confirmed_closed( event_consumer: EventConsumer, mock_event_queue: AsyncMock ): - """Test consume_all stops if QueueClosed is raised and queue.is_closed() is True.""" - # Simulate the queue raising QueueClosed (which is asyncio.QueueEmpty or QueueShutdown) - mock_event_queue.dequeue_event.side_effect = QueueClosed( + """Test consume_all stops if QueueShutDown is raised and queue.is_closed() is True.""" + # Simulate the queue raising QueueShutDown (which is asyncio.QueueEmpty or QueueShutdown) + mock_event_queue.dequeue_event.side_effect = QueueShutDown( 'Queue is empty/closed' ) # Simulate the queue confirming it's closed @@ -265,7 +209,7 @@ async def test_consume_all_stops_on_queue_closed_and_confirmed_closed( assert ( len(consumed_events) == 0 - ) # No events should be consumed as it breaks on QueueClosed + ) # No events should be consumed as it breaks on QueueShutDown mock_event_queue.dequeue_event.assert_called_once() # Should attempt to dequeue once mock_event_queue.is_closed.assert_called_once() # Should check if closed @@ -274,30 +218,28 @@ async def test_consume_all_stops_on_queue_closed_and_confirmed_closed( async def test_consume_all_continues_on_queue_empty_if_not_really_closed( event_consumer: EventConsumer, mock_event_queue: AsyncMock ): - """Test that QueueClosed with is_closed=False allows loop to continue via timeout.""" - payload = MESSAGE_PAYLOAD.copy() - payload['message_id'] = 'final_event_id' - final_event = Message(**payload) + """Test that QueueShutDown with is_closed=False allows loop to continue via timeout.""" + final_event = create_sample_message(message_id='final_event_id') # Setup dequeue_event behavior: - # 1. Raise QueueClosed (e.g., asyncio.QueueEmpty) + # 1. Raise QueueShutDown (e.g., asyncio.QueueEmpty) # 2. Return the final_event - # 3. Raise QueueClosed again (to terminate after final_event) + # 3. Raise QueueShutDown again (to terminate after final_event) dequeue_effects = [ - QueueClosed('Simulated temporary empty'), + QueueShutDown('Simulated temporary empty'), final_event, - QueueClosed('Queue closed after final event'), + QueueShutDown('Queue closed after final event'), ] mock_event_queue.dequeue_event.side_effect = dequeue_effects # Setup is_closed behavior: - # 1. False when QueueClosed is first raised (so loop doesn't break) - # 2. True after final_event is processed and QueueClosed is raised again + # 1. False when QueueShutDown is first raised (so loop doesn't break) + # 2. True after final_event is processed and QueueShutDown is raised again is_closed_effects = [False, True] mock_event_queue.is_closed.side_effect = is_closed_effects # Patch asyncio.wait_for used inside consume_all - # The goal is that the first QueueClosed leads to a TimeoutError inside consume_all, + # The goal is that the first QueueShutDown leads to a TimeoutError inside consume_all, # the loop continues, and then the final_event is fetched. # To reliably test the timeout behavior within consume_all, we adjust the consumer's @@ -312,15 +254,15 @@ async def test_consume_all_continues_on_queue_empty_if_not_really_closed( assert consumed_events[0] == final_event # Dequeue attempts: - # 1. Raises QueueClosed (is_closed=False, leads to TimeoutError, loop continues) + # 1. Raises QueueShutDown (is_closed=False, leads to TimeoutError, loop continues) # 2. Returns final_event (which is a Message, causing consume_all to break) assert ( mock_event_queue.dequeue_event.call_count == 2 ) # Only two calls needed # is_closed calls: - # 1. After first QueueClosed (returns False) - # The second QueueClosed is not reached because Message breaks the loop. + # 1. After first QueueShutDown (returns False) + # The second QueueShutDown is not reached because Message breaks the loop. assert mock_event_queue.is_closed.call_count == 1 @@ -329,13 +271,13 @@ async def test_consume_all_handles_queue_empty_when_closed_python_version_agnost event_consumer: EventConsumer, mock_event_queue: AsyncMock, monkeypatch ): """Ensure consume_all stops with no events when queue is closed and dequeue_event raises asyncio.QueueEmpty (Python version-agnostic).""" - # Make QueueClosed a distinct exception (not QueueEmpty) to emulate py3.13 semantics + # Make QueueShutDown a distinct exception (not QueueEmpty) to emulate py3.13 semantics from a2a.server.events import event_consumer as ec class QueueShutDown(Exception): pass - monkeypatch.setattr(ec, 'QueueClosed', QueueShutDown, raising=True) + monkeypatch.setattr(ec, 'QueueShutDown', QueueShutDown, raising=True) # Simulate queue reporting closed while dequeue raises QueueEmpty mock_event_queue.dequeue_event.side_effect = asyncio.QueueEmpty( @@ -358,7 +300,7 @@ async def test_consume_all_continues_on_queue_empty_when_not_closed( ): """Ensure consume_all continues after asyncio.QueueEmpty when queue is open, yielding the next (final) event.""" # First dequeue raises QueueEmpty (transient empty), then a final Message arrives - final = Message(role='agent', parts=[{'text': 'done'}], message_id='final') + final = create_sample_message(message_id='final') mock_event_queue.dequeue_event.side_effect = [ asyncio.QueueEmpty('temporarily empty'), final, @@ -455,3 +397,102 @@ async def test_consume_all_handles_validation_error( assert ( 'Invalid event format received' in logger_error_mock.call_args[0][0] ) + + +@pytest.mark.xfail(reason='https://github.com/a2aproject/a2a-python/issues/869') +@pytest.mark.asyncio +async def test_graceful_close_allows_tapped_queues_to_drain() -> None: + + parent_queue = EventQueueLegacy(max_queue_size=10) + child_queue = await parent_queue.tap() + + fast_consumer_done = asyncio.Event() + + # Producer + async def produce() -> None: + await parent_queue.enqueue_event( + TaskStatusUpdateEvent( + status=TaskStatus(state=TaskState.TASK_STATE_WORKING) + ) + ) + await parent_queue.enqueue_event( + TaskStatusUpdateEvent( + status=TaskStatus(state=TaskState.TASK_STATE_WORKING) + ) + ) + await parent_queue.enqueue_event(Message(message_id='final')) + + # Fast consumer on parent queue + async def fast_consume() -> list: + consumer = EventConsumer(parent_queue) + events = [event async for event in consumer.consume_all()] + fast_consumer_done.set() + return events + + # Slow consumer on child queue + async def slow_consume() -> list: + consumer = EventConsumer(child_queue) + events = [] + async for event in consumer.consume_all(): + events.append(event) + # Wait for fast_consume to complete (and trigger close) before + # consuming further events to ensure they aren't prematurely dropped. + await fast_consumer_done.wait() + return events + + # Run producer and consumers + producer_task = asyncio.create_task(produce()) + + fast_task = asyncio.create_task(fast_consume()) + slow_task = asyncio.create_task(slow_consume()) + + await producer_task + fast_events = await fast_task + slow_events = await slow_task + + assert len(fast_events) == 3 + assert len(slow_events) == 3 + + +@pytest.mark.xfail( + reason='https://github.com/a2aproject/a2a-python/issues/869', + raises=asyncio.TimeoutError, +) +@pytest.mark.asyncio +async def test_background_close_deadlocks_on_trailing_events() -> None: + queue = EventQueueLegacy() + + # Producer enqueues a final event, but then enqueues another event + # (e.g., simulating a delayed log message, race condition, or multiple messages). + await queue.enqueue_event(Message(message_id='final')) + await queue.enqueue_event(Message(message_id='trailing_log')) + + # Consumer dequeues 'final' but stops there (e.g. because it is a final event). + event = await queue.dequeue_event() + assert isinstance(event, Message) and event.message_id == 'final' + queue.task_done() + + # Now attempt a graceful close. This demonstrates the deadlock that + # the previous implementation (with background task and clear_parent_events) + # was trying to solve. + await asyncio.wait_for(queue.close(immediate=False), timeout=0.1) + + +@pytest.mark.asyncio +async def test_consume_all_handles_actual_queue_shutdown( + event_consumer: EventConsumer, mock_event_queue: AsyncMock +): + """Ensure consume_all stops when queue is closed and dequeue_event raises the actual QueueShutDown from event_queue.""" + from a2a.server.events.event_queue import QueueShutDown + + mock_event_queue.dequeue_event.side_effect = QueueShutDown( + 'Queue is closed' + ) + mock_event_queue.is_closed.return_value = True + + consumed_events = [] + # This should exit cleanly because consume_all correctly catches the QueueShutDown exception. + async for event in event_consumer.consume_all(): + consumed_events.append(event) + + assert len(consumed_events) == 0 diff --git a/tests/server/events/test_event_queue.py b/tests/server/events/test_event_queue.py index 96ded9580..b45d99003 100644 --- a/tests/server/events/test_event_queue.py +++ b/tests/server/events/test_event_queue.py @@ -1,60 +1,81 @@ import asyncio -import sys from typing import Any -from unittest.mock import ( - AsyncMock, - MagicMock, - patch, -) import pytest -from a2a.server.events.event_queue import DEFAULT_MAX_QUEUE_SIZE, EventQueue +from a2a.server.events.event_queue import ( + DEFAULT_MAX_QUEUE_SIZE, + EventQueueLegacy, + QueueShutDown, +) +from a2a.server.jsonrpc_models import JSONRPCError from a2a.types import ( - A2AError, + TaskNotFoundError, +) +from a2a.types.a2a_pb2 import ( Artifact, - JSONRPCError, Message, Part, + Role, Task, TaskArtifactUpdateEvent, - TaskNotFoundError, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) -MINIMAL_TASK: dict[str, Any] = { - 'id': '123', - 'context_id': 'session-xyz', - 'status': {'state': 'submitted'}, - 'kind': 'task', -} -MESSAGE_PAYLOAD: dict[str, Any] = { - 'role': 'agent', - 'parts': [{'text': 'test message'}], - 'message_id': '111', -} +def create_sample_message(message_id: str = '111') -> Message: + """Create a sample Message proto object.""" + return Message( + message_id=message_id, + role=Role.ROLE_AGENT, + parts=[Part(text='test message')], + ) + + +def create_sample_task( + task_id: str = '123', context_id: str = 'session-xyz' +) -> Task: + """Create a sample Task proto object.""" + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + + +class QueueJoinWrapper: + """A wrapper to intercept and signal when `queue.join()` is called.""" + + def __init__(self, original: Any, join_reached: asyncio.Event) -> None: + self.original = original + self.join_reached = join_reached + + def __getattr__(self, name: str) -> Any: + return getattr(self.original, name) + + async def join(self) -> None: + self.join_reached.set() + await self.original.join() @pytest.fixture -def event_queue() -> EventQueue: - return EventQueue() +def event_queue() -> EventQueueLegacy: + return EventQueueLegacy() def test_constructor_default_max_queue_size() -> None: """Test that the queue is created with the default max size.""" - eq = EventQueue() + eq = EventQueueLegacy() assert eq.queue.maxsize == DEFAULT_MAX_QUEUE_SIZE def test_constructor_max_queue_size() -> None: """Test that the asyncio.Queue is created with the specified max_queue_size.""" custom_size = 123 - eq = EventQueue(max_queue_size=custom_size) + eq = EventQueueLegacy(max_queue_size=custom_size) assert eq.queue.maxsize == custom_size @@ -63,48 +84,51 @@ def test_constructor_invalid_max_queue_size() -> None: with pytest.raises( ValueError, match='max_queue_size must be greater than 0' ): - EventQueue(max_queue_size=0) + EventQueueLegacy(max_queue_size=0) with pytest.raises( ValueError, match='max_queue_size must be greater than 0' ): - EventQueue(max_queue_size=-10) + EventQueueLegacy(max_queue_size=-10) @pytest.mark.asyncio -async def test_enqueue_and_dequeue_event(event_queue: EventQueue) -> None: - """Test that an event can be enqueued and dequeued.""" - event = Message(**MESSAGE_PAYLOAD) - await event_queue.enqueue_event(event) - dequeued_event = await event_queue.dequeue_event() - assert dequeued_event == event +async def test_event_queue_async_context_manager( + event_queue: EventQueueLegacy, +) -> None: + """Test that EventQueueLegacy can be used as an async context manager.""" + async with event_queue as q: + assert q is event_queue + assert event_queue.is_closed() is False + assert event_queue.is_closed() is True @pytest.mark.asyncio -async def test_dequeue_event_no_wait(event_queue: EventQueue) -> None: - """Test dequeue_event with no_wait=True.""" - event = Task(**MINIMAL_TASK) - await event_queue.enqueue_event(event) - dequeued_event = await event_queue.dequeue_event(no_wait=True) - assert dequeued_event == event +async def test_event_queue_async_context_manager_on_exception( + event_queue: EventQueueLegacy, +) -> None: + """Test that close() is called even when an exception occurs inside the context.""" + with pytest.raises(RuntimeError, match='boom'): + async with event_queue: + raise RuntimeError('boom') + assert event_queue.is_closed() is True @pytest.mark.asyncio -async def test_dequeue_event_empty_queue_no_wait( - event_queue: EventQueue, -) -> None: - """Test dequeue_event with no_wait=True when the queue is empty.""" - with pytest.raises(asyncio.QueueEmpty): - await event_queue.dequeue_event(no_wait=True) +async def test_enqueue_and_dequeue_event(event_queue: EventQueueLegacy) -> None: + """Test that an event can be enqueued and dequeued.""" + event = create_sample_message() + await event_queue.enqueue_event(event) + dequeued_event = await event_queue.dequeue_event() + assert dequeued_event == event @pytest.mark.asyncio -async def test_dequeue_event_wait(event_queue: EventQueue) -> None: +async def test_dequeue_event_wait(event_queue: EventQueueLegacy) -> None: """Test dequeue_event with the default wait behavior.""" event = TaskStatusUpdateEvent( task_id='task_123', context_id='session-xyz', - status=TaskStatus(state=TaskState.working), - final=True, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) await event_queue.enqueue_event(event) dequeued_event = await event_queue.dequeue_event() @@ -112,14 +136,12 @@ async def test_dequeue_event_wait(event_queue: EventQueue) -> None: @pytest.mark.asyncio -async def test_task_done(event_queue: EventQueue) -> None: +async def test_task_done(event_queue: EventQueueLegacy) -> None: """Test the task_done method.""" event = TaskArtifactUpdateEvent( task_id='task_123', context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), + artifact=Artifact(artifact_id='11', parts=[Part(text='text')]), ) await event_queue.enqueue_event(event) _ = await event_queue.dequeue_event() @@ -128,11 +150,11 @@ async def test_task_done(event_queue: EventQueue) -> None: @pytest.mark.asyncio async def test_enqueue_different_event_types( - event_queue: EventQueue, + event_queue: EventQueueLegacy, ) -> None: """Test enqueuing different types of events.""" events: list[Any] = [ - A2AError(TaskNotFoundError()), + TaskNotFoundError(), JSONRPCError(code=111, message='rpc error'), ] for event in events: @@ -143,50 +165,51 @@ async def test_enqueue_different_event_types( @pytest.mark.asyncio async def test_enqueue_event_propagates_to_children( - event_queue: EventQueue, + event_queue: EventQueueLegacy, ) -> None: """Test that events are enqueued to tapped child queues.""" - child_queue1 = event_queue.tap() - child_queue2 = event_queue.tap() + child_queue1 = await event_queue.tap() + child_queue2 = await event_queue.tap() - event1 = Message(**MESSAGE_PAYLOAD) - event2 = Task(**MINIMAL_TASK) + event1 = create_sample_message() + event2 = create_sample_task() await event_queue.enqueue_event(event1) await event_queue.enqueue_event(event2) # Check parent queue - assert await event_queue.dequeue_event(no_wait=True) == event1 - assert await event_queue.dequeue_event(no_wait=True) == event2 + assert await event_queue.dequeue_event() == event1 + assert await event_queue.dequeue_event() == event2 # Check child queue 1 - assert await child_queue1.dequeue_event(no_wait=True) == event1 - assert await child_queue1.dequeue_event(no_wait=True) == event2 + assert await child_queue1.dequeue_event() == event1 + assert await child_queue1.dequeue_event() == event2 # Check child queue 2 - assert await child_queue2.dequeue_event(no_wait=True) == event1 - assert await child_queue2.dequeue_event(no_wait=True) == event2 + assert await child_queue2.dequeue_event() == event1 + assert await child_queue2.dequeue_event() == event2 @pytest.mark.asyncio async def test_enqueue_event_when_closed( - event_queue: EventQueue, expected_queue_closed_exception: type[Exception] + event_queue: EventQueueLegacy, + expected_queue_closed_exception: type[Exception], ) -> None: """Test that no event is enqueued if the parent queue is closed.""" await event_queue.close() # Close the queue first - event = Message(**MESSAGE_PAYLOAD) + event = create_sample_message() # Attempt to enqueue, should do nothing or log a warning as per implementation await event_queue.enqueue_event(event) # Verify the queue is still empty with pytest.raises(expected_queue_closed_exception): - await event_queue.dequeue_event(no_wait=True) + await event_queue.dequeue_event() # Also verify child queues are not affected directly by parent's enqueue attempt when closed # (though they would be closed too by propagation) child_queue = ( - event_queue.tap() + await event_queue.tap() ) # Tap after close might be weird, but let's see # The current implementation would add it to _children # and then child.close() would be called. @@ -195,34 +218,18 @@ async def test_enqueue_event_when_closed( child_queue.close() ) # ensure child is also seen as closed for this test's purpose with pytest.raises(expected_queue_closed_exception): - await child_queue.dequeue_event(no_wait=True) + await child_queue.dequeue_event() @pytest.fixture def expected_queue_closed_exception() -> type[Exception]: - if sys.version_info < (3, 13): - return asyncio.QueueEmpty - return asyncio.QueueShutDown - - -@pytest.mark.asyncio -async def test_dequeue_event_closed_and_empty_no_wait( - event_queue: EventQueue, expected_queue_closed_exception: type[Exception] -) -> None: - """Test dequeue_event raises QueueEmpty when closed, empty, and no_wait=True.""" - await event_queue.close() - assert event_queue.is_closed() - # Ensure queue is actually empty (e.g. by trying a non-blocking get on internal queue) - with pytest.raises(expected_queue_closed_exception): - event_queue.queue.get_nowait() - - with pytest.raises(expected_queue_closed_exception): - await event_queue.dequeue_event(no_wait=True) + return QueueShutDown @pytest.mark.asyncio async def test_dequeue_event_closed_and_empty_waits_then_raises( - event_queue: EventQueue, expected_queue_closed_exception: type[Exception] + event_queue: EventQueueLegacy, + expected_queue_closed_exception: type[Exception], ) -> None: """Test dequeue_event raises QueueEmpty eventually when closed, empty, and no_wait=False.""" await event_queue.close() @@ -238,30 +245,27 @@ async def test_dequeue_event_closed_and_empty_waits_then_raises( # However, the current code: # async with self._lock: # if self._is_closed and self.queue.empty(): - # logger.warning('Queue is closed. Event will not be dequeued.') - # raise asyncio.QueueEmpty('Queue is closed.') # event = await self.queue.get() -> this line is not reached if closed and empty. # So, for the current implementation, it will raise QueueEmpty immediately. with pytest.raises(expected_queue_closed_exception): - await event_queue.dequeue_event(no_wait=False) + await event_queue.dequeue_event() # If the implementation were to change to allow `await self.queue.get()` # to be called even when closed (to drain it), then a timeout test would be needed. # For now, testing the current behavior. # Example of a timeout test if it were to wait: # with pytest.raises(asyncio.TimeoutError): # Or QueueEmpty if that's what join/shutdown causes get() to raise - # await asyncio.wait_for(event_queue.dequeue_event(no_wait=False), timeout=0.01) @pytest.mark.asyncio -async def test_tap_creates_child_queue(event_queue: EventQueue) -> None: - """Test that tap creates a new EventQueue and adds it to children.""" +async def test_tap_creates_child_queue(event_queue: EventQueueLegacy) -> None: + """Test that tap creates a new EventQueueLegacy and adds it to children.""" initial_children_count = len(event_queue._children) - child_queue = event_queue.tap() + child_queue = await event_queue.tap() - assert isinstance(child_queue, EventQueue) + assert isinstance(child_queue, EventQueueLegacy) assert child_queue != event_queue # Ensure it's a new instance assert len(event_queue._children) == initial_children_count + 1 assert child_queue in event_queue._children @@ -271,111 +275,15 @@ async def test_tap_creates_child_queue(event_queue: EventQueue) -> None: @pytest.mark.asyncio -async def test_close_sets_flag_and_handles_internal_queue_old_python( - event_queue: EventQueue, -) -> None: - """Test close behavior on Python < 3.13 (using queue.join).""" - with patch('sys.version_info', (3, 12, 0)): # Simulate older Python - # Mock queue.join as it's called in older versions - event_queue.queue.join = AsyncMock() - - await event_queue.close() - - assert event_queue.is_closed() is True - event_queue.queue.join.assert_awaited_once() # waited for drain - - -@pytest.mark.asyncio -async def test_close_sets_flag_and_handles_internal_queue_new_python( - event_queue: EventQueue, -) -> None: - """Test close behavior on Python >= 3.13 (using queue.shutdown).""" - with patch('sys.version_info', (3, 13, 0)): - # Inject a dummy shutdown method for non-3.13 runtimes - from typing import cast - - queue = cast('Any', event_queue.queue) - queue.shutdown = MagicMock() # type: ignore[attr-defined] - await event_queue.close() - assert event_queue.is_closed() is True - queue.shutdown.assert_called_once_with(False) - - -@pytest.mark.asyncio -async def test_close_graceful_py313_waits_for_join_and_children( - event_queue: EventQueue, -) -> None: - """For Python >=3.13 and immediate=False, close should shut down(False), then wait for join and children.""" - with patch('sys.version_info', (3, 13, 0)): - # Arrange - from typing import cast - - q_any = cast('Any', event_queue.queue) - q_any.shutdown = MagicMock() # type: ignore[attr-defined] - event_queue.queue.join = AsyncMock() - - child = event_queue.tap() - child.close = AsyncMock() - - # Act - await event_queue.close(immediate=False) - - # Assert - event_queue.queue.join.assert_awaited_once() - child.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_close_propagates_to_children(event_queue: EventQueue) -> None: - """Test that close() is called on all child queues.""" - child_queue1 = event_queue.tap() - child_queue2 = event_queue.tap() - - # Mock the close method of children to verify they are called - child_queue1.close = AsyncMock() - child_queue2.close = AsyncMock() - +async def test_close_idempotent(event_queue: EventQueueLegacy) -> None: await event_queue.close() - - child_queue1.close.assert_awaited_once() - child_queue2.close.assert_awaited_once() - - -@pytest.mark.asyncio -async def test_close_idempotent(event_queue: EventQueue) -> None: - """Test that calling close() multiple times doesn't cause errors and only acts once.""" - # Mock the internal queue's join or shutdown to see how many times it's effectively called - with patch( - 'sys.version_info', (3, 12, 0) - ): # Test with older version logic first - event_queue.queue.join = AsyncMock() - await event_queue.close() - assert event_queue.is_closed() is True - event_queue.queue.join.assert_called_once() # Called first time - - # Call close again - await event_queue.close() - assert event_queue.is_closed() is True - event_queue.queue.join.assert_called_once() # Still only called once - - # Reset for new Python version test - event_queue_new = EventQueue() # New queue for fresh state - with patch('sys.version_info', (3, 13, 0)): - from typing import cast - - queue = cast('Any', event_queue_new.queue) - queue.shutdown = MagicMock() # type: ignore[attr-defined] - await event_queue_new.close() - assert event_queue_new.is_closed() is True - queue.shutdown.assert_called_once() - - await event_queue_new.close() - assert event_queue_new.is_closed() is True - queue.shutdown.assert_called_once() # Still only called once + assert event_queue.is_closed() is True + await event_queue.close() + assert event_queue.is_closed() is True @pytest.mark.asyncio -async def test_is_closed_reflects_state(event_queue: EventQueue) -> None: +async def test_is_closed_reflects_state(event_queue: EventQueueLegacy) -> None: """Test that is_closed() returns the correct state before and after closing.""" assert event_queue.is_closed() is False # Initially open @@ -385,11 +293,11 @@ async def test_is_closed_reflects_state(event_queue: EventQueue) -> None: @pytest.mark.asyncio -async def test_close_with_immediate_true(event_queue: EventQueue) -> None: +async def test_close_with_immediate_true(event_queue: EventQueueLegacy) -> None: """Test close with immediate=True clears events immediately.""" # Add some events to the queue - event1 = Message(**MESSAGE_PAYLOAD) - event2 = Task(**MINIMAL_TASK) + event1 = create_sample_message() + event2 = create_sample_task() await event_queue.enqueue_event(event1) await event_queue.enqueue_event(event2) @@ -406,13 +314,13 @@ async def test_close_with_immediate_true(event_queue: EventQueue) -> None: @pytest.mark.asyncio async def test_close_immediate_propagates_to_children( - event_queue: EventQueue, + event_queue: EventQueueLegacy, ) -> None: """Test that immediate parameter is propagated to child queues.""" - child_queue = event_queue.tap() + child_queue = await event_queue.tap() # Add events to both parent and child - event = Message(**MESSAGE_PAYLOAD) + event = create_sample_message() await event_queue.enqueue_event(event) assert child_queue.is_closed() is False @@ -427,82 +335,198 @@ async def test_close_immediate_propagates_to_children( @pytest.mark.asyncio -async def test_clear_events_current_queue_only(event_queue: EventQueue) -> None: - """Test clear_events clears only the current queue when clear_child_queues=False.""" - child_queue = event_queue.tap() - event1 = Message(**MESSAGE_PAYLOAD) - event2 = Task(**MINIMAL_TASK) - await event_queue.enqueue_event(event1) - await event_queue.enqueue_event(event2) +async def test_close_graceful_waits_for_join_and_children( + event_queue: EventQueueLegacy, +) -> None: + child = await event_queue.tap() + await event_queue.enqueue_event(create_sample_message()) - # Clear only parent queue - await event_queue.clear_events(clear_child_queues=False) + join_reached = asyncio.Event() + event_queue._queue = QueueJoinWrapper(event_queue.queue, join_reached) + child._queue = QueueJoinWrapper(child.queue, join_reached) - # Verify parent queue is empty - assert event_queue.queue.empty() + close_task = asyncio.create_task(event_queue.close(immediate=False)) + await join_reached.wait() - # Verify child queue still has its event - assert not child_queue.queue.empty() - assert child_queue.is_closed() is False + assert event_queue.is_closed() + assert child.is_closed() + assert not close_task.done() - dequeued_child_event = await child_queue.dequeue_event(no_wait=True) - assert dequeued_child_event == event1 + await event_queue.dequeue_event() + event_queue.task_done() + + await child.dequeue_event() + child.task_done() + + await asyncio.wait_for(close_task, timeout=1.0) @pytest.mark.asyncio -async def test_clear_events_with_children(event_queue: EventQueue) -> None: - """Test clear_events clears both current queue and child queues.""" - # Create child queues and add events - child_queue1 = event_queue.tap() - child_queue2 = event_queue.tap() - - # Add events to parent queue - event1 = Message(**MESSAGE_PAYLOAD) - event2 = Task(**MINIMAL_TASK) - await event_queue.enqueue_event(event1) - await event_queue.enqueue_event(event2) +async def test_close_propagates_to_children( + event_queue: EventQueueLegacy, +) -> None: + child_queue1 = await event_queue.tap() + child_queue2 = await event_queue.tap() + await event_queue.close() + assert child_queue1.is_closed() + assert child_queue2.is_closed() - # Clear all queues - await event_queue.clear_events(clear_child_queues=True) - # Verify all queues are empty - assert event_queue.queue.empty() - assert child_queue1.queue.empty() - assert child_queue2.queue.empty() +@pytest.mark.xfail(reason='https://github.com/a2aproject/a2a-python/issues/869') +@pytest.mark.asyncio +async def test_enqueue_close_race_condition() -> None: + queue = EventQueueLegacy() + event = create_sample_message() + + enqueue_task = asyncio.create_task(queue.enqueue_event(event)) + close_task = asyncio.create_task(queue.close(immediate=False)) + + try: + results = await asyncio.wait_for( + asyncio.gather(enqueue_task, close_task, return_exceptions=True), + timeout=1.0, + ) + for res in results: + if ( + isinstance(res, Exception) + and type(res).__name__ != 'QueueShutDown' + ): + raise res + except asyncio.TimeoutError: + pytest.fail( + 'Deadlock in close() because enqueue_event put an item during close but before join()' + ) @pytest.mark.asyncio -async def test_clear_events_empty_queue(event_queue: EventQueue) -> None: - """Test clear_events works correctly with empty queue.""" - # Verify queue is initially empty - assert event_queue.queue.empty() +async def test_event_queue_dequeue_immediate_false( + event_queue: EventQueueLegacy, +) -> None: + msg = create_sample_message() + await event_queue.enqueue_event(msg) + # Start close in background so it can wait for join() + close_task = asyncio.create_task(event_queue.close(immediate=False)) + + # The event is still in the queue, we can dequeue it + assert await event_queue.dequeue_event() == msg + event_queue.task_done() - # Clear events from empty queue - await event_queue.clear_events() + await close_task - # Verify queue remains empty - assert event_queue.queue.empty() + # Queue is now empty and closed + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() @pytest.mark.asyncio -async def test_clear_events_closed_queue(event_queue: EventQueue) -> None: - """Test clear_events works correctly with closed queue.""" - # Add events and close queue +async def test_event_queue_dequeue_immediate_true( + event_queue: EventQueueLegacy, +) -> None: + msg = create_sample_message() + await event_queue.enqueue_event(msg) + await event_queue.close(immediate=True) + # The queue is immediately flushed, so dequeue should raise QueueShutDown + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() - with patch('sys.version_info', (3, 12, 0)): # Simulate older Python - # Mock queue.join as it's called in older versions - event_queue.queue.join = AsyncMock() - event = Message(**MESSAGE_PAYLOAD) - await event_queue.enqueue_event(event) - await event_queue.close() +@pytest.mark.asyncio +async def test_event_queue_enqueue_when_closed( + event_queue: EventQueueLegacy, +) -> None: + await event_queue.close(immediate=True) + msg = create_sample_message() + await event_queue.enqueue_event(msg) + # Enqueue should have returned without doing anything + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() - # Verify queue is closed but not empty - assert event_queue.is_closed() is True - assert not event_queue.queue.empty() - # Clear events from closed queue - await event_queue.clear_events() +@pytest.mark.asyncio +async def test_event_queue_shutdown_wakes_getter( + event_queue: EventQueueLegacy, +) -> None: + original_queue = event_queue.queue + getter_reached_get = asyncio.Event() - # Verify queue is now empty - assert event_queue.queue.empty() + class QueueWrapper: + def __getattr__(self, name): + return getattr(original_queue, name) + + async def get(self): + getter_reached_get.set() + return await original_queue.get() + + # Replace the underlying queue with a wrapper to intercept `get` + event_queue._queue = QueueWrapper() + + async def getter(): + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + + task = asyncio.create_task(getter()) + await getter_reached_get.wait() + + # At this point, getter is guaranteed to be awaiting the original_queue.get() + await event_queue.close(immediate=True) + await asyncio.wait_for(task, timeout=1.0) + + +@pytest.mark.parametrize( + 'immediate, expected_events, close_blocks', + [ + (False, (1, 1), True), + (True, (0, 0), False), + ], +) +@pytest.mark.asyncio +async def test_event_queue_close_behaviors( + event_queue: EventQueueLegacy, + immediate: bool, + expected_events: tuple[int, int], + close_blocks: bool, +) -> None: + expected_parent_events, expected_child_events = expected_events + child_queue = await event_queue.tap() + + msg = create_sample_message() + await event_queue.enqueue_event(msg) + + # We need deterministic event waiting to prevent sleep() + join_reached = asyncio.Event() + + # Apply wrappers so we know exactly when join() starts + event_queue._queue = QueueJoinWrapper(event_queue.queue, join_reached) + child_queue._queue = QueueJoinWrapper(child_queue.queue, join_reached) + + close_task = asyncio.create_task(event_queue.close(immediate=immediate)) + + if close_blocks: + await join_reached.wait() + assert not close_task.done(), ( + 'close() should block waiting for queue to be drained' + ) + else: + # We await it with a tiny timeout to ensure the task had time to run, + # but because immediate=True, it runs without blocking at all. + await asyncio.wait_for(close_task, timeout=0.1) + assert close_task.done(), 'close() should not block' + + # Verify parent queue state + if expected_parent_events == 0: + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + else: + assert await event_queue.dequeue_event() == msg + event_queue.task_done() + + # Verify child queue state + if expected_child_events == 0: + with pytest.raises(QueueShutDown): + await child_queue.dequeue_event() + else: + assert await child_queue.dequeue_event() == msg + child_queue.task_done() + + # Ensure close_task finishes cleanly + await asyncio.wait_for(close_task, timeout=1.0) diff --git a/tests/server/events/test_event_queue_v2.py b/tests/server/events/test_event_queue_v2.py new file mode 100644 index 000000000..27bceea4c --- /dev/null +++ b/tests/server/events/test_event_queue_v2.py @@ -0,0 +1,818 @@ +import asyncio +import logging + +from typing import Any + +import pytest +import pytest_asyncio + +from a2a.server.events.event_queue import ( + DEFAULT_MAX_QUEUE_SIZE, + EventQueue, + QueueShutDown, +) +from a2a.server.events.event_queue_v2 import ( + EventQueueSink, + EventQueueSource, +) +from a2a.server.jsonrpc_models import JSONRPCError +from a2a.types import ( + TaskNotFoundError, +) +from a2a.types.a2a_pb2 import ( + Artifact, + Message, + Part, + Role, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) + + +def create_sample_message(message_id: str = '111') -> Message: + """Create a sample Message proto object.""" + return Message( + message_id=message_id, + role=Role.ROLE_AGENT, + parts=[Part(text='test message')], + ) + + +def create_sample_task( + task_id: str = '123', context_id: str = 'session-xyz' +) -> Task: + """Create a sample Task proto object.""" + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + + +class QueueJoinWrapper: + """A wrapper to intercept and signal when `queue.join()` is called.""" + + def __init__(self, original: Any, join_reached: asyncio.Event) -> None: + self.original = original + self.join_reached = join_reached + + def __getattr__(self, name: str) -> Any: + return getattr(self.original, name) + + async def join(self) -> None: + self.join_reached.set() + await self.original.join() + + +@pytest_asyncio.fixture +async def event_queue() -> EventQueueSource: + return EventQueueSource() + + +@pytest.mark.asyncio +async def test_constructor_default_max_queue_size() -> None: + """Test that the queue is created with the default max size.""" + eq = EventQueueSource() + assert eq.queue.maxsize == DEFAULT_MAX_QUEUE_SIZE + + +@pytest.mark.asyncio +async def test_constructor_max_queue_size() -> None: + """Test that the asyncio.Queue is created with the specified max_queue_size.""" + custom_size = 123 + eq = EventQueueSource(max_queue_size=custom_size) + assert eq.queue.maxsize == custom_size + + +@pytest.mark.asyncio +async def test_constructor_invalid_max_queue_size() -> None: + """Test that a ValueError is raised for non-positive max_queue_size.""" + with pytest.raises( + ValueError, match='max_queue_size must be greater than 0' + ): + EventQueueSource(max_queue_size=0) + with pytest.raises( + ValueError, match='max_queue_size must be greater than 0' + ): + EventQueueSource(max_queue_size=-10) + + +@pytest.mark.asyncio +async def test_event_queue_async_context_manager( + event_queue: EventQueueSource, +) -> None: + """Test that EventQueue can be used as an async context manager.""" + async with event_queue as q: + assert q is event_queue + assert event_queue.is_closed() is False + assert event_queue.is_closed() is True + + +@pytest.mark.asyncio +async def test_event_queue_async_context_manager_on_exception( + event_queue: EventQueueSource, +) -> None: + """Test that close() is called even when an exception occurs inside the context.""" + with pytest.raises(RuntimeError, match='boom'): + async with event_queue: + raise RuntimeError('boom') + assert event_queue.is_closed() is True + + +@pytest.mark.asyncio +async def test_enqueue_and_dequeue_event(event_queue: EventQueueSource) -> None: + """Test that an event can be enqueued and dequeued.""" + event = create_sample_message() + await event_queue.enqueue_event(event) + dequeued_event = await event_queue.dequeue_event() + assert dequeued_event == event + + +@pytest.mark.asyncio +async def test_dequeue_event_wait(event_queue: EventQueueSource) -> None: + """Test dequeue_event with the default wait behavior.""" + event = TaskStatusUpdateEvent( + task_id='task_123', + context_id='session-xyz', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + await event_queue.enqueue_event(event) + dequeued_event = await event_queue.dequeue_event() + assert dequeued_event == event + + +@pytest.mark.asyncio +async def test_task_done(event_queue: EventQueueSource) -> None: + """Test the task_done method.""" + event = TaskArtifactUpdateEvent( + task_id='task_123', + context_id='session-xyz', + artifact=Artifact(artifact_id='11', parts=[Part(text='text')]), + ) + await event_queue.enqueue_event(event) + _ = await event_queue.dequeue_event() + event_queue.task_done() + + +@pytest.mark.asyncio +async def test_enqueue_different_event_types( + event_queue: EventQueueSource, +) -> None: + """Test enqueuing different types of events.""" + events: list[Any] = [ + TaskNotFoundError(), + JSONRPCError(code=111, message='rpc error'), + ] + for event in events: + await event_queue.enqueue_event(event) + dequeued_event = await event_queue.dequeue_event() + assert dequeued_event == event + + +@pytest.mark.asyncio +async def test_enqueue_event_propagates_to_children( + event_queue: EventQueueSource, +) -> None: + """Test that events are enqueued to tapped child queues.""" + child_queue1 = await event_queue.tap() + child_queue2 = await event_queue.tap() + + event1 = create_sample_message() + event2 = create_sample_task() + + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + + # Check parent queue + assert await event_queue.dequeue_event() == event1 + assert await event_queue.dequeue_event() == event2 + + # Check child queue 1 + assert await child_queue1.dequeue_event() == event1 + assert await child_queue1.dequeue_event() == event2 + + # Check child queue 2 + assert await child_queue2.dequeue_event() == event1 + assert await child_queue2.dequeue_event() == event2 + + +@pytest.mark.asyncio +async def test_enqueue_event_when_closed( + event_queue: EventQueueSource, + expected_queue_closed_exception: type[Exception], +) -> None: + """Test that no event is enqueued if the parent queue is closed.""" + await event_queue.close() # Close the queue first + + event = create_sample_message() + # Attempt to enqueue, should do nothing or log a warning as per implementation + await event_queue.enqueue_event(event) + + # Verify the queue is still empty + with pytest.raises(expected_queue_closed_exception): + await event_queue.dequeue_event() + + # Also verify child queues are not affected directly by parent's enqueue attempt when closed + # (though they would be closed too by propagation) + with pytest.raises(expected_queue_closed_exception): + await event_queue.tap() + + +@pytest.fixture +def expected_queue_closed_exception() -> type[Exception]: + return QueueShutDown + + +@pytest.mark.asyncio +async def test_dequeue_event_closed_and_empty( + event_queue: EventQueueSource, + expected_queue_closed_exception: type[Exception], +) -> None: + """Test dequeue_event raises QueueShutDown when closed and empty.""" + await event_queue.close() + assert event_queue.is_closed() + # Ensure queue is actually empty (e.g. by trying a non-blocking get on internal queue) + with pytest.raises(expected_queue_closed_exception): + event_queue.queue.get_nowait() + + with pytest.raises(expected_queue_closed_exception): + await event_queue.dequeue_event() + + +@pytest.mark.asyncio +async def test_tap_creates_child_queue(event_queue: EventQueueSource) -> None: + """Test that tap creates a new EventQueue and adds it to children.""" + initial_children_count = len(event_queue._sinks) + + child_queue = await event_queue.tap() + + assert isinstance(child_queue, EventQueue) + assert child_queue != event_queue # Ensure it's a new instance + assert len(event_queue._sinks) == initial_children_count + 1 + assert child_queue in event_queue._sinks + + # Test that the new child queue has the default max size (or specific if tap could configure it) + assert child_queue.queue.maxsize == DEFAULT_MAX_QUEUE_SIZE + + +@pytest.mark.asyncio +async def test_close_idempotent(event_queue: EventQueueSource) -> None: + await event_queue.close() + assert event_queue.is_closed() is True + await event_queue.close() + assert event_queue.is_closed() is True + + +@pytest.mark.asyncio +async def test_is_closed_reflects_state(event_queue: EventQueueSource) -> None: + """Test that is_closed() returns the correct state before and after closing.""" + assert event_queue.is_closed() is False # Initially open + + await event_queue.close() + + assert event_queue.is_closed() is True # Closed after calling close() + + +@pytest.mark.asyncio +async def test_close_with_immediate_true(event_queue: EventQueueSource) -> None: + """Test close with immediate=True clears events immediately.""" + # Add some events to the queue + event1 = create_sample_message() + event2 = create_sample_task() + await event_queue.enqueue_event(event1) + await event_queue.enqueue_event(event2) + await event_queue.test_only_join_incoming_queue() + + # Verify events are in queue + assert not event_queue.queue.empty() + + # Close with immediate=True + await event_queue.close(immediate=True) + + # Verify queue is closed and empty + assert event_queue.is_closed() is True + assert event_queue.queue.empty() + + +@pytest.mark.asyncio +async def test_close_immediate_propagates_to_children( + event_queue: EventQueueSource, +) -> None: + """Test that immediate parameter is propagated to child queues.""" + child_queue = await event_queue.tap() + + # Add events to both parent and child + event = create_sample_message() + await event_queue.enqueue_event(event) + await event_queue.test_only_join_incoming_queue() + + assert child_queue.is_closed() is False + assert child_queue.queue.empty() is False + + # close event queue + await event_queue.close(immediate=True) + + # Verify child queue was called and empty with immediate=True + assert child_queue.is_closed() is True + assert child_queue.queue.empty() + + +@pytest.mark.asyncio +async def test_close_graceful_waits_for_join_and_children( + event_queue: EventQueueSource, +) -> None: + child = await event_queue.tap() + await event_queue.enqueue_event(create_sample_message()) + + join_reached = asyncio.Event() + event_queue._default_sink._queue = QueueJoinWrapper( + event_queue.queue, join_reached + ) # type: ignore + child._queue = QueueJoinWrapper(child.queue, join_reached) # type: ignore + + close_task = asyncio.create_task(event_queue.close(immediate=False)) + await join_reached.wait() + + assert event_queue.is_closed() + assert child.is_closed() + assert not close_task.done() + + await event_queue.dequeue_event() + event_queue.task_done() + + await child.dequeue_event() + child.task_done() + + await asyncio.wait_for(close_task, timeout=1.0) + + +@pytest.mark.asyncio +async def test_close_propagates_to_children( + event_queue: EventQueueSource, +) -> None: + child_queue1 = await event_queue.tap() + child_queue2 = await event_queue.tap() + await event_queue.close() + assert child_queue1.is_closed() + assert child_queue2.is_closed() + + +@pytest.mark.asyncio +async def test_event_queue_dequeue_immediate_false( + event_queue: EventQueueSource, +) -> None: + msg = create_sample_message() + await event_queue.enqueue_event(msg) + await event_queue.test_only_join_incoming_queue() + # Start close in background so it can wait for join() + close_task = asyncio.create_task(event_queue.close(immediate=False)) + + # The event is still in the queue, we can dequeue it + assert await event_queue.dequeue_event() == msg + event_queue.task_done() + + await close_task + + # Queue is now empty and closed + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + + +@pytest.mark.asyncio +async def test_event_queue_dequeue_immediate_true( + event_queue: EventQueueSource, +) -> None: + msg = create_sample_message() + await event_queue.enqueue_event(msg) + await event_queue.close(immediate=True) + # The queue is immediately flushed, so dequeue should raise QueueShutDown + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + + +@pytest.mark.asyncio +async def test_event_queue_enqueue_when_closed( + event_queue: EventQueueSource, +) -> None: + await event_queue.close(immediate=True) + msg = create_sample_message() + await event_queue.enqueue_event(msg) + # Enqueue should have returned without doing anything + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + + +@pytest.mark.asyncio +async def test_event_queue_shutdown_wakes_getter( + event_queue: EventQueueSource, +) -> None: + original_queue = event_queue.queue + getter_reached_get = asyncio.Event() + + class QueueWrapper: + def __getattr__(self, name): + return getattr(original_queue, name) + + async def get(self): + getter_reached_get.set() + return await original_queue.get() + + # Replace the underlying queue with a wrapper to intercept `get` + event_queue._default_sink._queue = QueueWrapper() # type: ignore + + async def getter(): + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + + task = asyncio.create_task(getter()) + await getter_reached_get.wait() + + # At this point, getter is guaranteed to be awaiting the original_queue.get() + await event_queue.close(immediate=True) + await asyncio.wait_for(task, timeout=1.0) + + +@pytest.mark.parametrize( + 'immediate, expected_events, close_blocks', + [ + (False, (1, 1), True), + (True, (0, 0), False), + ], +) +@pytest.mark.asyncio +async def test_event_queue_close_behaviors( + event_queue: EventQueueSource, + immediate: bool, + expected_events: tuple[int, int], + close_blocks: bool, +) -> None: + expected_parent_events, expected_child_events = expected_events + child_queue = await event_queue.tap() + + msg = create_sample_message() + await event_queue.enqueue_event(msg) + + # We need deterministic event waiting to prevent sleep() + join_reached = asyncio.Event() + + # Apply wrappers so we know exactly when join() starts + event_queue._default_sink._queue = QueueJoinWrapper( + event_queue.queue, join_reached + ) # type: ignore + child_queue._queue = QueueJoinWrapper(child_queue.queue, join_reached) # type: ignore + + close_task = asyncio.create_task(event_queue.close(immediate=immediate)) + + if close_blocks: + await join_reached.wait() + assert not close_task.done(), ( + 'close() should block waiting for queue to be drained' + ) + else: + # We await it with a tiny timeout to ensure the task had time to run, + # but because immediate=True, it runs without blocking at all. + await asyncio.wait_for(close_task, timeout=0.1) + assert close_task.done(), 'close() should not block' + + # Verify parent queue state + if expected_parent_events == 0: + with pytest.raises(QueueShutDown): + await event_queue.dequeue_event() + else: + assert await event_queue.dequeue_event() == msg + event_queue.task_done() + + # Verify child queue state + if expected_child_events == 0: + with pytest.raises(QueueShutDown): + await child_queue.dequeue_event() + else: + assert await child_queue.dequeue_event() == msg + child_queue.task_done() + + # Ensure close_task finishes cleanly + await asyncio.wait_for(close_task, timeout=1.0) + + +@pytest.mark.asyncio +async def test_sink_only_raises_on_enqueue() -> None: + """Test that enqueuing to a sink-only queue raises an error.""" + parent = EventQueueSource() + sink_queue = EventQueueSink(parent=parent) + event = create_sample_message() + with pytest.raises( + RuntimeError, match='Cannot enqueue to a sink-only queue' + ): + await sink_queue.enqueue_event(event) + + +@pytest.mark.asyncio +async def test_tap_creates_sink_only_queue( + event_queue: EventQueueSource, +) -> None: + """Test that tap() creates a child queue that is sink-only.""" + child_queue = await event_queue.tap() + assert hasattr(child_queue, '_parent') and child_queue._parent is not None # type: ignore + + event = create_sample_message() + with pytest.raises( + RuntimeError, match='Cannot enqueue to a sink-only queue' + ): + await child_queue.enqueue_event(event) + + +@pytest.mark.asyncio +async def test_tap_attaches_to_top_parent( + event_queue: EventQueueSource, +) -> None: + """Test that tap() on a child queue attaches the new queue to the top parent.""" + # First level child + child1 = await event_queue.tap() + + # Second level child (tapped from child1) + child2 = await child1.tap() + + # The top parent should have both child1 and child2 in its children list + assert child1 in event_queue._sinks + assert child2 in event_queue._sinks + + # child1 should not have any children, because tap() attaches to top parent + assert True # Child does not have children anymore + + # Ensure events still flow to all queues + event = create_sample_message() + await event_queue.enqueue_event(event) + + +@pytest.mark.asyncio +async def test_concurrent_enqueue_order_preserved() -> None: + """ + Verifies that concurrent enqueues to a parent queue are preserved in + the exact same order in all child queues due to root serialization. + """ + parent = EventQueueSource() + child = await parent.tap() + + events = [create_sample_message(message_id=str(i)) for i in range(100)] + + # Enqueue all concurrently + await asyncio.gather(*(parent.enqueue_event(e) for e in events)) + + parent_events = [] + while not parent.queue.empty(): + parent_events.append(await parent.dequeue_event()) + parent.task_done() + + child_events = [] + while not child.queue.empty(): + child_events.append(await child.dequeue_event()) + child.task_done() + + assert parent_events == child_events, ( + 'Order mismatch! Locking failed to serialize enqueues.' + ) + + +@pytest.mark.asyncio +async def test_dispatch_task_failed(event_queue: EventQueueSource) -> None: + event_queue._dispatcher_task.cancel() + with pytest.raises(asyncio.CancelledError): + await event_queue._dispatcher_task + + event = create_sample_message() + await event_queue.enqueue_event(event) + + with pytest.raises(QueueShutDown): + await asyncio.wait_for(event_queue.dequeue_event(), timeout=0.1) + + # Event was never dequeued, but close() should still work after dispatcher was force cancelled. + await asyncio.wait_for(event_queue.close(immediate=False), timeout=0.1) + + +@pytest.mark.asyncio +async def test_concurrent_close_immediate_false() -> None: + """Test that concurrent close(immediate=False) calls both wait for join() deterministically.""" + queue = EventQueueSource() + sink = await queue.tap() + + event_arrived = asyncio.Event() + original_put_internal = sink._put_internal # type: ignore + + async def mock_put_internal(msg: Any) -> None: + await original_put_internal(msg) + event_arrived.set() + + sink._put_internal = mock_put_internal # type: ignore + + event = Message() + await queue.enqueue_event(event) + + # Deterministically wait for the event to be processed and reach the sink + await asyncio.wait_for(event_arrived.wait(), timeout=1.0) + + class CustomJoinWrapper: + def __init__(self, original: Any) -> None: + self.original = original + self.join_count = 0 + self.join_started_1 = asyncio.Event() + self.join_started_2 = asyncio.Event() + + def __getattr__(self, name: str) -> Any: + return getattr(self.original, name) + + async def join(self) -> None: + self.join_count += 1 + if self.join_count == 1: + self.join_started_1.set() + elif self.join_count == 2: + self.join_started_2.set() + await self.original.join() + + wrapper = CustomJoinWrapper(sink._queue) # type: ignore + sink._queue = wrapper # type: ignore + + close_task_1 = asyncio.create_task(sink.close(immediate=False)) + # Wait deterministically until the first close call reaches await queue.join() + await asyncio.wait_for(wrapper.join_started_1.wait(), timeout=1.0) + assert not close_task_1.done() + + close_task_2 = asyncio.create_task(sink.close(immediate=False)) + # Wait deterministically until the second close call reaches await queue.join() + await asyncio.wait_for(wrapper.join_started_2.wait(), timeout=1.0) + assert not close_task_2.done() + + # To clean up and allow the queue to finish joining + await sink.dequeue_event() + sink.task_done() + + # Now both tasks should complete + await asyncio.wait_for( + asyncio.gather(close_task_1, close_task_2), timeout=1.0 + ) + + +@pytest.mark.asyncio +async def test_dispatch_loop_logs_exceptions( + event_queue: EventQueueSource, caplog: pytest.LogCaptureFixture +) -> None: + """Test that exceptions raised by sinks during dispatch are logged.""" + caplog.set_level(logging.ERROR) + sink = await event_queue.tap() + + async def mock_put_internal(event: Any) -> None: + raise RuntimeError('simulated error') + + sink._put_internal = mock_put_internal # type: ignore + + msg = create_sample_message() + await event_queue.enqueue_event(msg) + + # Wait for dispatch loop to process + await event_queue.test_only_join_incoming_queue() + + assert any( + record.levelname == 'ERROR' + and 'Error dispatching event to sink' in record.message + for record in caplog.records + ) + + +@pytest.mark.asyncio +async def test_join_incoming_queue_cancels_join_task( + event_queue: EventQueueSource, +) -> None: + """Test that _join_incoming_queue cancels join_task on CancelledError.""" + # Tap a sink and block its processing so dispatcher and join() hang + sink = await event_queue.tap() + block_event = asyncio.Event() + + async def mock_put_internal(event: Any) -> None: + await block_event.wait() + + sink._put_internal = mock_put_internal # type: ignore + + # Enqueue a message so join() blocks + await event_queue.enqueue_event(create_sample_message()) + + join_reached = asyncio.Event() + event_queue._incoming_queue = QueueJoinWrapper( # type: ignore + event_queue._incoming_queue, join_reached + ) + + join_task = asyncio.create_task(event_queue._join_incoming_queue()) + + # Wait deterministically until the internal task calls join() + await join_reached.wait() + + # Cancel the wrapper task + join_task.cancel() + + with pytest.raises(asyncio.CancelledError): + await join_task + + # Unblock the sink and clean up + block_event.set() + await event_queue.dequeue_event() + event_queue.task_done() + + +@pytest.mark.asyncio +async def test_event_queue_capacity_order_and_concurrency() -> None: + """Test that EventQueue preserves order and handles concurrency with limited capacity.""" + queue = EventQueueSource(max_queue_size=5) + + # Create 10 tapped queues + tapped_queues = [await queue.tap(max_queue_size=5) for _ in range(10)] + all_queues: list[EventQueue] = [queue] + tapped_queues # type: ignore + + async def producer() -> None: + for i in range(100): + await queue.enqueue_event(create_sample_message(message_id=str(i))) + + async def consumer(q: EventQueue) -> None: + for expected_i in range(100): + event = await q.dequeue_event() + assert isinstance(event, Message) + assert event.message_id == str(expected_i) + q.task_done() + + consumer_tasks = [asyncio.create_task(consumer(q)) for q in all_queues] + producer_task = asyncio.create_task(producer()) + + await asyncio.wait_for( + asyncio.gather(producer_task, *consumer_tasks), timeout=1.0 + ) + + await queue.close(immediate=True) + + +@pytest.mark.asyncio +async def test_event_queue_blocking_behavior() -> None: + _PARENT_QUEUE_SIZE = 10 + _TAPPED_QUEUE_SIZE = 15 + + queue = EventQueueSource(max_queue_size=_PARENT_QUEUE_SIZE) + # tapped_queue initially has no consumer, so it will block. + tapped_queue = await queue.tap(max_queue_size=_TAPPED_QUEUE_SIZE) + + producer_task_done = asyncio.Event() + enqueued_count = 0 + + async def producer() -> None: + nonlocal enqueued_count + for i in range(50): + event = create_sample_message(message_id=str(i)) + await queue.enqueue_event(event) + enqueued_count += 1 + producer_task_done.set() + + consumed_first = [] + + async def consumer_first() -> None: + while True: + try: + event = await queue.dequeue_event() + consumed_first.append(event) + queue.task_done() + except QueueShutDown: + break + + consumer_first_task = asyncio.create_task(consumer_first()) + producer_task = asyncio.create_task(producer()) + + # Wait to let the producer fill the queues and confirm it is blocked + with pytest.raises(asyncio.TimeoutError): + await asyncio.wait_for(producer_task_done.wait(), timeout=0.1) + + # Validate that: first consumer receives _TAPPED_QUEUE_SIZE + 1 items. + # Other items are blocking trying to be enqueued to second queue. + assert len(consumed_first) == _TAPPED_QUEUE_SIZE + 1 + + # Validate that: once child queue is blocked, parent will continue + # processing other items until it reaches its capacity as well. + assert not producer_task.done() + assert enqueued_count == _PARENT_QUEUE_SIZE + _TAPPED_QUEUE_SIZE + 1 + + consumed_second = [] + + # create a consumer for second queue. + async def consumer_second() -> None: + while True: + try: + event = await tapped_queue.dequeue_event() + consumed_second.append(event) + tapped_queue.task_done() + except QueueShutDown: + break + + consumer_second_task = asyncio.create_task(consumer_second()) + await asyncio.wait_for(producer_task_done.wait(), timeout=1.0) + await queue.close(immediate=False) + await asyncio.gather(consumer_first_task, consumer_second_task) + + # Validate that: after unblocking second consumer everything ends smoothly. + assert len(consumed_first) == 50 + assert len(consumed_second) == 50 diff --git a/tests/server/events/test_inmemory_queue_manager.py b/tests/server/events/test_inmemory_queue_manager.py index b51334a95..9716b13bf 100644 --- a/tests/server/events/test_inmemory_queue_manager.py +++ b/tests/server/events/test_inmemory_queue_manager.py @@ -5,7 +5,7 @@ import pytest from a2a.server.events import InMemoryQueueManager -from a2a.server.events.event_queue import EventQueue +from a2a.server.events.event_queue import EventQueueLegacy from a2a.server.events.queue_manager import ( NoTaskQueue, TaskQueueExists, @@ -21,7 +21,7 @@ def queue_manager(self) -> InMemoryQueueManager: @pytest.fixture def event_queue(self) -> MagicMock: """Fixture to create a mock EventQueue.""" - queue = MagicMock(spec=EventQueue) + queue = MagicMock(spec=EventQueueLegacy) # Mock the tap method to return itself queue.tap.return_value = queue @@ -119,7 +119,7 @@ async def test_create_or_tap_new_queue( task_id = 'test_task_id' result = await queue_manager.create_or_tap(task_id) - assert isinstance(result, EventQueue) + assert isinstance(result, EventQueueLegacy) assert queue_manager._task_queue[task_id] == result @pytest.mark.asyncio @@ -142,7 +142,7 @@ async def test_concurrency( """Test concurrent access to the queue manager.""" async def add_task(task_id): - queue = EventQueue() + queue = EventQueueLegacy() await queue_manager.add(task_id, queue) return task_id diff --git a/tests/server/request_handlers/__init__.py b/tests/server/request_handlers/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/server/request_handlers/test_default_request_handler.py b/tests/server/request_handlers/test_default_request_handler.py index ec2956fa2..5a2bf0446 100644 --- a/tests/server/request_handlers/test_default_request_handler.py +++ b/tests/server/request_handlers/test_default_request_handler.py @@ -2,7 +2,9 @@ import contextlib import logging import time +import uuid +from typing import cast from unittest.mock import ( AsyncMock, MagicMock, @@ -12,6 +14,7 @@ import pytest +from a2a.auth.user import UnauthenticatedUser from a2a.server.agent_execution import ( AgentExecutor, RequestContext, @@ -19,8 +22,15 @@ SimpleRequestContextBuilder, ) from a2a.server.context import ServerCallContext -from a2a.server.events import EventQueue, InMemoryQueueManager, QueueManager -from a2a.server.request_handlers import DefaultRequestHandler +from a2a.server.events import ( + EventQueue, + EventQueueLegacy, + InMemoryQueueManager, + QueueManager, +) +from a2a.server.request_handlers import ( + LegacyRequestHandler as DefaultRequestHandler, +) from a2a.server.tasks import ( InMemoryPushNotificationConfigStore, InMemoryTaskStore, @@ -31,43 +41,56 @@ TaskUpdater, ) from a2a.types import ( - DeleteTaskPushNotificationConfigParams, - GetTaskPushNotificationConfigParams, + ExtendedAgentCardNotConfiguredError, InternalError, InvalidParamsError, - ListTaskPushNotificationConfigParams, + PushNotificationNotSupportedError, + TaskNotCancelableError, + TaskNotFoundError, + UnsupportedOperationError, +) +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + Artifact, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigRequest, + GetExtendedAgentCardRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + ListTasksResponse, Message, - MessageSendConfiguration, - MessageSendParams, Part, - PushNotificationConfig, Role, + SendMessageConfiguration, + SendMessageRequest, + SubscribeToTaskRequest, Task, - TaskIdParams, - TaskNotFoundError, TaskPushNotificationConfig, - TaskQueryParams, TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, - UnsupportedOperationError, ) -from a2a.utils import ( - new_task, +from a2a.helpers.proto_helpers import ( + new_text_message, + new_task_from_user_message, ) -class DummyAgentExecutor(AgentExecutor): +class MockAgentExecutor(AgentExecutor): async def execute(self, context: RequestContext, event_queue: EventQueue): task_updater = TaskUpdater( - event_queue, context.task_id, context.context_id + event_queue, + context.task_id, # type: ignore[arg-type] + context.context_id, # type: ignore[arg-type] ) async for i in self._run(): - parts = [Part(root=TextPart(text=f'Event {i}'))] + parts = [Part(text=f'Event {i}')] try: await task_updater.update_status( - TaskState.working, + TaskState.TASK_STATE_WORKING, message=task_updater.new_agent_message(parts), ) except RuntimeError: @@ -84,7 +107,9 @@ async def cancel(self, context: RequestContext, event_queue: EventQueue): # Helper to create a simple task for tests def create_sample_task( - task_id='task1', status_state=TaskState.submitted, context_id='ctx1' + task_id='task1', + status_state=TaskState.TASK_STATE_SUBMITTED, + context_id='ctx1', ) -> Task: return Task( id=task_id, @@ -96,18 +121,29 @@ def create_sample_task( # Helper to create ServerCallContext def create_server_call_context() -> ServerCallContext: # Assuming UnauthenticatedUser is available or can be imported - from a2a.auth.user import UnauthenticatedUser return ServerCallContext(user=UnauthenticatedUser()) -def test_init_default_dependencies(): +@pytest.fixture +def agent_card(): + """Provides a standard AgentCard with streaming and push notifications enabled for tests.""" + return AgentCard( + name='test_agent', + version='1.0', + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + ) + + +def test_init_default_dependencies(agent_card): """Test that default dependencies are created if not provided.""" - agent_executor = DummyAgentExecutor() + agent_executor = MockAgentExecutor() task_store = InMemoryTaskStore() handler = DefaultRequestHandler( - agent_executor=agent_executor, task_store=task_store + agent_executor=agent_executor, + task_store=task_store, + agent_card=agent_card, ) assert isinstance(handler._queue_manager, InMemoryQueueManager) @@ -124,27 +160,147 @@ def test_init_default_dependencies(): @pytest.mark.asyncio -async def test_on_get_task_not_found(): +async def test_on_get_task_not_found(agent_card): """Test on_get_task when task_store.get returns None.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = TaskQueryParams(id='non_existent_task') - - from a2a.utils.errors import ServerError # Local import for ServerError + params = GetTaskRequest(id='non_existent_task') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError): await request_handler.on_get_task(params, context) - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with('non_existent_task', context) +@pytest.mark.asyncio +async def test_on_list_tasks_success(agent_card): + """Test on_list_tasks successfully returns a page of tasks .""" + mock_task_store = AsyncMock(spec=TaskStore) + task2 = create_sample_task(task_id='task2') + task2.artifacts.extend( + [ + Artifact( + artifact_id='artifact1', + parts=[Part(text='Hello world!')], + name='conversion_result', + ) + ] + ) + mock_page = ListTasksResponse( + tasks=[ + create_sample_task(task_id='task1'), + task2, + ], + next_page_token='123', + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(include_artifacts=True, page_size=10) + context = create_server_call_context() + + result = await request_handler.on_list_tasks(params, context) + + mock_task_store.list.assert_awaited_once_with(params, context) + assert result.tasks == mock_page.tasks + assert result.next_page_token == mock_page.next_page_token + + +@pytest.mark.asyncio +async def test_on_list_tasks_excludes_artifacts(agent_card): + """Test on_list_tasks excludes artifacts from returned tasks.""" + mock_task_store = AsyncMock(spec=TaskStore) + task2 = create_sample_task(task_id='task2') + task2.artifacts.extend( + [ + Artifact( + artifact_id='artifact1', + parts=[Part(text='Hello world!')], + name='conversion_result', + ) + ] + ) + mock_page = ListTasksResponse( + tasks=[ + create_sample_task(task_id='task1'), + task2, + ], + next_page_token='123', + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(include_artifacts=False, page_size=10) + context = create_server_call_context() + + result = await request_handler.on_list_tasks(params, context) + + assert not result.tasks[1].artifacts + + +@pytest.mark.asyncio +async def test_on_list_tasks_applies_history_length(agent_card): + """Test on_list_tasks applies history length filter.""" + mock_task_store = AsyncMock(spec=TaskStore) + history = [ + new_text_message('Hello 1!'), + new_text_message('Hello 2!'), + ] + task2 = create_sample_task(task_id='task2') + task2.history.extend(history) + mock_page = ListTasksResponse( + tasks=[ + create_sample_task(task_id='task1'), + task2, + ], + next_page_token='123', + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(history_length=1, page_size=10) + context = create_server_call_context() + + result = await request_handler.on_list_tasks(params, context) + + assert result.tasks[1].history == [history[1]] + + +@pytest.mark.asyncio +async def test_on_list_tasks_negative_history_length_error(agent_card): + """Test on_list_tasks raises error for negative history length.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(history_length=-1, page_size=10) + context = create_server_call_context() + + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + + assert 'history length must be non-negative' in exc_info.value.message + + @pytest.mark.asyncio async def test_on_cancel_task_task_not_found(): """Test on_cancel_task when the task is not found.""" @@ -152,24 +308,23 @@ async def test_on_cancel_task_task_not_found(): mock_task_store.get.return_value = None request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = TaskIdParams(id='task_not_found_for_cancel') - - from a2a.utils.errors import ServerError # Local import + params = CancelTaskRequest(id='task_not_found_for_cancel') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError): await request_handler.on_cancel_task(params, context) - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with( 'task_not_found_for_cancel', context ) @pytest.mark.asyncio -async def test_on_cancel_task_queue_tap_returns_none(): +async def test_on_cancel_task_queue_tap_returns_none(agent_card): """Test on_cancel_task when queue_manager.tap returns None.""" mock_task_store = AsyncMock(spec=TaskStore) sample_task = create_sample_task(task_id='tap_none_task') @@ -189,7 +344,7 @@ async def test_on_cancel_task_queue_tap_returns_none(): mock_result_aggregator_instance.consume_all.return_value = ( create_sample_task( task_id='tap_none_task', - status_state=TaskState.canceled, # Expected final state + status_state=TaskState.TASK_STATE_CANCELED, # Expected final state ) ) @@ -197,6 +352,7 @@ async def test_on_cancel_task_queue_tap_returns_none(): agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) context = create_server_call_context() @@ -204,7 +360,7 @@ async def test_on_cancel_task_queue_tap_returns_none(): 'a2a.server.request_handlers.default_request_handler.ResultAggregator', return_value=mock_result_aggregator_instance, ): - params = TaskIdParams(id='tap_none_task') + params = CancelTaskRequest(id='tap_none_task') result_task = await request_handler.on_cancel_task(params, context) mock_task_store.get.assert_awaited_once_with('tap_none_task', context) @@ -220,11 +376,11 @@ async def test_on_cancel_task_queue_tap_returns_none(): mock_result_aggregator_instance.consume_all.assert_awaited_once() assert result_task is not None - assert result_task.status.state == TaskState.canceled + assert result_task.status.state == TaskState.TASK_STATE_CANCELED @pytest.mark.asyncio -async def test_on_cancel_task_cancels_running_agent(): +async def test_on_cancel_task_cancels_running_agent(agent_card): """Test on_cancel_task cancels a running agent task.""" task_id = 'running_agent_task_to_cancel' sample_task = create_sample_task(task_id=task_id) @@ -232,7 +388,7 @@ async def test_on_cancel_task_cancels_running_agent(): mock_task_store.get.return_value = sample_task mock_queue_manager = AsyncMock(spec=QueueManager) - mock_event_queue = AsyncMock(spec=EventQueue) + mock_event_queue = AsyncMock(spec=EventQueueLegacy) mock_queue_manager.tap.return_value = mock_event_queue mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -240,13 +396,16 @@ async def test_on_cancel_task_cancels_running_agent(): # Mock ResultAggregator mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) mock_result_aggregator_instance.consume_all.return_value = ( - create_sample_task(task_id=task_id, status_state=TaskState.canceled) + create_sample_task( + task_id=task_id, status_state=TaskState.TASK_STATE_CANCELED + ) ) request_handler = DefaultRequestHandler( agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) # Simulate a running agent task @@ -258,7 +417,7 @@ async def test_on_cancel_task_cancels_running_agent(): 'a2a.server.request_handlers.default_request_handler.ResultAggregator', return_value=mock_result_aggregator_instance, ): - params = TaskIdParams(id=task_id) + params = CancelTaskRequest(id=f'{task_id}') await request_handler.on_cancel_task(params, context) mock_producer_task.cancel.assert_called_once() @@ -266,7 +425,7 @@ async def test_on_cancel_task_cancels_running_agent(): @pytest.mark.asyncio -async def test_on_cancel_task_completes_during_cancellation(): +async def test_on_cancel_task_completes_during_cancellation(agent_card): """Test on_cancel_task fails to cancel a task due to concurrent task completion.""" task_id = 'running_agent_task_to_cancel' sample_task = create_sample_task(task_id=task_id) @@ -274,7 +433,7 @@ async def test_on_cancel_task_completes_during_cancellation(): mock_task_store.get.return_value = sample_task mock_queue_manager = AsyncMock(spec=QueueManager) - mock_event_queue = AsyncMock(spec=EventQueue) + mock_event_queue = AsyncMock(spec=EventQueueLegacy) mock_queue_manager.tap.return_value = mock_event_queue mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -282,41 +441,38 @@ async def test_on_cancel_task_completes_during_cancellation(): # Mock ResultAggregator mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) mock_result_aggregator_instance.consume_all.return_value = ( - create_sample_task(task_id=task_id, status_state=TaskState.completed) + create_sample_task( + task_id=task_id, status_state=TaskState.TASK_STATE_COMPLETED + ) ) request_handler = DefaultRequestHandler( agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) # Simulate a running agent task mock_producer_task = AsyncMock(spec=asyncio.Task) request_handler._running_agents[task_id] = mock_producer_task - from a2a.utils.errors import ( - ServerError, # Local import - TaskNotCancelableError, # Local import - ) - with patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', return_value=mock_result_aggregator_instance, ): - params = TaskIdParams(id=task_id) - with pytest.raises(ServerError) as exc_info: + params = CancelTaskRequest(id=f'{task_id}') + with pytest.raises(TaskNotCancelableError): await request_handler.on_cancel_task( params, create_server_call_context() ) mock_producer_task.cancel.assert_called_once() mock_agent_executor.cancel.assert_awaited_once() - assert isinstance(exc_info.value.error, TaskNotCancelableError) @pytest.mark.asyncio -async def test_on_cancel_task_invalid_result_type(): +async def test_on_cancel_task_invalid_result_type(agent_card): """Test on_cancel_task when result_aggregator returns a Message instead of a Task.""" task_id = 'cancel_invalid_result_task' sample_task = create_sample_task(task_id=task_id) @@ -324,7 +480,7 @@ async def test_on_cancel_task_invalid_result_type(): mock_task_store.get.return_value = sample_task mock_queue_manager = AsyncMock(spec=QueueManager) - mock_event_queue = AsyncMock(spec=EventQueue) + mock_event_queue = AsyncMock(spec=EventQueueLegacy) mock_queue_manager.tap.return_value = mock_event_queue mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -332,36 +488,36 @@ async def test_on_cancel_task_invalid_result_type(): # Mock ResultAggregator to return a Message mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) mock_result_aggregator_instance.consume_all.return_value = Message( - message_id='unexpected_msg', role=Role.agent, parts=[] + message_id='unexpected_msg', + role=Role.ROLE_AGENT, + parts=[Part(text='Test')], ) request_handler = DefaultRequestHandler( agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) - from a2a.utils.errors import ServerError # Local import - with patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', return_value=mock_result_aggregator_instance, ): - params = TaskIdParams(id=task_id) - with pytest.raises(ServerError) as exc_info: + params = CancelTaskRequest(id=f'{task_id}') + with pytest.raises(InternalError) as exc_info: await request_handler.on_cancel_task( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, InternalError) assert ( 'Agent did not return valid response for cancel' - in exc_info.value.error.message - ) # type: ignore + in exc_info.value.message + ) @pytest.mark.asyncio -async def test_on_message_send_with_push_notification(): +async def test_on_message_send_with_push_notification(agent_card): """Test on_message_send sets push notification info if provided.""" mock_task_store = AsyncMock(spec=TaskStore) mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) @@ -371,7 +527,9 @@ async def test_on_message_send_with_push_notification(): task_id = 'push_task_1' context_id = 'push_ctx_1' sample_initial_task = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.submitted + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_SUBMITTED, ) # TaskManager will be created inside on_message_send. @@ -395,18 +553,19 @@ async def test_on_message_send_with_push_notification(): task_store=mock_task_store, push_config_store=mock_push_notification_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - push_config = PushNotificationConfig(url='http://callback.com/push') - message_config = MessageSendConfiguration( - push_notification_config=push_config, + push_config = TaskPushNotificationConfig(url='http://callback.com/push') + message_config = SendMessageConfiguration( + task_push_notification_config=push_config, accepted_output_modes=['text/plain'], # Added required field ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_push', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ), @@ -416,7 +575,9 @@ async def test_on_message_send_with_push_notification(): # Mock ResultAggregator and its consume_and_break_on_interrupt mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) final_task_result = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.completed + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_COMPLETED, ) mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( final_task_result, @@ -424,15 +585,16 @@ async def test_on_message_send_with_push_notification(): None, ) - # Mock the current_result property to return the final task result - async def get_current_result(): + # Mock the current_result async property to return the final task result + # current_result is an async property, so accessing it returns a coroutine + async def mock_current_result(): return final_task_result - # Configure the 'current_result' property on the type of the mock instance - type(mock_result_aggregator_instance).current_result = PropertyMock( - return_value=get_current_result() + type(mock_result_aggregator_instance).current_result = property( + lambda self: mock_current_result() ) + context = create_server_call_context() with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -447,19 +609,19 @@ async def get_current_result(): return_value=sample_initial_task, ), ): # Ensure task object is returned - await request_handler.on_message_send( - params, create_server_call_context() - ) + await request_handler.on_message_send(params, context) mock_push_notification_store.set_info.assert_awaited_once_with( - task_id, push_config + task_id, push_config, context ) # Other assertions for full flow if needed (e.g., agent execution) mock_agent_executor.execute.assert_awaited_once() @pytest.mark.asyncio -async def test_on_message_send_with_push_notification_in_non_blocking_request(): +async def test_on_message_send_with_push_notification_in_non_blocking_request( + agent_card, +): """Test that push notification callback is called during background event processing for non-blocking requests.""" mock_task_store = AsyncMock(spec=TaskStore) mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) @@ -472,12 +634,16 @@ async def test_on_message_send_with_push_notification_in_non_blocking_request(): # Create a task that will be returned after the first event initial_task = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.working + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_WORKING, ) # Create a final task that will be available during background processing final_task = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.completed + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_COMPLETED, ) mock_task_store.get.return_value = None @@ -494,20 +660,21 @@ async def test_on_message_send_with_push_notification_in_non_blocking_request(): push_config_store=mock_push_notification_store, request_context_builder=mock_request_context_builder, push_sender=mock_push_sender, + agent_card=agent_card, ) # Configure push notification - push_config = PushNotificationConfig(url='http://callback.com/push') - message_config = MessageSendConfiguration( - push_notification_config=push_config, + push_config = TaskPushNotificationConfig(url='http://callback.com/push') + message_config = SendMessageConfiguration( + task_push_notification_config=push_config, accepted_output_modes=['text/plain'], - blocking=False, # Non-blocking request + return_immediately=True, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_non_blocking', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ), @@ -524,12 +691,13 @@ async def test_on_message_send_with_push_notification_in_non_blocking_request(): MagicMock(spec=asyncio.Task), # background task ) - # Mock the current_result property to return the final task - async def get_current_result(): + # Mock the current_result async property to return the final task + # current_result is an async property, so accessing it returns a coroutine + async def mock_current_result(): return final_task - type(mock_result_aggregator_instance).current_result = PropertyMock( - return_value=get_current_result() + type(mock_result_aggregator_instance).current_result = property( + lambda self: mock_current_result() ) # Track if the event_callback was passed to consume_and_break_on_interrupt @@ -542,6 +710,8 @@ async def mock_consume_and_break_on_interrupt( nonlocal event_callback_passed, event_callback_received event_callback_passed = event_callback is not None event_callback_received = event_callback + if event_callback_received: + await event_callback_received(final_task) return ( initial_task, True, @@ -552,6 +722,7 @@ async def mock_consume_and_break_on_interrupt( mock_consume_and_break_on_interrupt ) + context = create_server_call_context() with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -567,9 +738,7 @@ async def mock_consume_and_break_on_interrupt( ), ): # Execute the non-blocking request - result = await request_handler.on_message_send( - params, create_server_call_context() - ) + result = await request_handler.on_message_send(params, context) # Verify the result is the initial task (non-blocking behavior) assert result == initial_task @@ -583,16 +752,18 @@ async def mock_consume_and_break_on_interrupt( ) # Verify that the push notification was sent with the final task - mock_push_sender.send_notification.assert_called_with(final_task) + mock_push_sender.send_notification.assert_called_with(task_id, final_task) # Verify that the push notification config was stored mock_push_notification_store.set_info.assert_awaited_once_with( - task_id, push_config + task_id, push_config, context ) @pytest.mark.asyncio -async def test_on_message_send_with_push_notification_no_existing_Task(): +async def test_on_message_send_with_push_notification_no_existing_Task( + agent_card, +): """Test on_message_send for new task sets push notification info if provided.""" mock_task_store = AsyncMock(spec=TaskStore) mock_push_notification_store = AsyncMock(spec=PushNotificationConfigStore) @@ -617,22 +788,29 @@ async def test_on_message_send_with_push_notification_no_existing_Task(): task_store=mock_task_store, push_config_store=mock_push_notification_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - push_config = PushNotificationConfig(url='http://callback.com/push') - message_config = MessageSendConfiguration( - push_notification_config=push_config, + push_config = TaskPushNotificationConfig(url='http://callback.com/push') + message_config = SendMessageConfiguration( + task_push_notification_config=push_config, accepted_output_modes=['text/plain'], # Added required field ) - params = MessageSendParams( - message=Message(role=Role.user, message_id='msg_push', parts=[]), + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_push', + parts=[Part(text='Test')], + ), configuration=message_config, ) # Mock ResultAggregator and its consume_and_break_on_interrupt mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) final_task_result = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.completed + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_COMPLETED, ) mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( final_task_result, @@ -640,15 +818,16 @@ async def test_on_message_send_with_push_notification_no_existing_Task(): None, ) - # Mock the current_result property to return the final task result - async def get_current_result(): + # Mock the current_result async property to return the final task result + # current_result is an async property, so accessing it returns a coroutine + async def mock_current_result(): return final_task_result - # Configure the 'current_result' property on the type of the mock instance - type(mock_result_aggregator_instance).current_result = PropertyMock( - return_value=get_current_result() + type(mock_result_aggregator_instance).current_result = property( + lambda self: mock_current_result() ) + context = create_server_call_context() with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -659,20 +838,18 @@ async def get_current_result(): return_value=None, ), ): - await request_handler.on_message_send( - params, create_server_call_context() - ) + await request_handler.on_message_send(params, context) mock_push_notification_store.set_info.assert_awaited_once_with( - task_id, push_config + task_id, push_config, context ) # Other assertions for full flow if needed (e.g., agent execution) mock_agent_executor.execute.assert_awaited_once() @pytest.mark.asyncio -async def test_on_message_send_no_result_from_aggregator(): - """Test on_message_send when aggregator returns (None, False).""" +async def test_on_message_send_no_result_from_aggregator(agent_card): + """Test on_message_send when aggregator returns (None, False). Completes unsuccessfully and raises InternalError.""" mock_task_store = AsyncMock(spec=TaskStore) mock_agent_executor = AsyncMock(spec=AgentExecutor) mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) @@ -687,9 +864,14 @@ async def test_on_message_send_no_result_from_aggregator(): agent_executor=mock_agent_executor, task_store=mock_task_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( - message=Message(role=Role.user, message_id='msg_no_res', parts=[]) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_no_res', + parts=[Part(text='Test')], + ) ) mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) @@ -699,8 +881,6 @@ async def test_on_message_send_no_result_from_aggregator(): None, ) - from a2a.utils.errors import ServerError # Local import - with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -711,16 +891,15 @@ async def test_on_message_send_no_result_from_aggregator(): return_value=None, ), ): # TaskManager.get_task for initial task - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InternalError): await request_handler.on_message_send( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, InternalError) - @pytest.mark.asyncio -async def test_on_message_send_task_id_mismatch(): +async def test_on_message_send_task_id_mismatch(agent_card): + """Test on_message_send returns InternalError if aggregator returns mismatched Task ID.""" """Test on_message_send when result task ID doesn't match request context task ID.""" mock_task_store = AsyncMock(spec=TaskStore) mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -738,9 +917,14 @@ async def test_on_message_send_task_id_mismatch(): agent_executor=mock_agent_executor, task_store=mock_task_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( - message=Message(role=Role.user, message_id='msg_id_mismatch', parts=[]) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_id_mismatch', + parts=[Part(text='Test')], + ) ) mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) @@ -751,8 +935,6 @@ async def test_on_message_send_task_id_mismatch(): None, ) - from a2a.utils.errors import ServerError # Local import - with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -763,13 +945,12 @@ async def test_on_message_send_task_id_mismatch(): return_value=None, ), ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InternalError) as exc_info: await request_handler.on_message_send( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, InternalError) - assert 'Task ID mismatch' in exc_info.value.error.message # type: ignore + assert 'Task ID mismatch' in exc_info.value.message # type: ignore class HelloAgentExecutor(AgentExecutor): @@ -779,14 +960,14 @@ async def execute(self, context: RequestContext, event_queue: EventQueue): assert context.message is not None, ( 'A message is required to create a new task' ) - task = new_task(context.message) # type: ignore + task = new_task_from_user_message(context.message) # type: ignore await event_queue.enqueue_event(task) updater = TaskUpdater(event_queue, task.id, task.context_id) try: - parts = [Part(root=TextPart(text='I am working'))] + parts = [Part(text='I am working')] await updater.update_status( - TaskState.working, + TaskState.TASK_STATE_WORKING, message=updater.new_agent_message(parts), ) except Exception as e: @@ -794,7 +975,7 @@ async def execute(self, context: RequestContext, event_queue: EventQueue): logging.warning('Error: %s', e) return await updater.add_artifact( - [Part(root=TextPart(text='Hello world!'))], + [Part(text='Hello world!')], name='conversion_result', ) await updater.complete() @@ -804,7 +985,7 @@ async def cancel(self, context: RequestContext, event_queue: EventQueue): @pytest.mark.asyncio -async def test_on_message_send_non_blocking(): +async def test_on_message_send_non_blocking(agent_card): task_store = InMemoryTaskStore() push_store = InMemoryPushNotificationConfigStore() @@ -812,37 +993,37 @@ async def test_on_message_send_non_blocking(): agent_executor=HelloAgentExecutor(), task_store=task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_push', - parts=[Part(root=TextPart(text='Hi'))], + parts=[Part(text='Hi')], ), - configuration=MessageSendConfiguration( - blocking=False, accepted_output_modes=['text/plain'] + configuration=SendMessageConfiguration( + return_immediately=True, accepted_output_modes=['text/plain'] ), ) - result = await request_handler.on_message_send( - params, create_server_call_context() - ) + context = create_server_call_context() + result = await request_handler.on_message_send(params, context) assert result is not None assert isinstance(result, Task) - assert result.status.state == TaskState.submitted + assert result.status.state == TaskState.TASK_STATE_SUBMITTED # Polling for 500ms until task is completed. task: Task | None = None for _ in range(5): await asyncio.sleep(0.1) - task = await task_store.get(result.id) + task = await task_store.get(result.id, context) assert task is not None - if task.status.state == TaskState.completed: + if task.status.state == TaskState.TASK_STATE_COMPLETED: break assert task is not None - assert task.status.state == TaskState.completed + assert task.status.state == TaskState.TASK_STATE_COMPLETED assert ( result.history and task.history @@ -851,7 +1032,7 @@ async def test_on_message_send_non_blocking(): @pytest.mark.asyncio -async def test_on_message_send_limit_history(): +async def test_on_message_send_limit_history(agent_card): task_store = InMemoryTaskStore() push_store = InMemoryPushNotificationConfigStore() @@ -859,38 +1040,37 @@ async def test_on_message_send_limit_history(): agent_executor=HelloAgentExecutor(), task_store=task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_push', - parts=[Part(root=TextPart(text='Hi'))], + parts=[Part(text='Hi')], ), - configuration=MessageSendConfiguration( - blocking=True, + configuration=SendMessageConfiguration( accepted_output_modes=['text/plain'], history_length=1, ), ) - result = await request_handler.on_message_send( - params, create_server_call_context() - ) + context = create_server_call_context() + result = await request_handler.on_message_send(params, context) # verify that history_length is honored assert result is not None assert isinstance(result, Task) assert result.history is not None and len(result.history) == 1 - assert result.status.state == TaskState.completed + assert result.status.state == TaskState.TASK_STATE_COMPLETED # verify that history is still persisted to the store - task = await task_store.get(result.id) + task = await task_store.get(result.id, context) assert task is not None assert task.history is not None and len(task.history) > 1 @pytest.mark.asyncio -async def test_on_get_task_limit_history(): +async def test_on_get_task_limit_history(agent_card): task_store = InMemoryTaskStore() push_store = InMemoryPushNotificationConfigStore() @@ -898,15 +1078,15 @@ async def test_on_get_task_limit_history(): agent_executor=HelloAgentExecutor(), task_store=task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_push', - parts=[Part(root=TextPart(text='Hi'))], + parts=[Part(text='Hi')], ), - configuration=MessageSendConfiguration( - blocking=True, + configuration=SendMessageConfiguration( accepted_output_modes=['text/plain'], ), ) @@ -919,7 +1099,7 @@ async def test_on_get_task_limit_history(): assert isinstance(result, Task) get_task_result = await request_handler.on_get_task( - TaskQueryParams(id=result.id, history_length=1), + GetTaskRequest(id=result.id, history_length=1), create_server_call_context(), ) assert get_task_result is not None @@ -931,7 +1111,7 @@ async def test_on_get_task_limit_history(): @pytest.mark.asyncio -async def test_on_message_send_interrupted_flow(): +async def test_on_message_send_interrupted_flow(agent_card): """Test on_message_send when flow is interrupted (e.g., auth_required).""" mock_task_store = AsyncMock(spec=TaskStore) mock_agent_executor = AsyncMock(spec=AgentExecutor) @@ -947,14 +1127,19 @@ async def test_on_message_send_interrupted_flow(): agent_executor=mock_agent_executor, task_store=mock_task_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( - message=Message(role=Role.user, message_id='msg_interrupt', parts=[]) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_interrupt', + parts=[Part(text='Test')], + ) ) mock_result_aggregator_instance = AsyncMock(spec=ResultAggregator) interrupt_task_result = create_sample_task( - task_id=task_id, status_state=TaskState.auth_required + task_id=task_id, status_state=TaskState.TASK_STATE_AUTH_REQUIRED ) mock_result_aggregator_instance.consume_and_break_on_interrupt.return_value = ( interrupt_task_result, @@ -962,9 +1147,18 @@ async def test_on_message_send_interrupted_flow(): MagicMock(spec=asyncio.Task), # background task ) # Interrupted = True + # Collect coroutines passed to create_task so we can close them + created_coroutines = [] + + def capture_create_task(coro): + created_coroutines.append(coro) + return MagicMock() + # Patch asyncio.create_task to verify _cleanup_producer is scheduled with ( - patch('asyncio.create_task') as mock_asyncio_create_task, + patch( + 'asyncio.create_task', side_effect=capture_create_task + ) as mock_asyncio_create_task, patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', return_value=mock_result_aggregator_instance, @@ -985,21 +1179,21 @@ async def test_on_message_send_interrupted_flow(): # Check that the second call to create_task was for _cleanup_producer found_cleanup_call = False - for call_args_tuple in mock_asyncio_create_task.call_args_list: - created_coro = call_args_tuple[0][0] - if ( - hasattr(created_coro, '__name__') - and created_coro.__name__ == '_cleanup_producer' - ): + for coro in created_coroutines: + if hasattr(coro, '__name__') and coro.__name__ == '_cleanup_producer': found_cleanup_call = True break assert found_cleanup_call, ( '_cleanup_producer was not scheduled with asyncio.create_task' ) + # Close coroutines to avoid RuntimeWarning about unawaited coroutines + for coro in created_coroutines: + coro.close() + @pytest.mark.asyncio -async def test_on_message_send_stream_with_push_notification(): +async def test_on_message_send_stream_with_push_notification(agent_card): """Test on_message_send_stream sets and uses push notification info.""" mock_task_store = AsyncMock(spec=TaskStore) mock_push_config_store = AsyncMock(spec=PushNotificationConfigStore) @@ -1012,12 +1206,16 @@ async def test_on_message_send_stream_with_push_notification(): # Initial task state for TaskManager initial_task_for_tm = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.submitted + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_SUBMITTED, ) # Task state for RequestContext task_for_rc = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.working + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_WORKING, ) # Example state after message update mock_task_store.get.return_value = None # New task for TaskManager @@ -1033,18 +1231,21 @@ async def test_on_message_send_stream_with_push_notification(): push_config_store=mock_push_config_store, push_sender=mock_push_sender, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - push_config = PushNotificationConfig(url='http://callback.stream.com/push') - message_config = MessageSendConfiguration( - push_notification_config=push_config, + push_config = TaskPushNotificationConfig( + url='http://callback.stream.com/push' + ) + message_config = SendMessageConfiguration( + task_push_notification_config=push_config, accepted_output_modes=['text/plain'], # Added required field ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_stream_push', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ), @@ -1066,10 +1267,14 @@ async def exec_side_effect(*args, **kwargs): # Events to be yielded by consume_and_emit event1_task_update = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.working + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_WORKING, ) event2_final_task = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.completed + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_COMPLETED, ) async def event_stream_gen(): @@ -1088,170 +1293,15 @@ def sync_get_event_stream_gen(*args, **kwargs): side_effect=sync_get_event_stream_gen ) - # Mock current_result property to return appropriate awaitables - # Coroutines that will be returned by successive accesses to current_result - async def current_result_coro1(): - return event1_task_update - - async def current_result_coro2(): - return event2_final_task - - # Use unittest.mock.PropertyMock for async property - # We need to patch 'ResultAggregator.current_result' when this instance is used. - # This is complex because ResultAggregator is instantiated inside the handler. - # Easier: If mock_result_aggregator_instance is a MagicMock, we can assign a callable. - # This part is tricky. Let's assume current_result is an async method for easier mocking first. - # If it's truly a property, the mocking is harder with instance mocks. - # Let's adjust the mock_result_aggregator_instance.current_result to be an AsyncMock directly - # This means the code would call `await result_aggregator.current_result()` - # But the actual code is `await result_aggregator.current_result` - # This implies `result_aggregator.current_result` IS an awaitable. - # So, we can mock it with a side_effect that returns awaitables (coroutines). - - # Create simple awaitables (coroutines) for side_effect - async def get_event1(): - return event1_task_update - - async def get_event2(): - return event2_final_task - - # Make the current_result attribute of the mock instance itself an awaitable - # This still means current_result is not callable. - # For an async property, the mock needs to have current_result as a non-AsyncMock attribute - # that is itself an awaitable. - - # Let's try to mock the property at the type level for ResultAggregator temporarily - # This is not ideal as it affects all instances. - - # Alternative: Configure the AsyncMock for current_result to return a coroutine - # when it's awaited. This is not directly supported by AsyncMock for property access. - - # Simplest for now: Assume `current_result` attribute of the mocked `ResultAggregator` instance - # can be sequentially awaited if it's a list of awaitables that a test runner can handle. - # This is likely to fail again but will clarify the exact point of await. - # The error "TypeError: object AsyncMock can't be used in 'await' expression" means - # `mock_result_aggregator_instance.current_result` is an AsyncMock, and that's what's awaited. - # This AsyncMock needs to have a __await__ method. - - # Let's make the side_effect of the AsyncMock `current_result` provide the values. - # This assumes that `await mock.property` somehow triggers a call to the mock. - # This is not how AsyncMock works. - - # The code is `await result_aggregator.current_result`. - # `result_aggregator` is an instance of `ResultAggregator`. - # `current_result` is an async property. - # So `result_aggregator.current_result` evaluates to a coroutine. - # We need `mock_result_aggregator_instance.current_result` to be a coroutine, - # or a list of coroutines if accessed multiple times. - # This is best done by mocking the property itself. - # Let's assume it's called twice. - - # We will patch ResultAggregator to be our mock_result_aggregator_instance - # Then, we need to control what its `current_result` property returns. - # We can use a PropertyMock for this, attached to the type of mock_result_aggregator_instance. - - # For this specific test, let's make current_result a simple async def method on the mock instance - # This means we are slightly diverging from the "property" nature just for this mock. - # Mock current_result property to return appropriate awaitables (coroutines) sequentially. - async def get_event1_coro(): - return event1_task_update - - async def get_event2_coro(): - return event2_final_task - - # Configure the 'current_result' property on the type of the mock instance - # This makes accessing `instance.current_result` call the side_effect function, - # which then cycles through our list of coroutines. - # We need a new PropertyMock for each instance, or patch the class. - # Since mock_result_aggregator_instance is already created, we attach to its type. - # This can be tricky. A more direct way is to ensure the instance's attribute `current_result` - # behaves as desired. If `mock_result_aggregator_instance` is a `MagicMock`, its attributes are also mocks. - - # Let's make `current_result` a MagicMock whose side_effect returns the coroutines. - # This means when `result_aggregator.current_result` is accessed, this mock is "called". - # This isn't quite right for a property. A property isn't "called" on access. - - # Correct approach for mocking an async property on an instance mock: - # Set the attribute `current_result` on the instance `mock_result_aggregator_instance` - # to be a `PropertyMock` if we were patching the class. - # Since we have the instance, we can try to replace its `current_result` attribute. - # The instance `mock_result_aggregator_instance` is a `MagicMock`. - # We can make `mock_result_aggregator_instance.current_result` a `PropertyMock` - # that returns a coroutine. For multiple calls, `side_effect` on `PropertyMock` is a list of return_values. - - # Create a PropertyMock that will cycle through coroutines - # This requires Python 3.8+ for PropertyMock to be directly usable with side_effect list for properties. - # For older versions or for clarity with async properties, directly mocking the attribute - # to be a series of awaitables is hard. - # The easiest is to ensure `current_result` is an AsyncMock that returns the values. - # The product code `await result_aggregator.current_result` means `current_result` must be an awaitable. - - # Let's make current_result an AsyncMock whose __call__ returns the sequence. - # Mock current_result as an async property - # Create coroutines that will be the "result" of awaiting the property - async def get_current_result_coro1(): - return event1_task_update - - async def get_current_result_coro2(): - return event2_final_task - - # Configure the 'current_result' property on the mock_result_aggregator_instance - # using PropertyMock attached to its type. This makes instance.current_result return - # items from side_effect sequentially on each access. - # Since current_result is an async property, these items should be coroutines. - # We need to ensure that mock_result_aggregator_instance itself is the one patched. - # The patch for ResultAggregator returns this instance. - # So, we configure PropertyMock on the type of this specific mock instance. - # This is slightly unusual; typically PropertyMock is used when patching a class. - # A more straightforward approach for an instance is if its type is already a mock. - # As mock_result_aggregator_instance is a MagicMock, we can configure its 'current_result' - # attribute to be a PropertyMock. - - # Let's directly assign a PropertyMock to the type of the instance for `current_result` - # This ensures that when `instance.current_result` is accessed, the PropertyMock's logic is triggered. - # However, PropertyMock is usually used with `patch.object` or by setting it on the class. - # - # A simpler way for MagicMock instance: - # `mock_result_aggregator_instance.current_result` is already a MagicMock (or AsyncMock if spec'd). - # We need to make it return a coroutine upon access. - # The most direct way to mock an async property on a MagicMock instance - # such that it returns a sequence of awaitables: - async def side_effect_current_result(): - yield event1_task_update - yield event2_final_task + # Mock current_result as an async property returning events sequentially. + async def to_coro(val): + return val - # Create an async generator from the side effect - current_result_gen = side_effect_current_result() - - # Make current_result return the next item from this generator (wrapped in a coroutine) - # each time it's accessed. - async def get_next_current_result(): - try: - return await current_result_gen.__anext__() - except StopAsyncIteration: - # Handle case where it's awaited more times than values provided - return None # Or raise an error - - # Since current_result is a property, accessing it should return a coroutine. - # We can achieve this by making mock_result_aggregator_instance.current_result - # a MagicMock whose side_effect returns these coroutines. - # This is still tricky because it's a property access. - - # Let's use the PropertyMock on the class being mocked via the patch. - # Setup for consume_and_emit - def sync_get_event_stream_gen_for_prop_test(*args, **kwargs): - return event_stream_gen() - - mock_result_aggregator_instance.consume_and_emit = MagicMock( - side_effect=sync_get_event_stream_gen_for_prop_test - ) - - # Configure current_result on the type of the mock_result_aggregator_instance - # This makes it behave like a property that returns items from side_effect on access. type(mock_result_aggregator_instance).current_result = PropertyMock( - side_effect=[get_current_result_coro1(), get_current_result_coro2()] + side_effect=[to_coro(event1_task_update), to_coro(event2_final_task)] ) + context = create_server_call_context() with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -1267,27 +1317,33 @@ def sync_get_event_stream_gen_for_prop_test(*args, **kwargs): ), ): # Consume the stream - async for _ in request_handler.on_message_send_stream( - params, create_server_call_context() - ): + async for _ in request_handler.on_message_send_stream(params, context): pass await asyncio.wait_for(execute_called.wait(), timeout=0.1) # Assertions # 1. set_info called once at the beginning if task exists (or after task is created from message) - mock_push_config_store.set_info.assert_any_call(task_id, push_config) + mock_push_config_store.set_info.assert_any_call( + task_id, push_config, context + ) # 2. send_notification called for each task event yielded by aggregator assert mock_push_sender.send_notification.await_count == 2 - mock_push_sender.send_notification.assert_any_await(event1_task_update) - mock_push_sender.send_notification.assert_any_await(event2_final_task) + mock_push_sender.send_notification.assert_any_await( + task_id, event1_task_update + ) + mock_push_sender.send_notification.assert_any_await( + task_id, event2_final_task + ) mock_agent_executor.execute.assert_awaited_once() @pytest.mark.asyncio -async def test_stream_disconnect_then_resubscribe_receives_future_events(): +async def test_stream_disconnect_then_resubscribe_receives_future_events( + agent_card, +): """Start streaming, disconnect, then resubscribe and ensure subsequent events are streamed.""" # Arrange mock_task_store = AsyncMock(spec=TaskStore) @@ -1301,7 +1357,9 @@ async def test_stream_disconnect_then_resubscribe_receives_future_events(): # Task exists and is non-final task_for_resub = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.working + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_WORKING, ) mock_task_store.get.return_value = task_for_resub @@ -1309,13 +1367,14 @@ async def test_stream_disconnect_then_resubscribe_receives_future_events(): agent_executor=mock_agent_executor, task_store=mock_task_store, queue_manager=queue_manager, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_reconn', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ) @@ -1327,10 +1386,14 @@ async def test_stream_disconnect_then_resubscribe_receives_future_events(): allow_finish = asyncio.Event() first_event = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.working + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_WORKING, ) second_event = create_sample_task( - task_id=task_id, context_id=context_id, status_state=TaskState.completed + task_id=task_id, + context_id=context_id, + status_state=TaskState.TASK_STATE_COMPLETED, ) async def exec_side_effect(_request, queue: EventQueue): @@ -1353,14 +1416,18 @@ async def exec_side_effect(_request, queue: EventQueue): await asyncio.wait_for(agen.aclose(), timeout=0.1) # Resubscribe and start consuming future events - resub_gen = request_handler.on_resubscribe_to_task( - TaskIdParams(id=task_id), create_server_call_context() + resub_gen = request_handler.on_subscribe_to_task( + SubscribeToTaskRequest(id=f'{task_id}'), + create_server_call_context(), ) # Allow producer to emit the next event allow_second_event.set() - received = await resub_gen.__anext__() + first_subscribe_event = await anext(resub_gen) + assert first_subscribe_event == task_for_resub + + received = await anext(resub_gen) assert received == second_event # Finish producer to allow cleanup paths to complete @@ -1368,7 +1435,9 @@ async def exec_side_effect(_request, queue: EventQueue): @pytest.mark.asyncio -async def test_on_message_send_stream_client_disconnect_triggers_background_cleanup_and_producer_continues(): +async def test_on_message_send_stream_client_disconnect_triggers_background_cleanup_and_producer_continues( + agent_card, +): """Simulate client disconnect: stream stops early, cleanup is scheduled in background, producer keeps running, and cleanup completes after producer finishes.""" # Arrange @@ -1380,6 +1449,10 @@ async def test_on_message_send_stream_client_disconnect_triggers_background_clea task_id = 'disc_task_1' context_id = 'disc_ctx_1' + # Return an existing task from the store to avoid "task not found" error + existing_task = create_sample_task(task_id=task_id, context_id=context_id) + mock_task_store.get.return_value = existing_task + # RequestContext with IDs mock_request_context = MagicMock(spec=RequestContext) mock_request_context.task_id = task_id @@ -1387,7 +1460,7 @@ async def test_on_message_send_stream_client_disconnect_triggers_background_clea mock_request_context_builder.build.return_value = mock_request_context # Queue used by _run_event_stream; must support close() - mock_queue = AsyncMock(spec=EventQueue) + mock_queue = AsyncMock(spec=EventQueueLegacy) mock_queue_manager.create_or_tap.return_value = mock_queue request_handler = DefaultRequestHandler( @@ -1395,13 +1468,14 @@ async def test_on_message_send_stream_client_disconnect_triggers_background_clea task_store=mock_task_store, queue_manager=mock_queue_manager, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='mid', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ) @@ -1503,7 +1577,7 @@ def create_task_spy(coro): @pytest.mark.asyncio -async def test_disconnect_persists_final_task_to_store(): +async def test_disconnect_persists_final_task_to_store(agent_card): """After client disconnect, ensure background consumer persists final Task to store.""" task_store = InMemoryTaskStore() queue_manager = InMemoryQueueManager() @@ -1516,16 +1590,15 @@ def __init__(self): async def execute( self, context: RequestContext, event_queue: EventQueue ): - from typing import cast updater = TaskUpdater( event_queue, cast('str', context.task_id), cast('str', context.context_id), ) - await updater.update_status(TaskState.working) + await updater.update_status(TaskState.TASK_STATE_WORKING) await self.allow_finish.wait() - await updater.update_status(TaskState.completed) + await updater.update_status(TaskState.TASK_STATE_COMPLETED) async def cancel( self, context: RequestContext, event_queue: EventQueue @@ -1535,14 +1608,17 @@ async def cancel( agent = FinishingAgent() handler = DefaultRequestHandler( - agent_executor=agent, task_store=task_store, queue_manager=queue_manager + agent_executor=agent, + task_store=task_store, + queue_manager=queue_manager, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_persist', - parts=[], + parts=[Part(text='Test')], ) ) @@ -1550,11 +1626,12 @@ async def cancel( agen = handler.on_message_send_stream(params, create_server_call_context()) first = await agen.__anext__() if isinstance(first, TaskStatusUpdateEvent): - assert first.status.state == TaskState.working + assert first.status.state == TaskState.TASK_STATE_WORKING task_id = first.task_id else: assert ( - isinstance(first, Task) and first.status.state == TaskState.working + isinstance(first, Task) + and first.status.state == TaskState.TASK_STATE_WORKING ) task_id = first.id @@ -1577,7 +1654,7 @@ async def cancel( # Verify task is persisted as completed persisted = await task_store.get(task_id, create_server_call_context()) assert persisted is not None - assert persisted.status.state == TaskState.completed + assert persisted.status.state == TaskState.TASK_STATE_COMPLETED async def wait_until(predicate, timeout: float = 0.2, interval: float = 0.0): @@ -1593,7 +1670,7 @@ async def wait_until(predicate, timeout: float = 0.2, interval: float = 0.0): @pytest.mark.asyncio -async def test_background_cleanup_task_is_tracked_and_cleared(): +async def test_background_cleanup_task_is_tracked_and_cleared(agent_card): """Ensure background cleanup task is tracked while pending and removed when done.""" # Arrange mock_task_store = AsyncMock(spec=TaskStore) @@ -1604,13 +1681,17 @@ async def test_background_cleanup_task_is_tracked_and_cleared(): task_id = 'track_task_1' context_id = 'track_ctx_1' + # Return an existing task from the store to avoid "task not found" error + existing_task = create_sample_task(task_id=task_id, context_id=context_id) + mock_task_store.get.return_value = existing_task + # RequestContext with IDs mock_request_context = MagicMock(spec=RequestContext) mock_request_context.task_id = task_id mock_request_context.context_id = context_id mock_request_context_builder.build.return_value = mock_request_context - mock_queue = AsyncMock(spec=EventQueue) + mock_queue = AsyncMock(spec=EventQueueLegacy) mock_queue_manager.create_or_tap.return_value = mock_queue request_handler = DefaultRequestHandler( @@ -1618,13 +1699,14 @@ async def test_background_cleanup_task_is_tracked_and_cleared(): task_store=mock_task_store, queue_manager=mock_queue_manager, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='mid_track', - parts=[], + parts=[Part(text='Test')], task_id=task_id, context_id=context_id, ) @@ -1707,7 +1789,7 @@ def create_task_spy(coro): @pytest.mark.asyncio -async def test_on_message_send_stream_task_id_mismatch(): +async def test_on_message_send_stream_task_id_mismatch(agent_card): """Test on_message_send_stream raises error if yielded task ID mismatches.""" mock_task_store = AsyncMock(spec=TaskStore) mock_agent_executor = AsyncMock( @@ -1726,10 +1808,13 @@ async def test_on_message_send_stream_task_id_mismatch(): agent_executor=mock_agent_executor, task_store=mock_task_store, request_context_builder=mock_request_context_builder, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, message_id='msg_stream_mismatch', parts=[] + role=Role.ROLE_USER, + message_id='msg_stream_mismatch', + parts=[Part(text='Test')], ) ) @@ -1745,8 +1830,6 @@ async def event_stream_gen_mismatch(): event_stream_gen_mismatch() ) - from a2a.utils.errors import ServerError # Local import - with ( patch( 'a2a.server.request_handlers.default_request_handler.ResultAggregator', @@ -1757,34 +1840,34 @@ async def event_stream_gen_mismatch(): return_value=None, ), ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InternalError) as exc_info: async for _ in request_handler.on_message_send_stream( params, create_server_call_context() ): pass # Consume the stream to trigger the error - assert isinstance(exc_info.value.error, InternalError) - assert 'Task ID mismatch' in exc_info.value.error.message # type: ignore + assert 'Task ID mismatch' in exc_info.value.message # type: ignore @pytest.mark.asyncio -async def test_cleanup_producer_task_id_not_in_running_agents(): +async def test_cleanup_producer_task_id_not_in_running_agents(agent_card): """Test _cleanup_producer when task_id is not in _running_agents (e.g., already cleaned up).""" mock_task_store = AsyncMock(spec=TaskStore) mock_queue_manager = AsyncMock(spec=QueueManager) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) task_id = 'task_already_cleaned' # Create a real, completed asyncio.Task for the test - async def dummy_coro_for_task(): + async def noop_coro_for_task(): pass - mock_producer_task = asyncio.create_task(dummy_coro_for_task()) + mock_producer_task = asyncio.create_task(noop_coro_for_task()) await asyncio.sleep( 0 ) # Ensure the task has a chance to complete/be scheduled @@ -1805,107 +1888,102 @@ async def dummy_coro_for_task(): @pytest.mark.asyncio -async def test_set_task_push_notification_config_no_notifier(): - """Test on_set_task_push_notification_config when _push_config_store is None.""" +async def test_set_task_push_notification_config_no_notifier(agent_card): + """Test on_create_task_push_notification_config when _push_config_store is None.""" request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=AsyncMock(spec=TaskStore), - push_config_store=None, # Explicitly None + push_config_store=None, # Explicitly None, + agent_card=agent_card, ) params = TaskPushNotificationConfig( task_id='task1', - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), + url='http://example.com', ) - from a2a.utils.errors import ServerError # Local import - with pytest.raises(ServerError) as exc_info: - await request_handler.on_set_task_push_notification_config( + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_create_task_push_notification_config( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, UnsupportedOperationError) @pytest.mark.asyncio -async def test_set_task_push_notification_config_task_not_found(): - """Test on_set_task_push_notification_config when task is not found.""" +async def test_set_task_push_notification_config_task_not_found(agent_card): + """Test on_create_task_push_notification_config when task is not found.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None # Task not found mock_push_store = AsyncMock(spec=PushNotificationConfigStore) mock_push_sender = AsyncMock(spec=PushNotificationSender) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=mock_push_store, push_sender=mock_push_sender, + agent_card=agent_card, ) params = TaskPushNotificationConfig( task_id='non_existent_task', - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), + url='http://example.com', ) - from a2a.utils.errors import ServerError # Local import context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: - await request_handler.on_set_task_push_notification_config( + with pytest.raises(TaskNotFoundError): + await request_handler.on_create_task_push_notification_config( params, context ) - - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with('non_existent_task', context) mock_push_store.set_info.assert_not_awaited() @pytest.mark.asyncio -async def test_get_task_push_notification_config_no_store(): +async def test_get_task_push_notification_config_no_store(agent_card): """Test on_get_task_push_notification_config when _push_config_store is None.""" request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=AsyncMock(spec=TaskStore), - push_config_store=None, # Explicitly None + push_config_store=None, # Explicitly None, + agent_card=agent_card, + ) + params = GetTaskPushNotificationConfigRequest( + task_id='task1', + id='task_push_notification_config', ) - params = GetTaskPushNotificationConfigParams(id='task1') - from a2a.utils.errors import ServerError # Local import - with pytest.raises(ServerError) as exc_info: + with pytest.raises(PushNotificationNotSupportedError): await request_handler.on_get_task_push_notification_config( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, UnsupportedOperationError) @pytest.mark.asyncio -async def test_get_task_push_notification_config_task_not_found(): +async def test_get_task_push_notification_config_task_not_found(agent_card): """Test on_get_task_push_notification_config when task is not found.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None # Task not found mock_push_store = AsyncMock(spec=PushNotificationConfigStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=mock_push_store, + agent_card=agent_card, + ) + params = GetTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='task_push_notification_config' ) - params = GetTaskPushNotificationConfigParams(id='non_existent_task') - from a2a.utils.errors import ServerError # Local import context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError): await request_handler.on_get_task_push_notification_config( params, context ) - - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with('non_existent_task', context) mock_push_store.get_info.assert_not_awaited() @pytest.mark.asyncio -async def test_get_task_push_notification_config_info_not_found(): +async def test_get_task_push_notification_config_info_not_found(agent_card): """Test on_get_task_push_notification_config when push_config_store.get_info returns None.""" mock_task_store = AsyncMock(spec=TaskStore) @@ -1916,52 +1994,51 @@ async def test_get_task_push_notification_config_info_not_found(): mock_push_store.get_info.return_value = None # Info not found request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=mock_push_store, + agent_card=agent_card, + ) + params = GetTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='task_push_notification_config' ) - params = GetTaskPushNotificationConfigParams(id='non_existent_task') - from a2a.utils.errors import ServerError # Local import context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError): await request_handler.on_get_task_push_notification_config( params, context ) - - assert isinstance( - exc_info.value.error, InternalError - ) # Current code raises InternalError mock_task_store.get.assert_awaited_once_with('non_existent_task', context) - mock_push_store.get_info.assert_awaited_once_with('non_existent_task') + mock_push_store.get_info.assert_awaited_once_with( + 'non_existent_task', context + ) @pytest.mark.asyncio -async def test_get_task_push_notification_config_info_with_config(): +async def test_get_task_push_notification_config_info_with_config(agent_card): """Test on_get_task_push_notification_config with valid push config id""" mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') push_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) set_config_params = TaskPushNotificationConfig( - task_id='task_1', - push_notification_config=PushNotificationConfig( - id='config_id', url='http://1.example.com' - ), + task_id='task_1', id='config_id', url='http://1.example.com' ) context = create_server_call_context() - await request_handler.on_set_task_push_notification_config( + await request_handler.on_create_task_push_notification_config( set_config_params, context ) - params = GetTaskPushNotificationConfigParams( - id='task_1', push_notification_config_id='config_id' + params = GetTaskPushNotificationConfigRequest( + task_id='task_1', id='config_id' ) result: TaskPushNotificationConfig = ( @@ -1972,37 +2049,36 @@ async def test_get_task_push_notification_config_info_with_config(): assert result is not None assert result.task_id == 'task_1' - assert ( - result.push_notification_config.url - == set_config_params.push_notification_config.url - ) - assert result.push_notification_config.id == 'config_id' + assert result.url == set_config_params.url + assert result.id == 'config_id' @pytest.mark.asyncio -async def test_get_task_push_notification_config_info_with_config_no_id(): +async def test_get_task_push_notification_config_info_with_config_no_id( + agent_card, +): """Test on_get_task_push_notification_config with no push config id""" mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') push_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) set_config_params = TaskPushNotificationConfig( task_id='task_1', - push_notification_config=PushNotificationConfig( - url='http://1.example.com' - ), + url='http://1.example.com', ) - await request_handler.on_set_task_push_notification_config( + await request_handler.on_create_task_push_notification_config( set_config_params, create_server_call_context() ) - params = TaskIdParams(id='task_1') + params = GetTaskPushNotificationConfigRequest(task_id='task_1', id='task_1') result: TaskPushNotificationConfig = ( await request_handler.on_get_task_push_notification_config( @@ -2012,41 +2088,36 @@ async def test_get_task_push_notification_config_info_with_config_no_id(): assert result is not None assert result.task_id == 'task_1' - assert ( - result.push_notification_config.url - == set_config_params.push_notification_config.url - ) - assert result.push_notification_config.id == 'task_1' + assert result.url == set_config_params.url + assert result.id == 'task_1' @pytest.mark.asyncio -async def test_on_resubscribe_to_task_task_not_found(): - """Test on_resubscribe_to_task when the task is not found.""" +async def test_on_subscribe_to_task_task_not_found(agent_card): + """Test on_subscribe_to_task when the task is not found.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None # Task not found request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = TaskIdParams(id='resub_task_not_found') - - from a2a.utils.errors import ServerError # Local import + params = SubscribeToTaskRequest(id='resub_task_not_found') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError): # Need to consume the async generator to trigger the error - async for _ in request_handler.on_resubscribe_to_task(params, context): + async for _ in request_handler.on_subscribe_to_task(params, context): pass - - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with( 'resub_task_not_found', context ) @pytest.mark.asyncio -async def test_on_resubscribe_to_task_queue_not_found(): - """Test on_resubscribe_to_task when the queue is not found by queue_manager.tap.""" +async def test_on_subscribe_to_task_queue_not_found(agent_card): + """Test on_subscribe_to_task when the queue is not found by queue_manager.tap.""" mock_task_store = AsyncMock(spec=TaskStore) sample_task = create_sample_task(task_id='resub_queue_not_found') mock_task_store.get.return_value = sample_task @@ -2055,22 +2126,17 @@ async def test_on_resubscribe_to_task_queue_not_found(): mock_queue_manager.tap.return_value = None # Queue not found request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, queue_manager=mock_queue_manager, + agent_card=agent_card, ) - params = TaskIdParams(id='resub_queue_not_found') - - from a2a.utils.errors import ServerError # Local import + params = SubscribeToTaskRequest(id='resub_queue_not_found') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: - async for _ in request_handler.on_resubscribe_to_task(params, context): + with pytest.raises(TaskNotFoundError): + async for _ in request_handler.on_subscribe_to_task(params, context): pass - - assert isinstance( - exc_info.value.error, TaskNotFoundError - ) # Should be TaskNotFoundError as per spec mock_task_store.get.assert_awaited_once_with( 'resub_queue_not_found', context ) @@ -2078,22 +2144,24 @@ async def test_on_resubscribe_to_task_queue_not_found(): @pytest.mark.asyncio -async def test_on_message_send_stream(): +async def test_on_message_send_stream(agent_card): request_handler = DefaultRequestHandler( - DummyAgentExecutor(), InMemoryTaskStore() + MockAgentExecutor(), + InMemoryTaskStore(), + agent_card=agent_card, ) - message_params = MessageSendParams( + message_params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg-123', - parts=[Part(root=TextPart(text='How are you?'))], + parts=[Part(text='How are you?')], ), ) async def consume_stream(): events = [] async for event in request_handler.on_message_send_stream( - message_params + message_params, create_server_call_context() ): events.append(event) if len(events) >= 3: @@ -2110,56 +2178,53 @@ async def consume_stream(): assert len(events) == 3 assert elapsed < 0.5 - texts = [p.root.text for e in events for p in e.status.message.parts] + texts = [p.text for e in events for p in e.status.message.parts] assert texts == ['Event 0', 'Event 1', 'Event 2'] @pytest.mark.asyncio -async def test_list_task_push_notification_config_no_store(): - """Test on_list_task_push_notification_config when _push_config_store is None.""" +async def test_list_task_push_notification_config_no_store(agent_card): + """Test on_list_task_push_notification_configs when _push_config_store is None.""" request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=AsyncMock(spec=TaskStore), - push_config_store=None, # Explicitly None + push_config_store=None, # Explicitly None, + agent_card=agent_card, ) - params = ListTaskPushNotificationConfigParams(id='task1') - from a2a.utils.errors import ServerError # Local import + params = ListTaskPushNotificationConfigsRequest(task_id='task1') - with pytest.raises(ServerError) as exc_info: - await request_handler.on_list_task_push_notification_config( + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_list_task_push_notification_configs( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, UnsupportedOperationError) @pytest.mark.asyncio -async def test_list_task_push_notification_config_task_not_found(): - """Test on_list_task_push_notification_config when task is not found.""" +async def test_list_task_push_notification_config_task_not_found(agent_card): + """Test on_list_task_push_notification_configs when task is not found.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None # Task not found mock_push_store = AsyncMock(spec=PushNotificationConfigStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=mock_push_store, + agent_card=agent_card, ) - params = ListTaskPushNotificationConfigParams(id='non_existent_task') - from a2a.utils.errors import ServerError # Local import + params = ListTaskPushNotificationConfigsRequest(task_id='non_existent_task') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: - await request_handler.on_list_task_push_notification_config( + with pytest.raises(TaskNotFoundError): + await request_handler.on_list_task_push_notification_configs( params, context ) - - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with('non_existent_task', context) mock_push_store.get_info.assert_not_awaited() @pytest.mark.asyncio -async def test_list_no_task_push_notification_config_info(): +async def test_list_no_task_push_notification_config_info(agent_card): """Test on_get_task_push_notification_config when push_config_store.get_info returns []""" mock_task_store = AsyncMock(spec=TaskStore) @@ -2169,158 +2234,152 @@ async def test_list_no_task_push_notification_config_info(): push_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = ListTaskPushNotificationConfigParams(id='non_existent_task') + params = ListTaskPushNotificationConfigsRequest(task_id='non_existent_task') - result = await request_handler.on_list_task_push_notification_config( + result = await request_handler.on_list_task_push_notification_configs( params, create_server_call_context() ) - assert result == [] + assert result.configs == [] @pytest.mark.asyncio -async def test_list_task_push_notification_config_info_with_config(): - """Test on_list_task_push_notification_config with push config+id""" +async def test_list_task_push_notification_config_info_with_config(agent_card): + """Test on_list_task_push_notification_configs with push config+id""" mock_task_store = AsyncMock(spec=TaskStore) sample_task = create_sample_task(task_id='non_existent_task') mock_task_store.get.return_value = sample_task - push_config1 = PushNotificationConfig( - id='config_1', url='http://example.com' + push_config1 = TaskPushNotificationConfig( + task_id='task_1', id='config_1', url='http://example.com' ) - push_config2 = PushNotificationConfig( - id='config_2', url='http://example.com' + push_config2 = TaskPushNotificationConfig( + task_id='task_1', id='config_2', url='http://example.com' ) push_store = InMemoryPushNotificationConfigStore() - await push_store.set_info('task_1', push_config1) - await push_store.set_info('task_1', push_config2) + context = create_server_call_context() + await push_store.set_info('task_1', push_config1, context) + await push_store.set_info('task_1', push_config2, context) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = ListTaskPushNotificationConfigParams(id='task_1') + params = ListTaskPushNotificationConfigsRequest(task_id='task_1') - result: list[ - TaskPushNotificationConfig - ] = await request_handler.on_list_task_push_notification_config( + result = await request_handler.on_list_task_push_notification_configs( params, create_server_call_context() ) - assert len(result) == 2 - assert result[0].task_id == 'task_1' - assert result[0].push_notification_config == push_config1 - assert result[1].task_id == 'task_1' - assert result[1].push_notification_config == push_config2 + assert len(result.configs) == 2 + assert result.configs[0].task_id == 'task_1' + assert result.configs[0] == push_config1 + assert result.configs[1].task_id == 'task_1' + assert result.configs[1] == push_config2 @pytest.mark.asyncio -async def test_list_task_push_notification_config_info_with_config_and_no_id(): - """Test on_list_task_push_notification_config with no push config id""" +async def test_list_task_push_notification_config_info_with_config_and_no_id( + agent_card, +): + """Test on_list_task_push_notification_configs with no push config id""" mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') push_store = InMemoryPushNotificationConfigStore() request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) # multiple calls without config id should replace the existing set_config_params1 = TaskPushNotificationConfig( task_id='task_1', - push_notification_config=PushNotificationConfig( - url='http://1.example.com' - ), + url='http://1.example.com', ) - await request_handler.on_set_task_push_notification_config( + await request_handler.on_create_task_push_notification_config( set_config_params1, create_server_call_context() ) set_config_params2 = TaskPushNotificationConfig( task_id='task_1', - push_notification_config=PushNotificationConfig( - url='http://2.example.com' - ), + url='http://2.example.com', ) - await request_handler.on_set_task_push_notification_config( + await request_handler.on_create_task_push_notification_config( set_config_params2, create_server_call_context() ) - params = ListTaskPushNotificationConfigParams(id='task_1') + params = ListTaskPushNotificationConfigsRequest(task_id='task_1') - result: list[ - TaskPushNotificationConfig - ] = await request_handler.on_list_task_push_notification_config( + result = await request_handler.on_list_task_push_notification_configs( params, create_server_call_context() ) - assert len(result) == 1 - assert result[0].task_id == 'task_1' - assert ( - result[0].push_notification_config.url - == set_config_params2.push_notification_config.url - ) - assert result[0].push_notification_config.id == 'task_1' + assert len(result.configs) == 1 + assert result.configs[0].task_id == 'task_1' + assert result.configs[0].url == set_config_params2.url + assert result.configs[0].id == 'task_1' @pytest.mark.asyncio -async def test_delete_task_push_notification_config_no_store(): +async def test_delete_task_push_notification_config_no_store(agent_card): """Test on_delete_task_push_notification_config when _push_config_store is None.""" request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=AsyncMock(spec=TaskStore), - push_config_store=None, # Explicitly None + push_config_store=None, # Explicitly None, + agent_card=agent_card, ) - params = DeleteTaskPushNotificationConfigParams( - id='task1', push_notification_config_id='config1' + params = DeleteTaskPushNotificationConfigRequest( + task_id='task1', id='config1' ) - from a2a.utils.errors import ServerError # Local import - with pytest.raises(ServerError) as exc_info: + with pytest.raises(PushNotificationNotSupportedError): await request_handler.on_delete_task_push_notification_config( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, UnsupportedOperationError) @pytest.mark.asyncio -async def test_delete_task_push_notification_config_task_not_found(): +async def test_delete_task_push_notification_config_task_not_found(agent_card): """Test on_delete_task_push_notification_config when task is not found.""" mock_task_store = AsyncMock(spec=TaskStore) mock_task_store.get.return_value = None # Task not found mock_push_store = AsyncMock(spec=PushNotificationConfigStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=mock_push_store, + agent_card=agent_card, ) - params = DeleteTaskPushNotificationConfigParams( - id='non_existent_task', push_notification_config_id='config1' + params = DeleteTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='config1' ) - from a2a.utils.errors import ServerError # Local import context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: + + with pytest.raises(TaskNotFoundError): await request_handler.on_delete_task_push_notification_config( params, context ) - - assert isinstance(exc_info.value.error, TaskNotFoundError) mock_task_store.get.assert_awaited_once_with('non_existent_task', context) mock_push_store.get_info.assert_not_awaited() @pytest.mark.asyncio -async def test_delete_no_task_push_notification_config_info(): +async def test_delete_no_task_push_notification_config_info(agent_card): """Test on_delete_task_push_notification_config without config info""" mock_task_store = AsyncMock(spec=TaskStore) @@ -2330,16 +2389,18 @@ async def test_delete_no_task_push_notification_config_info(): push_store = InMemoryPushNotificationConfigStore() await push_store.set_info( 'task_2', - PushNotificationConfig(id='config_1', url='http://example.com'), + TaskPushNotificationConfig(id='config_1', url='http://example.com'), + create_server_call_context(), ) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = DeleteTaskPushNotificationConfigParams( - id='task1', push_notification_config_id='config_non_existant' + params = DeleteTaskPushNotificationConfigRequest( + task_id='task1', id='config_non_existant' ) result = await request_handler.on_delete_task_push_notification_config( @@ -2347,8 +2408,8 @@ async def test_delete_no_task_push_notification_config_info(): ) assert result is None - params = DeleteTaskPushNotificationConfigParams( - id='task2', push_notification_config_id='config_non_existant' + params = DeleteTaskPushNotificationConfigRequest( + task_id='task2', id='config_non_existant' ) result = await request_handler.on_delete_task_push_notification_config( @@ -2358,32 +2419,36 @@ async def test_delete_no_task_push_notification_config_info(): @pytest.mark.asyncio -async def test_delete_task_push_notification_config_info_with_config(): - """Test on_list_task_push_notification_config with push config+id""" +async def test_delete_task_push_notification_config_info_with_config( + agent_card, +): + """Test on_list_task_push_notification_configs with push config+id""" mock_task_store = AsyncMock(spec=TaskStore) sample_task = create_sample_task(task_id='non_existent_task') mock_task_store.get.return_value = sample_task - push_config1 = PushNotificationConfig( - id='config_1', url='http://example.com' + push_config1 = TaskPushNotificationConfig( + task_id='task_1', id='config_1', url='http://example.com' ) - push_config2 = PushNotificationConfig( - id='config_2', url='http://example.com' + push_config2 = TaskPushNotificationConfig( + task_id='task_1', id='config_2', url='http://example.com' ) push_store = InMemoryPushNotificationConfigStore() - await push_store.set_info('task_1', push_config1) - await push_store.set_info('task_1', push_config2) - await push_store.set_info('task_2', push_config1) + context = create_server_call_context() + await push_store.set_info('task_1', push_config1, context) + await push_store.set_info('task_1', push_config2, context) + await push_store.set_info('task_2', push_config1, context) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = DeleteTaskPushNotificationConfigParams( - id='task_1', push_notification_config_id='config_1' + params = DeleteTaskPushNotificationConfigRequest( + task_id='task_1', id='config_1' ) result1 = await request_handler.on_delete_task_push_notification_config( @@ -2392,38 +2457,42 @@ async def test_delete_task_push_notification_config_info_with_config(): assert result1 is None - result2 = await request_handler.on_list_task_push_notification_config( - ListTaskPushNotificationConfigParams(id='task_1'), + result2 = await request_handler.on_list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest(task_id='task_1'), create_server_call_context(), ) - assert len(result2) == 1 - assert result2[0].task_id == 'task_1' - assert result2[0].push_notification_config == push_config2 + assert len(result2.configs) == 1 + assert result2.configs[0].task_id == 'task_1' + assert result2.configs[0] == push_config2 @pytest.mark.asyncio -async def test_delete_task_push_notification_config_info_with_config_and_no_id(): - """Test on_list_task_push_notification_config with no push config id""" +async def test_delete_task_push_notification_config_info_with_config_and_no_id( + agent_card, +): + """Test on_list_task_push_notification_configs with no push config id""" mock_task_store = AsyncMock(spec=TaskStore) sample_task = create_sample_task(task_id='non_existent_task') mock_task_store.get.return_value = sample_task - push_config = PushNotificationConfig(url='http://example.com') + push_config = TaskPushNotificationConfig(url='http://example.com') # insertion without id should replace the existing config push_store = InMemoryPushNotificationConfigStore() - await push_store.set_info('task_1', push_config) - await push_store.set_info('task_1', push_config) + context = create_server_call_context() + await push_store.set_info('task_1', push_config, context) + await push_store.set_info('task_1', push_config, context) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, push_config_store=push_store, + agent_card=agent_card, ) - params = DeleteTaskPushNotificationConfigParams( - id='task_1', push_notification_config_id='task_1' + params = DeleteTaskPushNotificationConfigRequest( + task_id='task_1', id='task_1' ) result = await request_handler.on_delete_task_push_notification_config( @@ -2432,27 +2501,30 @@ async def test_delete_task_push_notification_config_info_with_config_and_no_id() assert result is None - result2 = await request_handler.on_list_task_push_notification_config( - ListTaskPushNotificationConfigParams(id='task_1'), + result2 = await request_handler.on_list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest(task_id='task_1'), create_server_call_context(), ) - assert len(result2) == 0 + assert len(result2.configs) == 0 TERMINAL_TASK_STATES = { - TaskState.completed, - TaskState.canceled, - TaskState.failed, - TaskState.rejected, + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, } @pytest.mark.asyncio @pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) -async def test_on_message_send_task_in_terminal_state(terminal_state): +async def test_on_message_send_task_in_terminal_state( + terminal_state, agent_card +): """Test on_message_send when task is already in a terminal state.""" - task_id = f'terminal_task_{terminal_state.value}' + state_name = TaskState.Name(terminal_state) + task_id = f'terminal_task_{state_name}' terminal_task = create_sample_task( task_id=task_id, status_state=terminal_state ) @@ -2463,43 +2535,44 @@ async def test_on_message_send_task_in_terminal_state(terminal_state): # So we should patch that instead. request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_terminal', - parts=[], + parts=[Part(text='Test')], task_id=task_id, ) ) - from a2a.utils.errors import ServerError - # Patch the TaskManager's get_task method to return our terminal task with patch( 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', return_value=terminal_task, ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InvalidParamsError) as exc_info: await request_handler.on_message_send( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, InvalidParamsError) - assert exc_info.value.error.message assert ( - f'Task {task_id} is in terminal state: {terminal_state.value}' - in exc_info.value.error.message + f'Task {task_id} is in terminal state: {terminal_state}' + in exc_info.value.message ) @pytest.mark.asyncio @pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) -async def test_on_message_send_stream_task_in_terminal_state(terminal_state): +async def test_on_message_send_stream_task_in_terminal_state( + terminal_state, agent_card +): """Test on_message_send_stream when task is already in a terminal state.""" - task_id = f'terminal_stream_task_{terminal_state.value}' + state_name = TaskState.Name(terminal_state) + task_id = f'terminal_stream_task_{state_name}' terminal_task = create_sample_task( task_id=task_id, status_state=terminal_state ) @@ -2507,43 +2580,44 @@ async def test_on_message_send_stream_task_in_terminal_state(terminal_state): mock_task_store = AsyncMock(spec=TaskStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_terminal_stream', - parts=[], + parts=[Part(text='Test')], task_id=task_id, ) ) - from a2a.utils.errors import ServerError - with patch( 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', return_value=terminal_task, ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InvalidParamsError) as exc_info: async for _ in request_handler.on_message_send_stream( params, create_server_call_context() ): pass # pragma: no cover - assert isinstance(exc_info.value.error, InvalidParamsError) - assert exc_info.value.error.message assert ( - f'Task {task_id} is in terminal state: {terminal_state.value}' - in exc_info.value.error.message + f'Task {task_id} is in terminal state: {terminal_state}' + in exc_info.value.message ) @pytest.mark.asyncio @pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) -async def test_on_resubscribe_to_task_in_terminal_state(terminal_state): - """Test on_resubscribe_to_task when task is in a terminal state.""" - task_id = f'resub_terminal_task_{terminal_state.value}' +async def test_on_subscribe_to_task_in_terminal_state( + terminal_state, agent_card +): + """Test on_subscribe_to_task when task is in a terminal state.""" + state_name = TaskState.Name(terminal_state) + task_id = f'resub_terminal_task_{state_name}' terminal_task = create_sample_task( task_id=task_id, status_state=terminal_state ) @@ -2552,107 +2626,103 @@ async def test_on_resubscribe_to_task_in_terminal_state(terminal_state): mock_task_store.get.return_value = terminal_task request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), + agent_executor=MockAgentExecutor(), task_store=mock_task_store, queue_manager=AsyncMock(spec=QueueManager), + agent_card=agent_card, ) - params = TaskIdParams(id=task_id) - - from a2a.utils.errors import ServerError + params = SubscribeToTaskRequest(id=f'{task_id}') context = create_server_call_context() - with pytest.raises(ServerError) as exc_info: - async for _ in request_handler.on_resubscribe_to_task(params, context): + + with pytest.raises(UnsupportedOperationError) as exc_info: + async for _ in request_handler.on_subscribe_to_task(params, context): pass # pragma: no cover - assert isinstance(exc_info.value.error, InvalidParamsError) - assert exc_info.value.error.message assert ( - f'Task {task_id} is in terminal state: {terminal_state.value}' - in exc_info.value.error.message + f'Task {task_id} is in terminal state: {terminal_state}' + in exc_info.value.message ) - mock_task_store.get.assert_awaited_once_with(task_id, context) + mock_task_store.get.assert_awaited_once_with(f'{task_id}', context) @pytest.mark.asyncio -async def test_on_message_send_task_id_provided_but_task_not_found(): +async def test_on_message_send_task_id_provided_but_task_not_found(agent_card): """Test on_message_send when task_id is provided but task doesn't exist.""" task_id = 'nonexistent_task' mock_task_store = AsyncMock(spec=TaskStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_nonexistent', - parts=[Part(root=TextPart(text='Hello'))], + parts=[Part(text='Hello')], task_id=task_id, context_id='ctx1', ) ) - from a2a.utils.errors import ServerError - # Mock TaskManager.get_task to return None (task not found) with patch( 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', return_value=None, ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError) as exc_info: await request_handler.on_message_send( params, create_server_call_context() ) - assert isinstance(exc_info.value.error, TaskNotFoundError) - assert exc_info.value.error.message assert ( f'Task {task_id} was specified but does not exist' - in exc_info.value.error.message + in exc_info.value.message ) @pytest.mark.asyncio -async def test_on_message_send_stream_task_id_provided_but_task_not_found(): +async def test_on_message_send_stream_task_id_provided_but_task_not_found( + agent_card, +): """Test on_message_send_stream when task_id is provided but task doesn't exist.""" task_id = 'nonexistent_stream_task' mock_task_store = AsyncMock(spec=TaskStore) request_handler = DefaultRequestHandler( - agent_executor=DummyAgentExecutor(), task_store=mock_task_store + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_nonexistent_stream', - parts=[Part(root=TextPart(text='Hello'))], + parts=[Part(text='Hello')], task_id=task_id, context_id='ctx1', ) ) - from a2a.utils.errors import ServerError - # Mock TaskManager.get_task to return None (task not found) with patch( 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', return_value=None, ): - with pytest.raises(ServerError) as exc_info: + with pytest.raises(TaskNotFoundError) as exc_info: # Need to consume the async generator to trigger the error async for _ in request_handler.on_message_send_stream( params, create_server_call_context() ): pass - assert isinstance(exc_info.value.error, TaskNotFoundError) - assert exc_info.value.error.message assert ( f'Task {task_id} was specified but does not exist' - in exc_info.value.error.message + in exc_info.value.message ) @@ -2669,7 +2739,7 @@ async def execute( task_id=context.task_id or str(uuid.uuid4()), context_id=context.context_id or str(uuid.uuid4()), ) - await updater.update_status(TaskState.working) + await updater.update_status(TaskState.TASK_STATE_WORKING) await updater.complete() async def cancel( @@ -2683,21 +2753,23 @@ async def cancel( # we should reconsider the approach. @pytest.mark.asyncio @pytest.mark.timeout(1) -async def test_on_message_send_error_does_not_hang(): +async def test_on_message_send_error_does_not_hang(agent_card): """Test that if the consumer raises an exception during blocking wait, the producer is cancelled and no deadlock occurs.""" agent = HelloWorldAgentExecutor() task_store = AsyncMock(spec=TaskStore) task_store.save.side_effect = RuntimeError('This is an Error!') request_handler = DefaultRequestHandler( - agent_executor=agent, task_store=task_store + agent_executor=agent, + task_store=task_store, + agent_card=agent_card, ) - params = MessageSendParams( + params = SendMessageRequest( message=Message( - role=Role.user, + role=Role.ROLE_USER, message_id='msg_error_blocking', - parts=[Part(root=TextPart(text='Test message'))], + parts=[Part(text='Test message')], ) ) @@ -2705,3 +2777,203 @@ async def test_on_message_send_error_does_not_hang(): await request_handler.on_message_send( params, create_server_call_context() ) + + +@pytest.mark.asyncio +async def test_on_get_task_negative_history_length_error(agent_card): + """Test on_get_task raises error for negative history length.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + # GetTaskRequest also has history_length + params = GetTaskRequest(id='task1', history_length=-1) + context = create_server_call_context() + + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_get_task(params, context) + + assert 'history length must be non-negative' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_list_tasks_page_size_too_small(agent_card): + """Test on_list_tasks raises error for page_size < 1.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(page_size=0) + context = create_server_call_context() + + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + + assert 'minimum page size is 1' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_list_tasks_page_size_too_large(agent_card): + """Test on_list_tasks raises error for page_size > 100.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=agent_card, + ) + params = ListTasksRequest(page_size=101) + context = create_server_call_context() + + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + + assert 'maximum page size is 100' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_message_send_negative_history_length_error(agent_card): + """Test on_message_send raises error for negative history length in configuration.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + request_handler = DefaultRequestHandler( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + agent_card=agent_card, + ) + + message_config = SendMessageConfiguration( + history_length=-1, + accepted_output_modes=['text/plain'], + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, message_id='msg1', parts=[Part(text='Test')] + ), + configuration=message_config, + ) + context = create_server_call_context() + + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_message_send(params, context) + + assert 'history length must be non-negative' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_get_extended_agent_card_success(agent_card): + """Test on_get_extended_agent_card when extended_agent_card is supported.""" + agent_card.capabilities.extended_agent_card = True + + extended_agent_card = AgentCard( + name='Extended Agent', + description='An extended agent', + version='1.0.0', + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), + ) + + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=AsyncMock(spec=TaskStore), + agent_card=agent_card, + extended_agent_card=extended_agent_card, + ) + + params = GetExtendedAgentCardRequest() + context = create_server_call_context() + + result = await request_handler.on_get_extended_agent_card(params, context) + + assert result == extended_agent_card + + +@pytest.mark.asyncio +async def test_on_message_send_stream_unsupported(agent_card): + """Test on_message_send_stream when streaming is unsupported.""" + agent_card.capabilities.streaming = False + + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=AsyncMock(spec=TaskStore), + agent_card=agent_card, + ) + + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg-unsupported', + parts=[Part(text='hi')], + ) + ) + + context = create_server_call_context() + + with pytest.raises(UnsupportedOperationError): + async for _ in request_handler.on_message_send_stream(params, context): + pass + + +@pytest.mark.asyncio +async def test_on_get_extended_agent_card_unsupported(agent_card): + """Test on_get_extended_agent_card when extended_agent_card is unsupported.""" + agent_card.capabilities.extended_agent_card = False + + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=AsyncMock(spec=TaskStore), + agent_card=agent_card, + ) + + params = GetExtendedAgentCardRequest() + context = create_server_call_context() + + with pytest.raises(UnsupportedOperationError): + await request_handler.on_get_extended_agent_card(params, context) + + +@pytest.mark.asyncio +async def test_on_create_task_push_notification_config_unsupported(agent_card): + """Test on_create_task_push_notification_config when push_notifications is unsupported.""" + agent_card.capabilities.push_notifications = False + + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=AsyncMock(spec=TaskStore), + agent_card=agent_card, + ) + + params = TaskPushNotificationConfig(url='http://callback.com/push') + + context = create_server_call_context() + + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_create_task_push_notification_config( + params, context + ) + + +@pytest.mark.asyncio +async def test_on_subscribe_to_task_unsupported(agent_card): + """Test on_subscribe_to_task when streaming is unsupported.""" + agent_card.capabilities.streaming = False + + request_handler = DefaultRequestHandler( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=AsyncMock(spec=TaskStore), + agent_card=agent_card, + ) + + params = SubscribeToTaskRequest(id='some_task') + context = create_server_call_context() + + with pytest.raises(UnsupportedOperationError): + # We need to exhaust the generator to trigger the decorator evaluation + async for _ in request_handler.on_subscribe_to_task(params, context): + pass diff --git a/tests/server/request_handlers/test_default_request_handler_v2.py b/tests/server/request_handlers/test_default_request_handler_v2.py new file mode 100644 index 000000000..e35b8f720 --- /dev/null +++ b/tests/server/request_handlers/test_default_request_handler_v2.py @@ -0,0 +1,1413 @@ +import asyncio +import logging +import time +import uuid + +from unittest.mock import AsyncMock, patch, MagicMock + +import pytest + +from a2a.auth.user import UnauthenticatedUser +from a2a.server.agent_execution import ( + RequestContextBuilder, + AgentExecutor, + RequestContext, + SimpleRequestContextBuilder, +) +from a2a.server.agent_execution.active_task_registry import ActiveTaskRegistry +from a2a.server.context import ServerCallContext +from a2a.server.events import EventQueue, InMemoryQueueManager, QueueManager +from a2a.server.request_handlers import DefaultRequestHandlerV2 +from a2a.server.tasks import ( + InMemoryPushNotificationConfigStore, + InMemoryTaskStore, + PushNotificationConfigStore, + PushNotificationSender, + TaskStore, + TaskUpdater, +) +from a2a.types import ( + InternalError, + InvalidAgentResponseError, + InvalidParamsError, + TaskNotFoundError, + PushNotificationNotSupportedError, +) +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + Artifact, + CancelTaskRequest, + DeleteTaskPushNotificationConfigRequest, + GetTaskPushNotificationConfigRequest, + GetTaskRequest, + ListTaskPushNotificationConfigsRequest, + ListTasksRequest, + ListTasksResponse, + Message, + Part, + Role, + SendMessageConfiguration, + SendMessageRequest, + SubscribeToTaskRequest, + Task, + TaskPushNotificationConfig, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) +from a2a.helpers.proto_helpers import ( + new_text_message, + new_task_from_user_message, +) + + +def create_default_agent_card(): + """Provides a standard AgentCard with streaming and push notifications enabled for tests.""" + return AgentCard( + name='test_agent', + version='1.0', + capabilities=AgentCapabilities(streaming=True, push_notifications=True), + ) + + +class MockAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + if context.message: + await event_queue.enqueue_event( + new_task_from_user_message(context.message) + ) + + task_updater = TaskUpdater( + event_queue, + str(context.task_id or ''), + str(context.context_id or ''), + ) + + async for i in self._run(): + parts = [Part(text=f'Event {i}')] + try: + await task_updater.update_status( + TaskState.TASK_STATE_WORKING, + message=task_updater.new_agent_message(parts), + ) + except RuntimeError: + break + + async def _run(self): + for i in range(1000000): + yield i + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +def create_sample_task( + task_id='task1', + status_state=TaskState.TASK_STATE_SUBMITTED, + context_id='ctx1', +) -> Task: + return Task( + id=task_id, context_id=context_id, status=TaskStatus(state=status_state) + ) + + +def create_server_call_context() -> ServerCallContext: + return ServerCallContext(user=UnauthenticatedUser()) + + +def test_init_default_dependencies(): + """Test that default dependencies are created if not provided.""" + agent_executor = MockAgentExecutor() + task_store = InMemoryTaskStore() + handler = DefaultRequestHandlerV2( + agent_executor=agent_executor, + task_store=task_store, + agent_card=create_default_agent_card(), + ) + assert isinstance(handler._active_task_registry, ActiveTaskRegistry) + assert isinstance( + handler._request_context_builder, SimpleRequestContextBuilder + ) + assert handler._push_config_store is None + assert handler._push_sender is None + assert ( + handler._request_context_builder._should_populate_referred_tasks + is False + ) + assert handler._request_context_builder._task_store == task_store + + +@pytest.mark.asyncio +async def test_on_get_task_not_found(): + """Test on_get_task when task_store.get returns None.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = GetTaskRequest(id='non_existent_task') + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_get_task(params, context) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + + +@pytest.mark.asyncio +async def test_on_list_tasks_success(): + """Test on_list_tasks successfully returns a page of tasks .""" + mock_task_store = AsyncMock(spec=TaskStore) + task2 = create_sample_task(task_id='task2') + task2.artifacts.extend( + [ + Artifact( + artifact_id='artifact1', + parts=[Part(text='Hello world!')], + name='conversion_result', + ) + ] + ) + mock_page = ListTasksResponse( + tasks=[create_sample_task(task_id='task1'), task2], + next_page_token='123', # noqa: S106 + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(include_artifacts=True, page_size=10) + context = create_server_call_context() + result = await request_handler.on_list_tasks(params, context) + mock_task_store.list.assert_awaited_once_with(params, context) + assert result.tasks == mock_page.tasks + assert result.next_page_token == mock_page.next_page_token + + +@pytest.mark.asyncio +async def test_on_list_tasks_excludes_artifacts(): + """Test on_list_tasks excludes artifacts from returned tasks.""" + mock_task_store = AsyncMock(spec=TaskStore) + task2 = create_sample_task(task_id='task2') + task2.artifacts.extend( + [ + Artifact( + artifact_id='artifact1', + parts=[Part(text='Hello world!')], + name='conversion_result', + ) + ] + ) + mock_page = ListTasksResponse( + tasks=[create_sample_task(task_id='task1'), task2], + next_page_token='123', # noqa: S106 + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(include_artifacts=False, page_size=10) + context = create_server_call_context() + result = await request_handler.on_list_tasks(params, context) + assert not result.tasks[1].artifacts + + +@pytest.mark.asyncio +async def test_on_list_tasks_applies_history_length(): + """Test on_list_tasks applies history length filter.""" + mock_task_store = AsyncMock(spec=TaskStore) + history = [ + new_text_message('Hello 1!'), + new_text_message('Hello 2!'), + ] + task2 = create_sample_task(task_id='task2') + task2.history.extend(history) + mock_page = ListTasksResponse( + tasks=[create_sample_task(task_id='task1'), task2], + next_page_token='123', # noqa: S106 + ) + mock_task_store.list.return_value = mock_page + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(history_length=1, page_size=10) + context = create_server_call_context() + result = await request_handler.on_list_tasks(params, context) + assert result.tasks[1].history == [history[1]] + + +@pytest.mark.asyncio +async def test_on_list_tasks_negative_history_length_error(): + """Test on_list_tasks raises error for negative history length.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(history_length=-1, page_size=10) + context = create_server_call_context() + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + assert 'history length must be non-negative' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_cancel_task_task_not_found(): + """Test on_cancel_task when the task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = CancelTaskRequest(id='task_not_found_for_cancel') + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_cancel_task(params, context) + mock_task_store.get.assert_awaited_once_with( + 'task_not_found_for_cancel', context + ) + + +class HelloAgentExecutor(AgentExecutor): + async def execute(self, context: RequestContext, event_queue: EventQueue): + task = context.current_task + if not task: + assert context.message is not None, ( + 'A message is required to create a new task' + ) + task = new_task_from_user_message(context.message) + await event_queue.enqueue_event(task) + updater = TaskUpdater(event_queue, task.id, task.context_id) + try: + parts = [Part(text='I am working')] + await updater.update_status( + TaskState.TASK_STATE_WORKING, + message=updater.new_agent_message(parts), + ) + except Exception as e: # noqa: BLE001 + logging.warning('Error: %s', e) + return + await updater.add_artifact( + [Part(text='Hello world!')], name='conversion_result' + ) + await updater.complete() + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +@pytest.mark.asyncio +async def test_on_get_task_limit_history(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + request_handler = DefaultRequestHandlerV2( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, message_id='msg_push', parts=[Part(text='Hi')] + ), + configuration=SendMessageConfiguration( + accepted_output_modes=['text/plain'] + ), + ) + result = await request_handler.on_message_send( + params, create_server_call_context() + ) + assert result is not None + assert isinstance(result, Task) + get_task_result = await request_handler.on_get_task( + GetTaskRequest(id=result.id, history_length=1), + create_server_call_context(), + ) + assert get_task_result is not None + assert isinstance(get_task_result, Task) + assert ( + get_task_result.history is not None + and len(get_task_result.history) == 1 + ) + + +async def wait_until(predicate, timeout: float = 0.2, interval: float = 0.0): + """Await until predicate() is True or timeout elapses.""" + loop = asyncio.get_running_loop() + end = loop.time() + timeout + while True: + if predicate(): + return + if loop.time() >= end: + raise AssertionError('condition not met within timeout') + await asyncio.sleep(interval) + + +@pytest.mark.asyncio +async def test_set_task_push_notification_config_no_notifier(): + """Test on_create_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, + agent_card=create_default_agent_card(), + ) + params = TaskPushNotificationConfig( + task_id='task1', url='http://example.com' + ) + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_create_task_push_notification_config( + params, create_server_call_context() + ) + + +@pytest.mark.asyncio +async def test_set_task_push_notification_config_task_not_found(): + """Test on_create_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + mock_push_sender = AsyncMock(spec=PushNotificationSender) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + push_sender=mock_push_sender, + agent_card=create_default_agent_card(), + ) + params = TaskPushNotificationConfig( + task_id='non_existent_task', url='http://example.com' + ) + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_create_task_push_notification_config( + params, context + ) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.set_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_no_store(): + """Test on_get_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, + agent_card=create_default_agent_card(), + ) + params = GetTaskPushNotificationConfigRequest( + task_id='task1', id='task_push_notification_config' + ) + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_get_task_push_notification_config( + params, create_server_call_context() + ) + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_task_not_found(): + """Test on_get_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + agent_card=create_default_agent_card(), + ) + params = GetTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='task_push_notification_config' + ) + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_get_task_push_notification_config( + params, context + ) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_not_found(): + """Test on_get_task_push_notification_config when push_config_store.get_info returns None.""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + mock_push_store.get_info.return_value = None + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + agent_card=create_default_agent_card(), + ) + params = GetTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='task_push_notification_config' + ) + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_get_task_push_notification_config( + params, context + ) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_awaited_once_with( + 'non_existent_task', context + ) + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_with_config(): + """Test on_get_task_push_notification_config with valid push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') + push_store = InMemoryPushNotificationConfigStore() + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + set_config_params = TaskPushNotificationConfig( + task_id='task_1', id='config_id', url='http://1.example.com' + ) + context = create_server_call_context() + await request_handler.on_create_task_push_notification_config( + set_config_params, context + ) + params = GetTaskPushNotificationConfigRequest( + task_id='task_1', id='config_id' + ) + result: TaskPushNotificationConfig = ( + await request_handler.on_get_task_push_notification_config( + params, context + ) + ) + assert result is not None + assert result.task_id == 'task_1' + assert result.url == set_config_params.url + assert result.id == 'config_id' + + +@pytest.mark.asyncio +async def test_get_task_push_notification_config_info_with_config_no_id(): + """Test on_get_task_push_notification_config with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') + push_store = InMemoryPushNotificationConfigStore() + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + set_config_params = TaskPushNotificationConfig( + task_id='task_1', url='http://1.example.com' + ) + await request_handler.on_create_task_push_notification_config( + set_config_params, create_server_call_context() + ) + params = GetTaskPushNotificationConfigRequest(task_id='task_1', id='task_1') + result: TaskPushNotificationConfig = ( + await request_handler.on_get_task_push_notification_config( + params, create_server_call_context() + ) + ) + assert result is not None + assert result.task_id == 'task_1' + assert result.url == set_config_params.url + assert result.id == 'task_1' + + +@pytest.mark.asyncio +async def test_on_subscribe_to_task_task_not_found(): + """Test on_subscribe_to_task when the task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = SubscribeToTaskRequest(id='resub_task_not_found') + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + async for _ in request_handler.on_subscribe_to_task(params, context): + pass + mock_task_store.get.assert_awaited_once_with( + 'resub_task_not_found', context + ) + + +@pytest.mark.asyncio +async def test_on_message_send_stream(): + request_handler = DefaultRequestHandlerV2( + MockAgentExecutor(), + InMemoryTaskStore(), + create_default_agent_card(), + ) + message_params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg-123', + parts=[Part(text='How are you?')], + ) + ) + + async def consume_stream(): + events = [] + async for event in request_handler.on_message_send_stream( + message_params, create_server_call_context() + ): + events.append(event) + if len(events) >= 3: + break + return events + + start = time.perf_counter() + events = await consume_stream() + elapsed = time.perf_counter() - start + assert len(events) == 3 + assert elapsed < 0.5 + task, event0, event1 = events + assert isinstance(task, Task) + assert task.history[0].parts[0].text == 'How are you?' + + assert isinstance(event0, TaskStatusUpdateEvent) + assert event0.status.message.parts[0].text == 'Event 0' + + assert isinstance(event1, TaskStatusUpdateEvent) + assert event1.status.message.parts[0].text == 'Event 1' + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_no_store(): + """Test on_list_task_push_notification_configs when _push_config_store is None.""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, + agent_card=create_default_agent_card(), + ) + params = ListTaskPushNotificationConfigsRequest(task_id='task1') + with pytest.raises(PushNotificationNotSupportedError): + await request_handler.on_list_task_push_notification_configs( + params, create_server_call_context() + ) + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_task_not_found(): + """Test on_list_task_push_notification_configs when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + agent_card=create_default_agent_card(), + ) + params = ListTaskPushNotificationConfigsRequest(task_id='non_existent_task') + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_list_task_push_notification_configs( + params, context + ) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_list_no_task_push_notification_config_info(): + """Test on_get_task_push_notification_config when push_config_store.get_info returns []""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + push_store = InMemoryPushNotificationConfigStore() + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = ListTaskPushNotificationConfigsRequest(task_id='non_existent_task') + result = await request_handler.on_list_task_push_notification_configs( + params, create_server_call_context() + ) + assert result.configs == [] + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_info_with_config(): + """Test on_list_task_push_notification_configs with push config+id""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + push_config1 = TaskPushNotificationConfig( + task_id='task_1', id='config_1', url='http://example.com' + ) + push_config2 = TaskPushNotificationConfig( + task_id='task_1', id='config_2', url='http://example.com' + ) + push_store = InMemoryPushNotificationConfigStore() + context = create_server_call_context() + await push_store.set_info('task_1', push_config1, context) + await push_store.set_info('task_1', push_config2, context) + await push_store.set_info('task_2', push_config1, context) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = ListTaskPushNotificationConfigsRequest(task_id='task_1') + result = await request_handler.on_list_task_push_notification_configs( + params, create_server_call_context() + ) + assert len(result.configs) == 2 + assert result.configs[0].task_id == 'task_1' + assert result.configs[0] == push_config1 + assert result.configs[1].task_id == 'task_1' + assert result.configs[1] == push_config2 + + +@pytest.mark.asyncio +async def test_list_task_push_notification_config_info_with_config_and_no_id(): + """Test on_list_task_push_notification_configs with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = Task(id='task_1', context_id='ctx_1') + push_store = InMemoryPushNotificationConfigStore() + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + set_config_params1 = TaskPushNotificationConfig( + task_id='task_1', url='http://1.example.com' + ) + await request_handler.on_create_task_push_notification_config( + set_config_params1, create_server_call_context() + ) + set_config_params2 = TaskPushNotificationConfig( + task_id='task_1', url='http://2.example.com' + ) + await request_handler.on_create_task_push_notification_config( + set_config_params2, create_server_call_context() + ) + params = ListTaskPushNotificationConfigsRequest(task_id='task_1') + result = await request_handler.on_list_task_push_notification_configs( + params, create_server_call_context() + ) + assert len(result.configs) == 1 + assert result.configs[0].task_id == 'task_1' + assert result.configs[0].url == set_config_params2.url + assert result.configs[0].id == 'task_1' + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_no_store(): + """Test on_delete_task_push_notification_config when _push_config_store is None.""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=AsyncMock(spec=TaskStore), + push_config_store=None, + agent_card=create_default_agent_card(), + ) + params = DeleteTaskPushNotificationConfigRequest( + task_id='task1', id='config1' + ) + with pytest.raises(PushNotificationNotSupportedError) as exc_info: + await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert isinstance(exc_info.value, PushNotificationNotSupportedError) + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_task_not_found(): + """Test on_delete_task_push_notification_config when task is not found.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_task_store.get.return_value = None + mock_push_store = AsyncMock(spec=PushNotificationConfigStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=mock_push_store, + agent_card=create_default_agent_card(), + ) + params = DeleteTaskPushNotificationConfigRequest( + task_id='non_existent_task', id='config1' + ) + context = create_server_call_context() + with pytest.raises(TaskNotFoundError): + await request_handler.on_delete_task_push_notification_config( + params, context + ) + mock_task_store.get.assert_awaited_once_with('non_existent_task', context) + mock_push_store.get_info.assert_not_awaited() + + +@pytest.mark.asyncio +async def test_delete_no_task_push_notification_config_info(): + """Test on_delete_task_push_notification_config without config info""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='task_1') + mock_task_store.get.return_value = sample_task + push_store = InMemoryPushNotificationConfigStore() + await push_store.set_info( + 'task_2', + TaskPushNotificationConfig(id='config_1', url='http://example.com'), + create_server_call_context(), + ) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = DeleteTaskPushNotificationConfigRequest( + task_id='task1', id='config_non_existant' + ) + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result is None + params = DeleteTaskPushNotificationConfigRequest( + task_id='task2', id='config_non_existant' + ) + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result is None + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_info_with_config(): + """Test on_list_task_push_notification_configs with push config+id""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + push_config1 = TaskPushNotificationConfig( + task_id='task_1', id='config_1', url='http://example.com' + ) + push_config2 = TaskPushNotificationConfig( + task_id='task_1', id='config_2', url='http://example.com' + ) + push_store = InMemoryPushNotificationConfigStore() + context = create_server_call_context() + await push_store.set_info('task_1', push_config1, context) + await push_store.set_info('task_1', push_config2, context) + await push_store.set_info('task_2', push_config1, context) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = DeleteTaskPushNotificationConfigRequest( + task_id='task_1', id='config_1' + ) + result1 = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result1 is None + result2 = await request_handler.on_list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest(task_id='task_1'), + create_server_call_context(), + ) + assert len(result2.configs) == 1 + assert result2.configs[0].task_id == 'task_1' + assert result2.configs[0] == push_config2 + + +@pytest.mark.asyncio +async def test_delete_task_push_notification_config_info_with_config_and_no_id(): + """Test on_list_task_push_notification_configs with no push config id""" + mock_task_store = AsyncMock(spec=TaskStore) + sample_task = create_sample_task(task_id='non_existent_task') + mock_task_store.get.return_value = sample_task + push_config = TaskPushNotificationConfig(url='http://example.com') + push_store = InMemoryPushNotificationConfigStore() + context = create_server_call_context() + await push_store.set_info('task_1', push_config, context) + await push_store.set_info('task_1', push_config, context) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = DeleteTaskPushNotificationConfigRequest( + task_id='task_1', id='task_1' + ) + result = await request_handler.on_delete_task_push_notification_config( + params, create_server_call_context() + ) + assert result is None + result2 = await request_handler.on_list_task_push_notification_configs( + ListTaskPushNotificationConfigsRequest(task_id='task_1'), + create_server_call_context(), + ) + assert len(result2.configs) == 0 + + +TERMINAL_TASK_STATES = { + TaskState.TASK_STATE_COMPLETED, + TaskState.TASK_STATE_CANCELED, + TaskState.TASK_STATE_FAILED, + TaskState.TASK_STATE_REJECTED, +} + + +@pytest.mark.asyncio +@pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) +async def test_on_message_send_task_in_terminal_state(terminal_state): + """Test on_message_send when task is already in a terminal state.""" + state_name = TaskState.Name(terminal_state) + task_id = f'terminal_task_{state_name}' + terminal_task = create_sample_task( + task_id=task_id, status_state=terminal_state + ) + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_terminal', + parts=[Part(text='hello')], + task_id=task_id, + ) + ) + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=terminal_task, + ), + pytest.raises(InvalidParamsError) as exc_info, + ): + await request_handler.on_message_send( + params, create_server_call_context() + ) + assert ( + f'Task {task_id} is in terminal state: {terminal_state}' + in exc_info.value.message + ) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('terminal_state', TERMINAL_TASK_STATES) +async def test_on_message_send_stream_task_in_terminal_state(terminal_state): + """Test on_message_send_stream when task is already in a terminal state.""" + state_name = TaskState.Name(terminal_state) + task_id = f'terminal_stream_task_{state_name}' + terminal_task = create_sample_task( + task_id=task_id, status_state=terminal_state + ) + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=MockAgentExecutor(), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_terminal_stream', + parts=[Part(text='hello')], + task_id=task_id, + ) + ) + with ( + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=terminal_task, + ), + pytest.raises(InvalidParamsError) as exc_info, + ): + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + assert ( + f'Task {task_id} is in terminal state: {terminal_state}' + in exc_info.value.message + ) + + +@pytest.mark.asyncio +async def test_on_message_send_task_id_provided_but_task_not_found(): + """Test on_message_send when task_id is provided but task doesn't exist.""" + pass + + +@pytest.mark.asyncio +async def test_on_message_send_stream_task_id_provided_but_task_not_found(): + """Test on_message_send_stream when task_id is provided but task doesn't exist.""" + pass + + +class HelloWorldAgentExecutor(AgentExecutor): + """Test Agent Implementation.""" + + async def execute( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + if context.message: + await event_queue.enqueue_event( + new_task_from_user_message(context.message) + ) + updater = TaskUpdater( + event_queue, + task_id=context.task_id or str(uuid.uuid4()), + context_id=context.context_id or str(uuid.uuid4()), + ) + await updater.update_status(TaskState.TASK_STATE_WORKING) + await updater.complete() + + async def cancel( + self, context: RequestContext, event_queue: EventQueue + ) -> None: + raise NotImplementedError('cancel not supported') + + +@pytest.mark.asyncio +@pytest.mark.timeout(1) +async def test_on_message_send_error_does_not_hang(): + """Test that if the consumer raises an exception during blocking wait, the producer is cancelled and no deadlock occurs.""" + agent = HelloWorldAgentExecutor() + task_store = AsyncMock(spec=TaskStore) + task_store.get.return_value = None + task_store.save.side_effect = RuntimeError('This is an Error!') + + request_handler = DefaultRequestHandlerV2( + agent_executor=agent, + task_store=task_store, + agent_card=create_default_agent_card(), + ) + + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_error_blocking', + parts=[Part(text='Test message')], + ) + ) + with pytest.raises(RuntimeError, match='This is an Error!'): + await request_handler.on_message_send( + params, create_server_call_context() + ) + + +@pytest.mark.asyncio +async def test_on_get_task_negative_history_length_error(): + """Test on_get_task raises error for negative history length.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = GetTaskRequest(id='task1', history_length=-1) + context = create_server_call_context() + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_get_task(params, context) + assert 'history length must be non-negative' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_list_tasks_page_size_too_small(): + """Test on_list_tasks raises error for page_size < 1.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(page_size=0) + context = create_server_call_context() + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + assert 'minimum page size is 1' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_list_tasks_page_size_too_large(): + """Test on_list_tasks raises error for page_size > 100.""" + mock_task_store = AsyncMock(spec=TaskStore) + request_handler = DefaultRequestHandlerV2( + agent_executor=AsyncMock(spec=AgentExecutor), + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + params = ListTasksRequest(page_size=101) + context = create_server_call_context() + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_list_tasks(params, context) + assert 'maximum page size is 100' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_message_send_negative_history_length_error(): + """Test on_message_send raises error for negative history length in configuration.""" + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + request_handler = DefaultRequestHandlerV2( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + agent_card=create_default_agent_card(), + ) + message_config = SendMessageConfiguration( + history_length=-1, accepted_output_modes=['text/plain'] + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, message_id='msg1', parts=[Part(text='hello')] + ), + configuration=message_config, + ) + context = create_server_call_context() + with pytest.raises(InvalidParamsError) as exc_info: + await request_handler.on_message_send(params, context) + assert 'history length must be non-negative' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_message_send_limit_history(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandlerV2( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_push', + parts=[Part(text='Hi')], + ), + configuration=SendMessageConfiguration( + accepted_output_modes=['text/plain'], + history_length=1, + ), + ) + + context = create_server_call_context() + result = await request_handler.on_message_send(params, context) + + # verify that history_length is honored + assert result is not None + assert isinstance(result, Task) + assert result.history is not None and len(result.history) == 1 + assert result.status.state == TaskState.TASK_STATE_COMPLETED + + # verify that history is still persisted to the store + task = await task_store.get(result.id, context) + assert task is not None + assert task.history is not None and len(task.history) > 1 + + +@pytest.mark.asyncio +async def test_on_message_send_stream_task_id_mismatch(): + mock_task_store = AsyncMock(spec=TaskStore) + mock_agent_executor = AsyncMock(spec=AgentExecutor) + mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) + + context_task_id = 'context_task_id_stream_1' + result_task_id = 'DIFFERENT_task_id_stream_1' + + mock_request_context = MagicMock() + mock_request_context.task_id = context_task_id + mock_request_context_builder.build.return_value = mock_request_context + + request_handler = DefaultRequestHandlerV2( + agent_executor=mock_agent_executor, + task_store=mock_task_store, + request_context_builder=mock_request_context_builder, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_id_mismatch_stream', + parts=[Part(text='hello')], + ) + ) + + mismatched_task = create_sample_task(task_id=result_task_id) + + async def mock_subscribe(request=None, include_initial_task=False): + yield mismatched_task + + mock_active_task = MagicMock() + mock_active_task.subscribe.side_effect = mock_subscribe + mock_active_task.start = AsyncMock() + mock_active_task.enqueue_request = AsyncMock() + + with ( + patch.object( + request_handler._active_task_registry, + 'get_or_create', + return_value=mock_active_task, + ), + patch( + 'a2a.server.request_handlers.default_request_handler.TaskManager.get_task', + return_value=None, + ), + ): + stream = request_handler.on_message_send_stream( + params, context=MagicMock() + ) + with pytest.raises(InternalError) as exc_info: + async for _ in stream: + pass + assert 'Task ID mismatch' in exc_info.value.message + + +@pytest.mark.asyncio +async def test_on_message_send_non_blocking(): + task_store = InMemoryTaskStore() + push_store = InMemoryPushNotificationConfigStore() + + request_handler = DefaultRequestHandlerV2( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_push_non_blocking', + parts=[Part(text='Hi')], + ), + configuration=SendMessageConfiguration( + return_immediately=True, + ), + ) + + context = create_server_call_context() + result = await request_handler.on_message_send(params, context) + + # non-blocking should return the task immediately + assert result is not None + assert isinstance(result, Task) + assert result.status.state == TaskState.TASK_STATE_SUBMITTED + + +@pytest.mark.asyncio +async def test_on_message_send_with_push_notification(): + task_store = InMemoryTaskStore() + push_store = AsyncMock(spec=PushNotificationConfigStore) + + request_handler = DefaultRequestHandlerV2( + agent_executor=HelloAgentExecutor(), + task_store=task_store, + push_config_store=push_store, + agent_card=create_default_agent_card(), + ) + push_config = TaskPushNotificationConfig(url='http://example.com/webhook') + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_push_1', + parts=[Part(text='Hi')], + ), + configuration=SendMessageConfiguration( + task_push_notification_config=push_config + ), + ) + + context = create_server_call_context() + result = await request_handler.on_message_send(params, context) + + assert result is not None + assert isinstance(result, Task) + push_store.set_info.assert_awaited_once_with( + result.id, push_config, context + ) + + +class MultipleMessagesAgentExecutor(AgentExecutor): + """Misbehaving agent that yields more than one Message.""" + + async def execute(self, context: RequestContext, event_queue: EventQueue): + await event_queue.enqueue_event( + new_text_message('first', role=Role.ROLE_AGENT) + ) + await event_queue.enqueue_event( + new_text_message('second', role=Role.ROLE_AGENT) + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +class MessageAfterTaskEventAgentExecutor(AgentExecutor): + """Misbehaving agent that yields a task-mode event then a Message.""" + + async def execute(self, context: RequestContext, event_queue: EventQueue): + task = new_task_from_user_message(context.message) + await event_queue.enqueue_event(task) + updater = TaskUpdater(event_queue, task.id, task.context_id) + await updater.update_status(TaskState.TASK_STATE_WORKING) + await event_queue.enqueue_event( + new_text_message('stray message', role=Role.ROLE_AGENT) + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +class TaskEventAfterMessageAgentExecutor(AgentExecutor): + """Misbehaving agent that yields a Message and then a task-mode event.""" + + async def execute(self, context: RequestContext, event_queue: EventQueue): + await event_queue.enqueue_event( + new_text_message('only message', role=Role.ROLE_AGENT) + ) + await event_queue.enqueue_event( + TaskStatusUpdateEvent( + task_id=str(context.task_id or ''), + context_id=str(context.context_id or ''), + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +class EventAfterTerminalStateAgentExecutor(AgentExecutor): + """Misbehaving agent that yields an event after reaching a terminal state.""" + + async def execute(self, context: RequestContext, event_queue: EventQueue): + task = new_task_from_user_message(context.message) + await event_queue.enqueue_event(task) + updater = TaskUpdater(event_queue, task.id, task.context_id) + await updater.complete() + await event_queue.enqueue_event( + new_text_message('after terminal', role=Role.ROLE_AGENT) + ) + + async def cancel(self, context: RequestContext, event_queue: EventQueue): + pass + + +@pytest.mark.asyncio +@pytest.mark.timeout(1) +async def test_on_message_send_stream_rejects_multiple_messages(): + """Stream surfaces InvalidAgentResponseError when the agent yields a + second Message after the first one (see comment in on_message_send_stream).""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MultipleMessagesAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_multi_stream', + parts=[Part(text='Hi')], + ) + ) + with pytest.raises(InvalidAgentResponseError, match='Multiple Message'): + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + + +@pytest.mark.asyncio +@pytest.mark.timeout(1) +async def test_on_message_send_stream_rejects_message_after_task_event(): + """Stream surfaces InvalidAgentResponseError when the agent yields a + Message after entering task mode (see comment in on_message_send_stream).""" + request_handler = DefaultRequestHandlerV2( + agent_executor=MessageAfterTaskEventAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_after_task_stream', + parts=[Part(text='Hi')], + ) + ) + with pytest.raises( + InvalidAgentResponseError, match='Message object in task mode' + ): + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + + +@pytest.mark.asyncio +@pytest.mark.timeout(1) +async def test_on_message_send_stream_rejects_task_event_after_message(): + """Stream surfaces InvalidAgentResponseError when the agent yields a + task-mode event after a Message (see comment in on_message_send_stream).""" + request_handler = DefaultRequestHandlerV2( + agent_executor=TaskEventAfterMessageAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_then_task_stream', + parts=[Part(text='Hi')], + ) + ) + with pytest.raises(InvalidAgentResponseError, match='in message mode'): + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass + + +@pytest.mark.asyncio +@pytest.mark.timeout(1) +async def test_on_message_send_stream_rejects_event_after_terminal_state(): + """Stream surfaces InvalidAgentResponseError when the agent yields an event + after reaching a terminal state (see comment in on_message_send_stream).""" + request_handler = DefaultRequestHandlerV2( + agent_executor=EventAfterTerminalStateAgentExecutor(), + task_store=InMemoryTaskStore(), + agent_card=create_default_agent_card(), + ) + params = SendMessageRequest( + message=Message( + role=Role.ROLE_USER, + message_id='msg_after_terminal_stream', + parts=[Part(text='Hi')], + ) + ) + with pytest.raises( + InvalidAgentResponseError, match='Message object in task mode' + ): + async for _ in request_handler.on_message_send_stream( + params, create_server_call_context() + ): + pass diff --git a/tests/server/request_handlers/test_grpc_handler.py b/tests/server/request_handlers/test_grpc_handler.py index 9d8da2bb4..d140d3d7b 100644 --- a/tests/server/request_handlers/test_grpc_handler.py +++ b/tests/server/request_handlers/test_grpc_handler.py @@ -1,15 +1,16 @@ +from typing import Any from unittest.mock import AsyncMock, MagicMock import grpc import grpc.aio import pytest +from google.rpc import error_details_pb2, status_pb2 from a2a import types from a2a.extensions.common import HTTP_EXTENSION_HEADER -from a2a.grpc import a2a_pb2 from a2a.server.context import ServerCallContext from a2a.server.request_handlers import GrpcHandler, RequestHandler -from a2a.utils.errors import ServerError +from a2a.types import a2a_pb2 # --- Fixtures --- @@ -33,7 +34,11 @@ def sample_agent_card() -> types.AgentCard: return types.AgentCard( name='Test Agent', description='A test agent', - url='http://localhost', + supported_interfaces=[ + types.AgentInterface( + protocol_binding='GRPC', url='http://localhost' + ) + ], version='1.0.0', capabilities=types.AgentCapabilities( streaming=True, push_notifications=True @@ -48,9 +53,8 @@ def sample_agent_card() -> types.AgentCard: def grpc_handler( mock_request_handler: AsyncMock, sample_agent_card: types.AgentCard ) -> GrpcHandler: - return GrpcHandler( - agent_card=sample_agent_card, request_handler=mock_request_handler - ) + mock_request_handler._agent_card = sample_agent_card + return GrpcHandler(request_handler=mock_request_handler) # --- Test Cases --- @@ -64,12 +68,12 @@ async def test_send_message_success( ) -> None: """Test successful SendMessage call.""" request_proto = a2a_pb2.SendMessageRequest( - request=a2a_pb2.Message(message_id='msg-1') + message=a2a_pb2.Message(message_id='msg-1') ) response_model = types.Task( id='task-1', context_id='ctx-1', - status=types.TaskStatus(state=types.TaskState.completed), + status=types.TaskStatus(state=types.TaskState.TASK_STATE_COMPLETED), ) mock_request_handler.on_message_send.return_value = response_model @@ -87,15 +91,15 @@ async def test_send_message_server_error( mock_request_handler: AsyncMock, mock_grpc_context: AsyncMock, ) -> None: - """Test SendMessage call when handler raises a ServerError.""" + """Test SendMessage call when handler raises an A2AError.""" request_proto = a2a_pb2.SendMessageRequest() - error = ServerError(error=types.InvalidParamsError(message='Bad params')) + error = types.InvalidParamsError(message='Bad params') mock_request_handler.on_message_send.side_effect = error await grpc_handler.SendMessage(request_proto, mock_grpc_context) mock_grpc_context.abort.assert_awaited_once_with( - grpc.StatusCode.INVALID_ARGUMENT, 'InvalidParamsError: Bad params' + grpc.StatusCode.INVALID_ARGUMENT, 'Bad params' ) @@ -106,11 +110,11 @@ async def test_get_task_success( mock_grpc_context: AsyncMock, ) -> None: """Test successful GetTask call.""" - request_proto = a2a_pb2.GetTaskRequest(name='tasks/task-1') + request_proto = a2a_pb2.GetTaskRequest(id='task-1') response_model = types.Task( id='task-1', context_id='ctx-1', - status=types.TaskStatus(state=types.TaskState.working), + status=types.TaskStatus(state=types.TaskState.TASK_STATE_WORKING), ) mock_request_handler.on_get_task.return_value = response_model @@ -128,32 +132,13 @@ async def test_get_task_not_found( mock_grpc_context: AsyncMock, ) -> None: """Test GetTask call when task is not found.""" - request_proto = a2a_pb2.GetTaskRequest(name='tasks/task-1') + request_proto = a2a_pb2.GetTaskRequest(id='task-1') mock_request_handler.on_get_task.return_value = None await grpc_handler.GetTask(request_proto, mock_grpc_context) mock_grpc_context.abort.assert_awaited_once_with( - grpc.StatusCode.NOT_FOUND, 'TaskNotFoundError: Task not found' - ) - - -@pytest.mark.asyncio -async def test_cancel_task_server_error( - grpc_handler: GrpcHandler, - mock_request_handler: AsyncMock, - mock_grpc_context: AsyncMock, -) -> None: - """Test CancelTask call when handler raises ServerError.""" - request_proto = a2a_pb2.CancelTaskRequest(name='tasks/task-1') - error = ServerError(error=types.TaskNotCancelableError()) - mock_request_handler.on_cancel_task.side_effect = error - - await grpc_handler.CancelTask(request_proto, mock_grpc_context) - - mock_grpc_context.abort.assert_awaited_once_with( - grpc.StatusCode.UNIMPLEMENTED, - 'TaskNotCancelableError: Task cannot be canceled', + grpc.StatusCode.NOT_FOUND, 'Task not found' ) @@ -169,10 +154,14 @@ async def mock_stream(): yield types.Task( id='task-1', context_id='ctx-1', - status=types.TaskStatus(state=types.TaskState.working), + status=types.TaskStatus(state=types.TaskState.TASK_STATE_WORKING), ) - mock_request_handler.on_message_send_stream.return_value = mock_stream() + # Use MagicMock because on_message_send_stream is an async generator, + # and we iterate over it directly. AsyncMock would return a coroutine. + mock_request_handler.on_message_send_stream = MagicMock( + return_value=mock_stream() + ) request_proto = a2a_pb2.SendMessageRequest() results = [ @@ -188,43 +177,55 @@ async def mock_stream(): @pytest.mark.asyncio -async def test_get_agent_card( +async def test_get_extended_agent_card( grpc_handler: GrpcHandler, sample_agent_card: types.AgentCard, mock_grpc_context: AsyncMock, + mock_request_handler: AsyncMock, ) -> None: - """Test GetAgentCard call.""" - request_proto = a2a_pb2.GetAgentCardRequest() - response = await grpc_handler.GetAgentCard(request_proto, mock_grpc_context) + """Test GetExtendedAgentCard call.""" + async def to_coro(*args, **kwargs): + return sample_agent_card + + mock_request_handler.on_get_extended_agent_card.side_effect = to_coro + request_proto = a2a_pb2.GetExtendedAgentCardRequest() + response = await grpc_handler.GetExtendedAgentCard( + request_proto, mock_grpc_context + ) + mock_request_handler.on_get_extended_agent_card.assert_awaited_once() assert response.name == sample_agent_card.name assert response.version == sample_agent_card.version @pytest.mark.asyncio -async def test_get_agent_card_with_modifier( +async def test_get_extended_agent_card_with_modifier( mock_request_handler: AsyncMock, sample_agent_card: types.AgentCard, mock_grpc_context: AsyncMock, ) -> None: - """Test GetAgentCard call with a card_modifier.""" + """Test GetExtendedAgentCard call with a card_modifier.""" async def modifier(card: types.AgentCard) -> types.AgentCard: - modified_card = card.model_copy(deep=True) + modified_card = types.AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Modified gRPC Agent' return modified_card - grpc_handler_modified = GrpcHandler( - agent_card=sample_agent_card, - request_handler=mock_request_handler, - card_modifier=modifier, - ) + # Use side_effect to ensure it returns an awaitable + async def side_effect_func(*_args, **_kwargs): + return await modifier(sample_agent_card) - request_proto = a2a_pb2.GetAgentCardRequest() - response = await grpc_handler_modified.GetAgentCard( + mock_request_handler.on_get_extended_agent_card.side_effect = ( + side_effect_func + ) + mock_request_handler._agent_card = sample_agent_card + grpc_handler_modified = GrpcHandler(request_handler=mock_request_handler) + request_proto = a2a_pb2.GetExtendedAgentCardRequest() + response = await grpc_handler_modified.GetExtendedAgentCard( request_proto, mock_grpc_context ) - + mock_request_handler.on_get_extended_agent_card.assert_awaited_once() assert response.name == 'Modified gRPC Agent' assert response.version == sample_agent_card.version @@ -238,107 +239,174 @@ async def test_get_agent_card_with_modifier_sync( """Test GetAgentCard call with a synchronous card_modifier.""" def modifier(card: types.AgentCard) -> types.AgentCard: - modified_card = card.model_copy(deep=True) + # For proto, we need to create a new message with modified fields + modified_card = types.AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Modified gRPC Agent' return modified_card - grpc_handler_modified = GrpcHandler( - agent_card=sample_agent_card, - request_handler=mock_request_handler, - card_modifier=modifier, - ) + async def async_modifier(*args, **kwargs): + return modifier(sample_agent_card) - request_proto = a2a_pb2.GetAgentCardRequest() - response = await grpc_handler_modified.GetAgentCard( + mock_request_handler.on_get_extended_agent_card.side_effect = async_modifier + mock_request_handler._agent_card = sample_agent_card + grpc_handler_modified = GrpcHandler(request_handler=mock_request_handler) + request_proto = a2a_pb2.GetExtendedAgentCardRequest() + response = await grpc_handler_modified.GetExtendedAgentCard( request_proto, mock_grpc_context ) - + mock_request_handler.on_get_extended_agent_card.assert_awaited_once() assert response.name == 'Modified gRPC Agent' assert response.version == sample_agent_card.version +@pytest.mark.asyncio +async def test_list_tasks_success( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +): + """Test successful ListTasks call.""" + mock_request_handler.on_list_tasks.return_value = a2a_pb2.ListTasksResponse( + next_page_token='123', + tasks=[ + types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus( + state=types.TaskState.TASK_STATE_COMPLETED + ), + ), + types.Task( + id='task-2', + context_id='ctx-1', + status=types.TaskStatus( + state=types.TaskState.TASK_STATE_WORKING + ), + ), + ], + ) + + response = await grpc_handler.ListTasks( + a2a_pb2.ListTasksRequest(page_size=2), mock_grpc_context + ) + + mock_request_handler.on_list_tasks.assert_awaited_once() + assert isinstance(response, a2a_pb2.ListTasksResponse) + assert len(response.tasks) == 2 + assert response.tasks[0].id == 'task-1' + assert response.tasks[1].id == 'task-2' + + @pytest.mark.asyncio @pytest.mark.parametrize( - 'server_error, grpc_status_code, error_message_part', + 'a2a_error, grpc_status_code, error_message_part', [ ( - ServerError(error=types.JSONParseError()), - grpc.StatusCode.INTERNAL, - 'JSONParseError', - ), - ( - ServerError(error=types.InvalidRequestError()), + types.InvalidRequestError(), grpc.StatusCode.INVALID_ARGUMENT, 'InvalidRequestError', ), ( - ServerError(error=types.MethodNotFoundError()), + types.MethodNotFoundError(), grpc.StatusCode.NOT_FOUND, 'MethodNotFoundError', ), ( - ServerError(error=types.InvalidParamsError()), + types.InvalidParamsError(), grpc.StatusCode.INVALID_ARGUMENT, 'InvalidParamsError', ), ( - ServerError(error=types.InternalError()), + types.InternalError(), grpc.StatusCode.INTERNAL, 'InternalError', ), ( - ServerError(error=types.TaskNotFoundError()), + types.TaskNotFoundError(), grpc.StatusCode.NOT_FOUND, 'TaskNotFoundError', ), ( - ServerError(error=types.TaskNotCancelableError()), - grpc.StatusCode.UNIMPLEMENTED, + types.TaskNotCancelableError(), + grpc.StatusCode.FAILED_PRECONDITION, 'TaskNotCancelableError', ), ( - ServerError(error=types.PushNotificationNotSupportedError()), + types.PushNotificationNotSupportedError(), grpc.StatusCode.UNIMPLEMENTED, 'PushNotificationNotSupportedError', ), ( - ServerError(error=types.UnsupportedOperationError()), + types.UnsupportedOperationError(), grpc.StatusCode.UNIMPLEMENTED, 'UnsupportedOperationError', ), ( - ServerError(error=types.ContentTypeNotSupportedError()), - grpc.StatusCode.UNIMPLEMENTED, + types.ContentTypeNotSupportedError(), + grpc.StatusCode.INVALID_ARGUMENT, 'ContentTypeNotSupportedError', ), ( - ServerError(error=types.InvalidAgentResponseError()), + types.InvalidAgentResponseError(), grpc.StatusCode.INTERNAL, 'InvalidAgentResponseError', ), - ( - ServerError(error=types.JSONRPCError(code=99, message='Unknown')), - grpc.StatusCode.UNKNOWN, - 'Unknown error', - ), ], ) -async def test_abort_context_error_mapping( # noqa: PLR0913 +async def test_abort_context_error_mapping( grpc_handler: GrpcHandler, mock_request_handler: AsyncMock, mock_grpc_context: AsyncMock, - server_error: ServerError, + a2a_error: Exception, grpc_status_code: grpc.StatusCode, error_message_part: str, ) -> None: - mock_request_handler.on_get_task.side_effect = server_error - request_proto = a2a_pb2.GetTaskRequest(name='tasks/any') + mock_request_handler.on_get_task.side_effect = a2a_error + request_proto = a2a_pb2.GetTaskRequest(id='any') await grpc_handler.GetTask(request_proto, mock_grpc_context) mock_grpc_context.abort.assert_awaited_once() call_args, _ = mock_grpc_context.abort.call_args assert call_args[0] == grpc_status_code - assert error_message_part in call_args[1] + + # We shouldn't rely on the legacy ExceptionName: message string format + # But for backward compatability fallback it shouldn't fail + mock_grpc_context.set_trailing_metadata.assert_called_once() + metadata = mock_grpc_context.set_trailing_metadata.call_args[0][0] + + assert any(key == 'grpc-status-details-bin' for key, _ in metadata) + + +@pytest.mark.asyncio +async def test_abort_context_rich_error_format( + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, +) -> None: + + error = types.TaskNotFoundError('Could not find the task') + mock_request_handler.on_get_task.side_effect = error + request_proto = a2a_pb2.GetTaskRequest(id='any') + await grpc_handler.GetTask(request_proto, mock_grpc_context) + + mock_grpc_context.set_trailing_metadata.assert_called_once() + metadata = mock_grpc_context.set_trailing_metadata.call_args[0][0] + + bin_values = [v for k, v in metadata if k == 'grpc-status-details-bin'] + assert len(bin_values) == 1 + + status = status_pb2.Status.FromString(bin_values[0]) + assert status.code == grpc.StatusCode.NOT_FOUND.value[0] + assert status.message == 'Could not find the task' + + assert len(status.details) == 1 + + error_info = error_details_pb2.ErrorInfo() + status.details[0].Unpack(error_info) + + assert error_info.reason == 'TASK_NOT_FOUND' + assert error_info.domain == 'a2a-protocol.org' @pytest.mark.asyncio @@ -353,17 +421,11 @@ async def test_send_message_with_extensions( (HTTP_EXTENSION_HEADER.lower(), 'foo'), (HTTP_EXTENSION_HEADER.lower(), 'bar'), ) - - def side_effect(request, context: ServerCallContext): - context.activated_extensions.add('foo') - context.activated_extensions.add('baz') - return types.Task( - id='task-1', - context_id='ctx-1', - status=types.TaskStatus(state=types.TaskState.completed), - ) - - mock_request_handler.on_message_send.side_effect = side_effect + mock_request_handler.on_message_send.return_value = types.Task( + id='task-1', + context_id='ctx-1', + status=types.TaskStatus(state=types.TaskState.TASK_STATE_COMPLETED), + ) await grpc_handler.SendMessage( a2a_pb2.SendMessageRequest(), mock_grpc_context @@ -374,15 +436,6 @@ def side_effect(request, context: ServerCallContext): assert isinstance(call_context, ServerCallContext) assert call_context.requested_extensions == {'foo', 'bar'} - mock_grpc_context.set_trailing_metadata.assert_called_once() - called_metadata = ( - mock_grpc_context.set_trailing_metadata.call_args.args[0] - ) - assert set(called_metadata) == { - (HTTP_EXTENSION_HEADER.lower(), 'foo'), - (HTTP_EXTENSION_HEADER.lower(), 'baz'), - } - async def test_send_message_with_comma_separated_extensions( self, grpc_handler: GrpcHandler, @@ -395,8 +448,8 @@ async def test_send_message_with_comma_separated_extensions( ) mock_request_handler.on_message_send.return_value = types.Message( message_id='1', - role=types.Role.agent, - parts=[types.Part(root=types.TextPart(text='test'))], + role=types.Role.ROLE_AGENT, + parts=[types.Part(text='test')], ) await grpc_handler.SendMessage( @@ -420,12 +473,12 @@ async def test_send_streaming_message_with_extensions( ) async def side_effect(request, context: ServerCallContext): - context.activated_extensions.add('foo') - context.activated_extensions.add('baz') yield types.Task( id='task-1', context_id='ctx-1', - status=types.TaskStatus(state=types.TaskState.working), + status=types.TaskStatus( + state=types.TaskState.TASK_STATE_WORKING + ), ) mock_request_handler.on_message_send_stream.side_effect = side_effect @@ -445,11 +498,252 @@ async def side_effect(request, context: ServerCallContext): assert isinstance(call_context, ServerCallContext) assert call_context.requested_extensions == {'foo', 'bar'} - mock_grpc_context.set_trailing_metadata.assert_called_once() - called_metadata = ( - mock_grpc_context.set_trailing_metadata.call_args.args[0] - ) - assert set(called_metadata) == { - (HTTP_EXTENSION_HEADER.lower(), 'foo'), - (HTTP_EXTENSION_HEADER.lower(), 'baz'), - } + +@pytest.mark.asyncio +class TestTenantExtraction: + @pytest.mark.parametrize( + 'method_name, request_proto, handler_method_name, return_value', + [ + ( + 'SendMessage', + a2a_pb2.SendMessageRequest(tenant='my-tenant'), + 'on_message_send', + types.Message(), + ), + ( + 'CancelTask', + a2a_pb2.CancelTaskRequest(tenant='my-tenant', id='1'), + 'on_cancel_task', + types.Task(id='1'), + ), + ( + 'GetTask', + a2a_pb2.GetTaskRequest(tenant='my-tenant', id='1'), + 'on_get_task', + types.Task(id='1'), + ), + ( + 'ListTasks', + a2a_pb2.ListTasksRequest(tenant='my-tenant'), + 'on_list_tasks', + a2a_pb2.ListTasksResponse(), + ), + ( + 'GetTaskPushNotificationConfig', + a2a_pb2.GetTaskPushNotificationConfigRequest( + tenant='my-tenant', task_id='1', id='c1' + ), + 'on_get_task_push_notification_config', + a2a_pb2.TaskPushNotificationConfig(), + ), + ( + 'CreateTaskPushNotificationConfig', + a2a_pb2.TaskPushNotificationConfig( + tenant='my-tenant', + task_id='1', + ), + 'on_create_task_push_notification_config', + a2a_pb2.TaskPushNotificationConfig(), + ), + ( + 'ListTaskPushNotificationConfigs', + a2a_pb2.ListTaskPushNotificationConfigsRequest( + tenant='my-tenant', task_id='1' + ), + 'on_list_task_push_notification_configs', + a2a_pb2.ListTaskPushNotificationConfigsResponse(), + ), + ( + 'DeleteTaskPushNotificationConfig', + a2a_pb2.DeleteTaskPushNotificationConfigRequest( + tenant='my-tenant', task_id='1', id='c1' + ), + 'on_delete_task_push_notification_config', + None, + ), + ], + ) + async def test_non_streaming_tenant_extraction( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + method_name: str, + request_proto: Any, + handler_method_name: str, + return_value: Any, + ) -> None: + handler_mock = getattr(mock_request_handler, handler_method_name) + handler_mock.return_value = return_value + + grpc_method = getattr(grpc_handler, method_name) + await grpc_method(request_proto, mock_grpc_context) + + handler_mock.assert_awaited_once() + call_args = handler_mock.call_args + server_context = call_args[0][1] + assert isinstance(server_context, ServerCallContext) + assert server_context.tenant == 'my-tenant' + + @pytest.mark.parametrize( + 'method_name, request_proto, handler_method_name', + [ + ( + 'SendStreamingMessage', + a2a_pb2.SendMessageRequest(tenant='my-tenant'), + 'on_message_send_stream', + ), + ( + 'SubscribeToTask', + a2a_pb2.SubscribeToTaskRequest(tenant='my-tenant', id='1'), + 'on_subscribe_to_task', + ), + ], + ) + async def test_streaming_tenant_extraction( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + method_name: str, + request_proto: Any, + handler_method_name: str, + ) -> None: + async def mock_stream(*args, **kwargs): + yield types.Message(message_id='msg-1') + + handler_mock_attr = MagicMock(return_value=mock_stream()) + setattr(mock_request_handler, handler_method_name, handler_mock_attr) + + grpc_method = getattr(grpc_handler, method_name) + + async for _ in grpc_method(request_proto, mock_grpc_context): + pass + + handler_mock_attr.assert_called_once() + call_args = handler_mock_attr.call_args + server_context = call_args[0][1] + assert isinstance(server_context, ServerCallContext) + assert server_context.tenant == 'my-tenant' + + @pytest.mark.parametrize( + 'method_name, request_proto, handler_method_name, return_value', + [ + ( + 'SendMessage', + a2a_pb2.SendMessageRequest(), + 'on_message_send', + types.Message(), + ), + ( + 'CancelTask', + a2a_pb2.CancelTaskRequest(id='1'), + 'on_cancel_task', + types.Task(id='1'), + ), + ( + 'GetTask', + a2a_pb2.GetTaskRequest(id='1'), + 'on_get_task', + types.Task(id='1'), + ), + ( + 'ListTasks', + a2a_pb2.ListTasksRequest(), + 'on_list_tasks', + a2a_pb2.ListTasksResponse(), + ), + ( + 'GetTaskPushNotificationConfig', + a2a_pb2.GetTaskPushNotificationConfigRequest( + task_id='1', id='c1' + ), + 'on_get_task_push_notification_config', + a2a_pb2.TaskPushNotificationConfig(), + ), + ( + 'CreateTaskPushNotificationConfig', + a2a_pb2.TaskPushNotificationConfig( + task_id='1', + ), + 'on_create_task_push_notification_config', + a2a_pb2.TaskPushNotificationConfig(), + ), + ( + 'ListTaskPushNotificationConfigs', + a2a_pb2.ListTaskPushNotificationConfigsRequest(task_id='1'), + 'on_list_task_push_notification_configs', + a2a_pb2.ListTaskPushNotificationConfigsResponse(), + ), + ( + 'DeleteTaskPushNotificationConfig', + a2a_pb2.DeleteTaskPushNotificationConfigRequest( + task_id='1', id='c1' + ), + 'on_delete_task_push_notification_config', + None, + ), + ], + ) + async def test_non_streaming_no_tenant_extraction( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + method_name: str, + request_proto: Any, + handler_method_name: str, + return_value: Any, + ) -> None: + handler_mock = getattr(mock_request_handler, handler_method_name) + handler_mock.return_value = return_value + + grpc_method = getattr(grpc_handler, method_name) + await grpc_method(request_proto, mock_grpc_context) + + handler_mock.assert_awaited_once() + call_args = handler_mock.call_args + server_context = call_args[0][1] + assert isinstance(server_context, ServerCallContext) + assert server_context.tenant == '' + + @pytest.mark.parametrize( + 'method_name, request_proto, handler_method_name', + [ + ( + 'SendStreamingMessage', + a2a_pb2.SendMessageRequest(), + 'on_message_send_stream', + ), + ( + 'SubscribeToTask', + a2a_pb2.SubscribeToTaskRequest(id='1'), + 'on_subscribe_to_task', + ), + ], + ) + async def test_streaming_no_tenant_extraction( + self, + grpc_handler: GrpcHandler, + mock_request_handler: AsyncMock, + mock_grpc_context: AsyncMock, + method_name: str, + request_proto: Any, + handler_method_name: str, + ) -> None: + async def mock_stream(*args, **kwargs): + yield types.Message(message_id='msg-1') + + handler_mock_attr = MagicMock(return_value=mock_stream()) + setattr(mock_request_handler, handler_method_name, handler_mock_attr) + + grpc_method = getattr(grpc_handler, method_name) + + async for _ in grpc_method(request_proto, mock_grpc_context): + pass + + handler_mock_attr.assert_called_once() + call_args = handler_mock_attr.call_args + server_context = call_args[0][1] + assert isinstance(server_context, ServerCallContext) + assert server_context.tenant == '' diff --git a/tests/server/request_handlers/test_jsonrpc_handler.py b/tests/server/request_handlers/test_jsonrpc_handler.py deleted file mode 100644 index 08dfd63f0..000000000 --- a/tests/server/request_handlers/test_jsonrpc_handler.py +++ /dev/null @@ -1,1378 +0,0 @@ -import asyncio -import unittest -import unittest.async_case - -from collections.abc import AsyncGenerator -from typing import Any, NoReturn -from unittest.mock import AsyncMock, MagicMock, call, patch - -import httpx -import pytest - -from a2a.server.agent_execution import AgentExecutor, RequestContext -from a2a.server.agent_execution.request_context_builder import ( - RequestContextBuilder, -) -from a2a.server.context import ServerCallContext -from a2a.server.events import QueueManager -from a2a.server.events.event_queue import EventQueue -from a2a.server.request_handlers import DefaultRequestHandler, JSONRPCHandler -from a2a.server.tasks import ( - BasePushNotificationSender, - InMemoryPushNotificationConfigStore, - PushNotificationConfigStore, - PushNotificationSender, - TaskStore, -) -from a2a.types import ( - AgentCapabilities, - AgentCard, - Artifact, - CancelTaskRequest, - CancelTaskSuccessResponse, - DeleteTaskPushNotificationConfigParams, - DeleteTaskPushNotificationConfigRequest, - DeleteTaskPushNotificationConfigSuccessResponse, - GetAuthenticatedExtendedCardRequest, - GetAuthenticatedExtendedCardResponse, - GetAuthenticatedExtendedCardSuccessResponse, - GetTaskPushNotificationConfigParams, - GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigSuccessResponse, - GetTaskRequest, - GetTaskResponse, - GetTaskSuccessResponse, - InternalError, - JSONRPCErrorResponse, - ListTaskPushNotificationConfigParams, - ListTaskPushNotificationConfigRequest, - ListTaskPushNotificationConfigSuccessResponse, - Message, - MessageSendConfiguration, - MessageSendParams, - Part, - PushNotificationConfig, - SendMessageRequest, - SendMessageSuccessResponse, - SendStreamingMessageRequest, - SendStreamingMessageSuccessResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse, - SetTaskPushNotificationConfigSuccessResponse, - Task, - TaskArtifactUpdateEvent, - TaskIdParams, - TaskNotFoundError, - TaskPushNotificationConfig, - TaskQueryParams, - TaskResubscriptionRequest, - TaskState, - TaskStatus, - TaskStatusUpdateEvent, - TextPart, - UnsupportedOperationError, -) -from a2a.utils.errors import ServerError - - -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task_123', - 'contextId': 'session-xyz', - 'status': {'state': 'submitted'}, - 'kind': 'task', -} -MESSAGE_PAYLOAD: dict[str, Any] = { - 'role': 'agent', - 'parts': [{'text': 'test message'}], - 'messageId': '111', -} - - -class TestJSONRPCtHandler(unittest.async_case.IsolatedAsyncioTestCase): - @pytest.fixture(autouse=True) - def init_fixtures(self) -> None: - self.mock_agent_card = MagicMock( - spec=AgentCard, - url='http://agent.example.com/api', - supports_authenticated_extended_card=True, - ) - - async def test_on_get_task_success(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - call_context = ServerCallContext(state={'foo': 'bar'}) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - task_id = 'test_task_id' - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - request = GetTaskRequest(id='1', params=TaskQueryParams(id=task_id)) - response: GetTaskResponse = await handler.on_get_task( - request, call_context - ) - self.assertIsInstance(response.root, GetTaskSuccessResponse) - assert response.root.result == mock_task # type: ignore - mock_task_store.get.assert_called_once_with(task_id, unittest.mock.ANY) - - async def test_on_get_task_not_found(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task_store.get.return_value = None - request = GetTaskRequest( - id='1', - method='tasks/get', - params=TaskQueryParams(id='nonexistent_id'), - ) - call_context = ServerCallContext(state={'foo': 'bar'}) - response: GetTaskResponse = await handler.on_get_task( - request, call_context - ) - self.assertIsInstance(response.root, JSONRPCErrorResponse) - assert response.root.error == TaskNotFoundError() # type: ignore - - async def test_on_cancel_task_success(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - task_id = 'test_task_id' - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - mock_agent_executor.cancel.return_value = None - call_context = ServerCallContext(state={'foo': 'bar'}) - - async def streaming_coro(): - mock_task.status.state = TaskState.canceled - yield mock_task - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = CancelTaskRequest(id='1', params=TaskIdParams(id=task_id)) - response = await handler.on_cancel_task(request, call_context) - assert mock_agent_executor.cancel.call_count == 1 - self.assertIsInstance(response.root, CancelTaskSuccessResponse) - assert response.root.result == mock_task # type: ignore - assert response.root.result.status.state == TaskState.canceled - mock_agent_executor.cancel.assert_called_once() - - async def test_on_cancel_task_not_supported(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - task_id = 'test_task_id' - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - mock_agent_executor.cancel.return_value = None - call_context = ServerCallContext(state={'foo': 'bar'}) - - async def streaming_coro(): - raise ServerError(UnsupportedOperationError()) - yield - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = CancelTaskRequest(id='1', params=TaskIdParams(id=task_id)) - response = await handler.on_cancel_task(request, call_context) - assert mock_agent_executor.cancel.call_count == 1 - self.assertIsInstance(response.root, JSONRPCErrorResponse) - assert response.root.error == UnsupportedOperationError() # type: ignore - mock_agent_executor.cancel.assert_called_once() - - async def test_on_cancel_task_not_found(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task_store.get.return_value = None - request = CancelTaskRequest( - id='1', - method='tasks/cancel', - params=TaskIdParams(id='nonexistent_id'), - ) - response = await handler.on_cancel_task(request) - self.assertIsInstance(response.root, JSONRPCErrorResponse) - assert response.root.error == TaskNotFoundError() # type: ignore - mock_task_store.get.assert_called_once_with( - 'nonexistent_id', unittest.mock.ANY - ) - mock_agent_executor.cancel.assert_not_called() - - @patch( - 'a2a.server.agent_execution.simple_request_context_builder.SimpleRequestContextBuilder.build' - ) - async def test_on_message_new_message_success( - self, _mock_builder_build: AsyncMock - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - mock_agent_executor.execute.return_value = None - - _mock_builder_build.return_value = RequestContext( - request=MagicMock(), - task_id='task_123', - context_id='session-xyz', - task=None, - related_tasks=None, - ) - - async def streaming_coro(): - yield mock_task - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = SendMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - response = await handler.on_message_send(request) - assert mock_agent_executor.execute.call_count == 1 - self.assertIsInstance(response.root, SendMessageSuccessResponse) - assert response.root.result == mock_task # type: ignore - mock_agent_executor.execute.assert_called_once() - - async def test_on_message_new_message_with_existing_task_success( - self, - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - mock_agent_executor.execute.return_value = None - - async def streaming_coro(): - yield mock_task - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - **MESSAGE_PAYLOAD, - task_id=mock_task.id, - context_id=mock_task.context_id, - ) - ), - ) - response = await handler.on_message_send(request) - assert mock_agent_executor.execute.call_count == 1 - self.assertIsInstance(response.root, SendMessageSuccessResponse) - assert response.root.result == mock_task # type: ignore - mock_agent_executor.execute.assert_called_once() - - async def test_on_message_error(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task_store.get.return_value = None - mock_agent_executor.execute.return_value = None - - async def streaming_coro(): - raise ServerError(error=UnsupportedOperationError()) - yield - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - **MESSAGE_PAYLOAD, - ) - ), - ) - response = await handler.on_message_send(request) - - self.assertIsInstance(response.root, JSONRPCErrorResponse) - assert response.root.error == UnsupportedOperationError() # type: ignore - - @patch( - 'a2a.server.agent_execution.simple_request_context_builder.SimpleRequestContextBuilder.build' - ) - async def test_on_message_stream_new_message_success( - self, _mock_builder_build: AsyncMock - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - - self.mock_agent_card.capabilities = AgentCapabilities(streaming=True) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - _mock_builder_build.return_value = RequestContext( - request=MagicMock(), - task_id='task_123', - context_id='session-xyz', - task=None, - related_tasks=None, - ) - - events: list[Any] = [ - Task(**MINIMAL_TASK), - TaskArtifactUpdateEvent( - task_id='task_123', - context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), - ), - TaskStatusUpdateEvent( - task_id='task_123', - context_id='session-xyz', - status=TaskStatus(state=TaskState.completed), - final=True, - ), - ] - - async def streaming_coro(): - for event in events: - yield event - - # Latch to ensure background execute is scheduled before asserting - execute_called = asyncio.Event() - - async def exec_side_effect(*args, **kwargs): - execute_called.set() - - mock_agent_executor.execute.side_effect = exec_side_effect - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - mock_task_store.get.return_value = None - mock_agent_executor.execute.return_value = None - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - response = handler.on_message_send_stream(request) - assert isinstance(response, AsyncGenerator) - collected_events: list[Any] = [] - async for event in response: - collected_events.append(event) - assert len(collected_events) == len(events) - for i, event in enumerate(collected_events): - assert isinstance( - event.root, SendStreamingMessageSuccessResponse - ) - assert event.root.result == events[i] - await asyncio.wait_for(execute_called.wait(), timeout=0.1) - mock_agent_executor.execute.assert_called_once() - - async def test_on_message_stream_new_message_existing_task_success( - self, - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - - self.mock_agent_card.capabilities = AgentCapabilities(streaming=True) - - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK, history=[]) - events: list[Any] = [ - mock_task, - TaskArtifactUpdateEvent( - task_id='task_123', - context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), - ), - TaskStatusUpdateEvent( - task_id='task_123', - context_id='session-xyz', - status=TaskStatus(state=TaskState.working), - final=True, - ), - ] - - async def streaming_coro(): - for event in events: - yield event - - # Latch to ensure background execute is scheduled before asserting - execute_called = asyncio.Event() - - async def exec_side_effect(*args, **kwargs): - execute_called.set() - - mock_agent_executor.execute.side_effect = exec_side_effect - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - mock_task_store.get.return_value = mock_task - mock_agent_executor.execute.return_value = None - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - **MESSAGE_PAYLOAD, - task_id=mock_task.id, - context_id=mock_task.context_id, - ) - ), - ) - response = handler.on_message_send_stream(request) - assert isinstance(response, AsyncGenerator) - collected_events = [item async for item in response] - assert len(collected_events) == len(events) - await asyncio.wait_for(execute_called.wait(), timeout=0.1) - mock_agent_executor.execute.assert_called_once() - assert mock_task.history is not None and len(mock_task.history) == 1 - - async def test_set_push_notification_success(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - mock_push_notification_store = AsyncMock( - spec=PushNotificationConfigStore - ) - - request_handler = DefaultRequestHandler( - mock_agent_executor, - mock_task_store, - push_config_store=mock_push_notification_store, - ) - self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - task_push_config = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - request = SetTaskPushNotificationConfigRequest( - id='1', params=task_push_config - ) - response: SetTaskPushNotificationConfigResponse = ( - await handler.set_push_notification_config(request) - ) - self.assertIsInstance( - response.root, SetTaskPushNotificationConfigSuccessResponse - ) - assert response.root.result == task_push_config # type: ignore - mock_push_notification_store.set_info.assert_called_once_with( - mock_task.id, task_push_config.push_notification_config - ) - - async def test_get_push_notification_success(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - push_notification_store = InMemoryPushNotificationConfigStore() - request_handler = DefaultRequestHandler( - mock_agent_executor, - mock_task_store, - push_config_store=push_notification_store, - ) - self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - task_push_config = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - request = SetTaskPushNotificationConfigRequest( - id='1', params=task_push_config - ) - await handler.set_push_notification_config(request) - - get_request: GetTaskPushNotificationConfigRequest = ( - GetTaskPushNotificationConfigRequest( - id='1', params=TaskIdParams(id=mock_task.id) - ) - ) - get_response: GetTaskPushNotificationConfigResponse = ( - await handler.get_push_notification_config(get_request) - ) - self.assertIsInstance( - get_response.root, GetTaskPushNotificationConfigSuccessResponse - ) - assert get_response.root.result == task_push_config # type: ignore - - @patch( - 'a2a.server.agent_execution.simple_request_context_builder.SimpleRequestContextBuilder.build' - ) - async def test_on_message_stream_new_message_send_push_notification_success( - self, _mock_builder_build: AsyncMock - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) - push_notification_store = InMemoryPushNotificationConfigStore() - push_notification_sender = BasePushNotificationSender( - mock_httpx_client, push_notification_store - ) - request_handler = DefaultRequestHandler( - mock_agent_executor, - mock_task_store, - push_config_store=push_notification_store, - push_sender=push_notification_sender, - ) - self.mock_agent_card.capabilities = AgentCapabilities( - streaming=True, push_notifications=True - ) - _mock_builder_build.return_value = RequestContext( - request=MagicMock(), - task_id='task_123', - context_id='session-xyz', - task=None, - related_tasks=None, - ) - - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - events: list[Any] = [ - Task(**MINIMAL_TASK), - TaskArtifactUpdateEvent( - task_id='task_123', - context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), - ), - TaskStatusUpdateEvent( - task_id='task_123', - context_id='session-xyz', - status=TaskStatus(state=TaskState.completed), - final=True, - ), - ] - - async def streaming_coro(): - for event in events: - yield event - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - mock_task_store.get.return_value = None - mock_agent_executor.execute.return_value = None - mock_httpx_client.post.return_value = httpx.Response(200) - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - request.params.configuration = MessageSendConfiguration( - accepted_output_modes=['text'], - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - response = handler.on_message_send_stream(request) - assert isinstance(response, AsyncGenerator) - - collected_events = [item async for item in response] - assert len(collected_events) == len(events) - - calls = [ - call( - 'http://example.com', - json={ - 'contextId': 'session-xyz', - 'id': 'task_123', - 'kind': 'task', - 'status': {'state': 'submitted'}, - }, - headers=None, - ), - call( - 'http://example.com', - json={ - 'artifacts': [ - { - 'artifactId': '11', - 'parts': [ - { - 'kind': 'text', - 'text': 'text', - } - ], - } - ], - 'contextId': 'session-xyz', - 'id': 'task_123', - 'kind': 'task', - 'status': {'state': 'submitted'}, - }, - headers=None, - ), - call( - 'http://example.com', - json={ - 'artifacts': [ - { - 'artifactId': '11', - 'parts': [ - { - 'kind': 'text', - 'text': 'text', - } - ], - } - ], - 'contextId': 'session-xyz', - 'id': 'task_123', - 'kind': 'task', - 'status': {'state': 'completed'}, - }, - headers=None, - ), - ] - mock_httpx_client.post.assert_has_calls(calls) - - async def test_on_resubscribe_existing_task_success( - self, - ) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - mock_queue_manager = AsyncMock(spec=QueueManager) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store, mock_queue_manager - ) - self.mock_agent_card = MagicMock(spec=AgentCard) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK, history=[]) - events: list[Any] = [ - TaskArtifactUpdateEvent( - task_id='task_123', - context_id='session-xyz', - artifact=Artifact( - artifact_id='11', parts=[Part(TextPart(text='text'))] - ), - ), - TaskStatusUpdateEvent( - task_id='task_123', - context_id='session-xyz', - status=TaskStatus(state=TaskState.completed), - final=True, - ), - ] - - async def streaming_coro(): - for event in events: - yield event - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - mock_task_store.get.return_value = mock_task - mock_queue_manager.tap.return_value = EventQueue() - request = TaskResubscriptionRequest( - id='1', params=TaskIdParams(id=mock_task.id) - ) - response = handler.on_resubscribe_to_task(request) - assert isinstance(response, AsyncGenerator) - collected_events: list[Any] = [] - async for event in response: - collected_events.append(event) - assert len(collected_events) == len(events) - assert mock_task.history is not None and len(mock_task.history) == 0 - - async def test_on_resubscribe_no_existing_task_error(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task_store.get.return_value = None - request = TaskResubscriptionRequest( - id='1', params=TaskIdParams(id='nonexistent_id') - ) - response = handler.on_resubscribe_to_task(request) - assert isinstance(response, AsyncGenerator) - collected_events: list[Any] = [] - async for event in response: - collected_events.append(event) - assert len(collected_events) == 1 - self.assertIsInstance(collected_events[0].root, JSONRPCErrorResponse) - assert collected_events[0].root.error == TaskNotFoundError() - - async def test_streaming_not_supported_error( - self, - ) -> None: - """Test that on_message_send_stream raises an error when streaming not supported.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - # Create agent card with streaming capability disabled - self.mock_agent_card.capabilities = AgentCapabilities(streaming=False) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - # Act & Assert - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - - # Should raise ServerError about streaming not supported - with self.assertRaises(ServerError) as context: - async for _ in handler.on_message_send_stream(request): - pass - - self.assertEqual( - str(context.exception.error.message), # type: ignore - 'Streaming is not supported by the agent', - ) - - async def test_push_notifications_not_supported_error(self) -> None: - """Test that set_push_notification raises an error when push notifications not supported.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - # Create agent card with push notifications capability disabled - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=False, streaming=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - # Act & Assert - task_push_config = TaskPushNotificationConfig( - task_id='task_123', - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - request = SetTaskPushNotificationConfigRequest( - id='1', params=task_push_config - ) - - # Should raise ServerError about push notifications not supported - with self.assertRaises(ServerError) as context: - await handler.set_push_notification_config(request) - - self.assertEqual( - str(context.exception.error.message), # type: ignore - 'Push notifications are not supported by the agent', - ) - - async def test_on_get_push_notification_no_push_config_store(self) -> None: - """Test get_push_notification with no push notifier configured.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - # Create request handler without a push notifier - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Act - get_request = GetTaskPushNotificationConfigRequest( - id='1', params=TaskIdParams(id=mock_task.id) - ) - response = await handler.get_push_notification_config(get_request) - - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore - - async def test_on_set_push_notification_no_push_config_store(self) -> None: - """Test set_push_notification with no push notifier configured.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - # Create request handler without a push notifier - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Act - task_push_config = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - request = SetTaskPushNotificationConfigRequest( - id='1', params=task_push_config - ) - response = await handler.set_push_notification_config(request) - - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore - - async def test_on_message_send_internal_error(self) -> None: - """Test on_message_send with an internal error.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - # Make the request handler raise an Internal error without specifying an error type - async def raise_server_error(*args, **kwargs) -> NoReturn: - raise ServerError(InternalError(message='Internal Error')) - - # Patch the method to raise an error - with patch.object( - request_handler, 'on_message_send', side_effect=raise_server_error - ): - # Act - request = SendMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - response = await handler.on_message_send(request) - - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertIsInstance(response.root.error, InternalError) # type: ignore - - async def test_on_message_stream_internal_error(self) -> None: - """Test on_message_send_stream with an internal error.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - self.mock_agent_card.capabilities = AgentCapabilities(streaming=True) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - # Make the request handler raise an Internal error without specifying an error type - async def raise_server_error(*args, **kwargs): - raise ServerError(InternalError(message='Internal Error')) - yield # Need this to make it an async generator - - # Patch the method to raise an error - with patch.object( - request_handler, - 'on_message_send_stream', - return_value=raise_server_error(), - ): - # Act - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - - # Get the single error response - responses = [] - async for response in handler.on_message_send_stream(request): - responses.append(response) - - # Assert - self.assertEqual(len(responses), 1) - self.assertIsInstance(responses[0].root, JSONRPCErrorResponse) - self.assertIsInstance(responses[0].root.error, InternalError) - - async def test_default_request_handler_with_custom_components(self) -> None: - """Test DefaultRequestHandler initialization with custom components.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - mock_queue_manager = AsyncMock(spec=QueueManager) - mock_push_config_store = AsyncMock(spec=PushNotificationConfigStore) - mock_push_sender = AsyncMock(spec=PushNotificationSender) - mock_request_context_builder = AsyncMock(spec=RequestContextBuilder) - - # Act - handler = DefaultRequestHandler( - agent_executor=mock_agent_executor, - task_store=mock_task_store, - queue_manager=mock_queue_manager, - push_config_store=mock_push_config_store, - push_sender=mock_push_sender, - request_context_builder=mock_request_context_builder, - ) - - # Assert - self.assertEqual(handler.agent_executor, mock_agent_executor) - self.assertEqual(handler.task_store, mock_task_store) - self.assertEqual(handler._queue_manager, mock_queue_manager) - self.assertEqual(handler._push_config_store, mock_push_config_store) - self.assertEqual(handler._push_sender, mock_push_sender) - self.assertEqual( - handler._request_context_builder, mock_request_context_builder - ) - - async def test_on_message_send_error_handling(self) -> None: - """Test error handling in on_message_send when consuming raises ServerError.""" - # Arrange - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - - # Let task exist - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Set up consume_and_break_on_interrupt to raise ServerError - async def consume_raises_error(*args, **kwargs) -> NoReturn: - raise ServerError(error=UnsupportedOperationError()) - - with patch( - 'a2a.server.tasks.result_aggregator.ResultAggregator.consume_and_break_on_interrupt', - side_effect=consume_raises_error, - ): - # Act - request = SendMessageRequest( - id='1', - params=MessageSendParams( - message=Message( - **MESSAGE_PAYLOAD, - task_id=mock_task.id, - context_id=mock_task.context_id, - ) - ), - ) - - response = await handler.on_message_send(request) - - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore - - async def test_on_message_send_task_id_mismatch(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - mock_agent_executor.execute.return_value = None - - async def streaming_coro(): - yield mock_task - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - request = SendMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - response = await handler.on_message_send(request) - assert mock_agent_executor.execute.call_count == 1 - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertIsInstance(response.root.error, InternalError) # type: ignore - - async def test_on_message_stream_task_id_mismatch(self) -> None: - mock_agent_executor = AsyncMock(spec=AgentExecutor) - mock_task_store = AsyncMock(spec=TaskStore) - request_handler = DefaultRequestHandler( - mock_agent_executor, mock_task_store - ) - - self.mock_agent_card.capabilities = AgentCapabilities(streaming=True) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - events: list[Any] = [Task(**MINIMAL_TASK)] - - async def streaming_coro(): - for event in events: - yield event - - with patch( - 'a2a.server.request_handlers.default_request_handler.EventConsumer.consume_all', - return_value=streaming_coro(), - ): - mock_task_store.get.return_value = None - mock_agent_executor.execute.return_value = None - request = SendStreamingMessageRequest( - id='1', - params=MessageSendParams(message=Message(**MESSAGE_PAYLOAD)), - ) - response = handler.on_message_send_stream(request) - assert isinstance(response, AsyncGenerator) - collected_events: list[Any] = [] - async for event in response: - collected_events.append(event) - assert len(collected_events) == 1 - self.assertIsInstance( - collected_events[0].root, JSONRPCErrorResponse - ) - self.assertIsInstance(collected_events[0].root.error, InternalError) - - async def test_on_get_push_notification(self) -> None: - """Test get_push_notification_config handling""" - mock_task_store = AsyncMock(spec=TaskStore) - - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Create request handler without a push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - task_push_config = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - id='config1', url='http://example.com' - ), - ) - request_handler.on_get_task_push_notification_config.return_value = ( - task_push_config - ) - - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - list_request = GetTaskPushNotificationConfigRequest( - id='1', - params=GetTaskPushNotificationConfigParams( - id=mock_task.id, push_notification_config_id='config1' - ), - ) - response = await handler.get_push_notification_config(list_request) - # Assert - self.assertIsInstance( - response.root, GetTaskPushNotificationConfigSuccessResponse - ) - self.assertEqual(response.root.result, task_push_config) # type: ignore - - async def test_on_list_push_notification(self) -> None: - """Test list_push_notification_config handling""" - mock_task_store = AsyncMock(spec=TaskStore) - - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Create request handler without a push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - task_push_config = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - request_handler.on_list_task_push_notification_config.return_value = [ - task_push_config - ] - - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - list_request = ListTaskPushNotificationConfigRequest( - id='1', params=ListTaskPushNotificationConfigParams(id=mock_task.id) - ) - response = await handler.list_push_notification_config(list_request) - # Assert - self.assertIsInstance( - response.root, ListTaskPushNotificationConfigSuccessResponse - ) - self.assertEqual(response.root.result, [task_push_config]) # type: ignore - - async def test_on_list_push_notification_error(self) -> None: - """Test list_push_notification_config handling""" - mock_task_store = AsyncMock(spec=TaskStore) - - mock_task = Task(**MINIMAL_TASK) - mock_task_store.get.return_value = mock_task - - # Create request handler without a push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - _ = TaskPushNotificationConfig( - task_id=mock_task.id, - push_notification_config=PushNotificationConfig( - url='http://example.com' - ), - ) - # throw server error - request_handler.on_list_task_push_notification_config.side_effect = ( - ServerError(InternalError()) - ) - - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - list_request = ListTaskPushNotificationConfigRequest( - id='1', params=ListTaskPushNotificationConfigParams(id=mock_task.id) - ) - response = await handler.list_push_notification_config(list_request) - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, InternalError()) # type: ignore - - async def test_on_delete_push_notification(self) -> None: - """Test delete_push_notification_config handling""" - - # Create request handler without a push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - request_handler.on_delete_task_push_notification_config.return_value = ( - None - ) - - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - delete_request = DeleteTaskPushNotificationConfigRequest( - id='1', - params=DeleteTaskPushNotificationConfigParams( - id='task1', push_notification_config_id='config1' - ), - ) - response = await handler.delete_push_notification_config(delete_request) - # Assert - self.assertIsInstance( - response.root, DeleteTaskPushNotificationConfigSuccessResponse - ) - self.assertEqual(response.root.result, None) # type: ignore - - async def test_on_delete_push_notification_error(self) -> None: - """Test delete_push_notification_config error handling""" - - # Create request handler without a push notifier - request_handler = AsyncMock(spec=DefaultRequestHandler) - # throw server error - request_handler.on_delete_task_push_notification_config.side_effect = ( - ServerError(UnsupportedOperationError()) - ) - - self.mock_agent_card.capabilities = AgentCapabilities( - push_notifications=True - ) - handler = JSONRPCHandler(self.mock_agent_card, request_handler) - delete_request = DeleteTaskPushNotificationConfigRequest( - id='1', - params=DeleteTaskPushNotificationConfigParams( - id='task1', push_notification_config_id='config1' - ), - ) - response = await handler.delete_push_notification_config(delete_request) - # Assert - self.assertIsInstance(response.root, JSONRPCErrorResponse) - self.assertEqual(response.root.error, UnsupportedOperationError()) # type: ignore - - async def test_get_authenticated_extended_card_success(self) -> None: - """Test successful retrieval of the authenticated extended agent card.""" - # Arrange - mock_request_handler = AsyncMock(spec=DefaultRequestHandler) - mock_extended_card = AgentCard( - name='Extended Card', - description='More details', - url='http://agent.example.com/api', - version='1.1', - capabilities=AgentCapabilities(), - default_input_modes=['text/plain'], - default_output_modes=['application/json'], - skills=[], - ) - handler = JSONRPCHandler( - self.mock_agent_card, - mock_request_handler, - extended_agent_card=mock_extended_card, - extended_card_modifier=None, - ) - request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-1') - call_context = ServerCallContext(state={'foo': 'bar'}) - - # Act - response: GetAuthenticatedExtendedCardResponse = ( - await handler.get_authenticated_extended_card(request, call_context) - ) - - # Assert - self.assertIsInstance( - response.root, GetAuthenticatedExtendedCardSuccessResponse - ) - self.assertEqual(response.root.id, 'ext-card-req-1') - self.assertEqual(response.root.result, mock_extended_card) - - async def test_get_authenticated_extended_card_not_configured(self) -> None: - """Test error when authenticated extended agent card is not configured.""" - # Arrange - mock_request_handler = AsyncMock(spec=DefaultRequestHandler) - self.mock_agent_card.supports_extended_card = True - handler = JSONRPCHandler( - self.mock_agent_card, - mock_request_handler, - extended_agent_card=None, - extended_card_modifier=None, - ) - request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-2') - call_context = ServerCallContext(state={'foo': 'bar'}) - - # Act - response: GetAuthenticatedExtendedCardResponse = ( - await handler.get_authenticated_extended_card(request, call_context) - ) - - # Assert - # Authenticated Extended Card flag is set with no extended card, - # returns base card in this case. - self.assertIsInstance( - response.root, GetAuthenticatedExtendedCardSuccessResponse - ) - self.assertEqual(response.root.id, 'ext-card-req-2') - - async def test_get_authenticated_extended_card_with_modifier(self) -> None: - """Test successful retrieval of a dynamically modified extended agent card.""" - # Arrange - mock_request_handler = AsyncMock(spec=DefaultRequestHandler) - mock_base_card = AgentCard( - name='Base Card', - description='Base details', - url='http://agent.example.com/api', - version='1.0', - capabilities=AgentCapabilities(), - default_input_modes=['text/plain'], - default_output_modes=['application/json'], - skills=[], - ) - - async def modifier( - card: AgentCard, context: ServerCallContext - ) -> AgentCard: - modified_card = card.model_copy(deep=True) - modified_card.name = 'Modified Card' - modified_card.description = ( - f'Modified for context: {context.state.get("foo")}' - ) - return modified_card - - handler = JSONRPCHandler( - self.mock_agent_card, - mock_request_handler, - extended_agent_card=mock_base_card, - extended_card_modifier=modifier, - ) - request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-mod') - call_context = ServerCallContext(state={'foo': 'bar'}) - - # Act - response: GetAuthenticatedExtendedCardResponse = ( - await handler.get_authenticated_extended_card(request, call_context) - ) - - # Assert - self.assertIsInstance( - response.root, GetAuthenticatedExtendedCardSuccessResponse - ) - self.assertEqual(response.root.id, 'ext-card-req-mod') - modified_card = response.root.result - self.assertEqual(modified_card.name, 'Modified Card') - self.assertEqual(modified_card.description, 'Modified for context: bar') - self.assertEqual(modified_card.version, '1.0') - - async def test_get_authenticated_extended_card_with_modifier_sync( - self, - ) -> None: - """Test successful retrieval of a synchronously dynamically modified extended agent card.""" - # Arrange - mock_request_handler = AsyncMock(spec=DefaultRequestHandler) - mock_base_card = AgentCard( - name='Base Card', - description='Base details', - url='http://agent.example.com/api', - version='1.0', - capabilities=AgentCapabilities(), - default_input_modes=['text/plain'], - default_output_modes=['application/json'], - skills=[], - ) - - def modifier(card: AgentCard, context: ServerCallContext) -> AgentCard: - modified_card = card.model_copy(deep=True) - modified_card.name = 'Modified Card' - modified_card.description = ( - f'Modified for context: {context.state.get("foo")}' - ) - return modified_card - - handler = JSONRPCHandler( - self.mock_agent_card, - mock_request_handler, - extended_agent_card=mock_base_card, - extended_card_modifier=modifier, - ) - request = GetAuthenticatedExtendedCardRequest(id='ext-card-req-mod') - call_context = ServerCallContext(state={'foo': 'bar'}) - - # Act - response: GetAuthenticatedExtendedCardResponse = ( - await handler.get_authenticated_extended_card(request, call_context) - ) - - # Assert - self.assertIsInstance( - response.root, GetAuthenticatedExtendedCardSuccessResponse - ) - self.assertEqual(response.root.id, 'ext-card-req-mod') - modified_card = response.root.result - self.assertEqual(modified_card.name, 'Modified Card') - self.assertEqual(modified_card.description, 'Modified for context: bar') - self.assertEqual(modified_card.version, '1.0') diff --git a/tests/server/request_handlers/test_response_helpers.py b/tests/server/request_handlers/test_response_helpers.py index 36de78e62..71706f149 100644 --- a/tests/server/request_handlers/test_response_helpers.py +++ b/tests/server/request_handlers/test_response_helpers.py @@ -1,97 +1,291 @@ import unittest -from unittest.mock import patch +from google.protobuf.json_format import MessageToDict from a2a.server.request_handlers.response_helpers import ( + agent_card_to_dict, build_error_response, prepare_response_object, ) from a2a.types import ( - A2AError, - GetTaskResponse, - GetTaskSuccessResponse, - InvalidAgentResponseError, InvalidParamsError, - JSONRPCError, - JSONRPCErrorResponse, - Task, TaskNotFoundError, +) +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + AgentInterface, + Task, TaskState, TaskStatus, ) class TestResponseHelpers(unittest.TestCase): + def test_agent_card_to_dict_without_extended_card(self) -> None: + card = AgentCard( + name='Test Agent', + description='Test Description', + version='1.0', + capabilities=AgentCapabilities(extended_agent_card=False), + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.v03.com', + protocol_binding='JSONRPC', + protocol_version='0.3', + ), + ], + ) + result = agent_card_to_dict(card) + self.assertNotIn('supportsAuthenticatedExtendedCard', result) + self.assertEqual(result['name'], 'Test Agent') + + def test_agent_card_to_dict_with_extended_card(self) -> None: + card = AgentCard( + name='Test Agent', + description='Test Description', + version='1.0', + capabilities=AgentCapabilities(extended_agent_card=True), + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.v03.com', + protocol_binding='JSONRPC', + protocol_version='0.3', + ), + ], + ) + result = agent_card_to_dict(card) + self.assertIn('supportsAuthenticatedExtendedCard', result) + self.assertTrue(result['supportsAuthenticatedExtendedCard']) + self.assertEqual(result['name'], 'Test Agent') + + def test_agent_card_to_dict_all_transports_all_versions(self) -> None: + + card = AgentCard( + name='Complex Agent', + description='Agent with many interfaces', + version='1.2.3', + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.v10.com', + protocol_binding='JSONRPC', + protocol_version='1.0.0', + ), + AgentInterface( + url='http://jsonrpc.v03.com', + protocol_binding='JSONRPC', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://grpc.v10.com', + protocol_binding='GRPC', + protocol_version='1.0.0', + ), + AgentInterface( + url='http://grpc.v03.com', + protocol_binding='GRPC', + protocol_version='0.3.0', + ), + AgentInterface( + url='http://httpjson.v10.com', + protocol_binding='HTTP+JSON', + protocol_version='1.0.0', + ), + AgentInterface( + url='http://httpjson.v03.com', + protocol_binding='HTTP+JSON', + protocol_version='0.3.0', + ), + ], + ) + + result = agent_card_to_dict(card) + + expected = { + 'name': 'Complex Agent', + 'description': 'Agent with many interfaces', + 'version': '1.2.3', + 'supportedInterfaces': [ + { + 'url': 'http://jsonrpc.v10.com', + 'protocolBinding': 'JSONRPC', + 'protocolVersion': '1.0.0', + }, + { + 'url': 'http://jsonrpc.v03.com', + 'protocolBinding': 'JSONRPC', + 'protocolVersion': '0.3.0', + }, + { + 'url': 'http://grpc.v10.com', + 'protocolBinding': 'GRPC', + 'protocolVersion': '1.0.0', + }, + { + 'url': 'http://grpc.v03.com', + 'protocolBinding': 'GRPC', + 'protocolVersion': '0.3.0', + }, + { + 'url': 'http://httpjson.v10.com', + 'protocolBinding': 'HTTP+JSON', + 'protocolVersion': '1.0.0', + }, + { + 'url': 'http://httpjson.v03.com', + 'protocolBinding': 'HTTP+JSON', + 'protocolVersion': '0.3.0', + }, + ], + # Compatibility fields (v0.3) + 'url': 'http://jsonrpc.v03.com', + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3.0', + 'additionalInterfaces': [ + {'url': 'http://grpc.v03.com', 'transport': 'GRPC'}, + {'url': 'http://httpjson.v03.com', 'transport': 'HTTP+JSON'}, + ], + 'capabilities': {}, + 'defaultInputModes': [], + 'defaultOutputModes': [], + 'skills': [], + } + + self.assertEqual(result, expected) + + def test_agent_card_to_dict_only_1_0_interfaces(self) -> None: + card = AgentCard( + name='Modern Agent', + description='Agent with only 1.0 interfaces', + version='2.0.0', + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.v10.com', + protocol_binding='JSONRPC', + protocol_version='1.0.0', + ), + ], + ) + + result = agent_card_to_dict(card) + + expected = { + 'name': 'Modern Agent', + 'description': 'Agent with only 1.0 interfaces', + 'version': '2.0.0', + 'supportedInterfaces': [ + { + 'url': 'http://jsonrpc.v10.com', + 'protocolBinding': 'JSONRPC', + 'protocolVersion': '1.0.0', + }, + ], + } + + self.assertEqual(result, expected) + + def test_agent_card_to_dict_single_interface_no_version(self) -> None: + card = AgentCard( + name='Legacy Agent', + description='Agent with no protocol version', + version='1.0.0', + supported_interfaces=[ + AgentInterface( + url='http://jsonrpc.legacy.com', + protocol_binding='JSONRPC', + ), + ], + ) + + result = agent_card_to_dict(card) + + expected = { + 'name': 'Legacy Agent', + 'description': 'Agent with no protocol version', + 'version': '1.0.0', + 'supportedInterfaces': [ + { + 'url': 'http://jsonrpc.legacy.com', + 'protocolBinding': 'JSONRPC', + }, + ], + # Compatibility fields (v0.3) + 'url': 'http://jsonrpc.legacy.com', + 'preferredTransport': 'JSONRPC', + 'protocolVersion': '0.3', + 'capabilities': {}, + 'defaultInputModes': [], + 'defaultOutputModes': [], + 'skills': [], + } + + self.assertEqual(result, expected) + def test_build_error_response_with_a2a_error(self) -> None: request_id = 'req1' specific_error = TaskNotFoundError() - a2a_error = A2AError(root=specific_error) # Correctly wrap - response_wrapper = build_error_response( - request_id, a2a_error, GetTaskResponse - ) - self.assertIsInstance(response_wrapper, GetTaskResponse) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertEqual(response_wrapper.root.id, request_id) - self.assertEqual( - response_wrapper.root.error, specific_error - ) # build_error_response unwraps A2AError + response = build_error_response(request_id, specific_error) + + # Response is now a dict with JSON-RPC 2.0 structure + self.assertIsInstance(response, dict) + self.assertEqual(response.get('jsonrpc'), '2.0') + self.assertEqual(response.get('id'), request_id) + self.assertIn('error', response) + self.assertEqual(response['error']['code'], -32001) + self.assertEqual(response['error']['message'], specific_error.message) def test_build_error_response_with_jsonrpc_error(self) -> None: request_id = 123 - json_rpc_error = InvalidParamsError( - message='Custom invalid params' - ) # This is a specific error, not A2AError wrapped - response_wrapper = build_error_response( - request_id, json_rpc_error, GetTaskResponse - ) - self.assertIsInstance(response_wrapper, GetTaskResponse) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertEqual(response_wrapper.root.id, request_id) - self.assertEqual( - response_wrapper.root.error, json_rpc_error - ) # No .root access for json_rpc_error + json_rpc_error = InvalidParamsError(message='Custom invalid params') + response = build_error_response(request_id, json_rpc_error) + + self.assertIsInstance(response, dict) + self.assertEqual(response.get('jsonrpc'), '2.0') + self.assertEqual(response.get('id'), request_id) + self.assertIn('error', response) + self.assertEqual(response['error']['code'], -32602) + self.assertEqual(response['error']['message'], json_rpc_error.message) - def test_build_error_response_with_a2a_wrapping_jsonrpc_error(self) -> None: + def test_build_error_response_with_invalid_params_error(self) -> None: request_id = 'req_wrap' specific_jsonrpc_error = InvalidParamsError(message='Detail error') - a2a_error_wrapping = A2AError( - root=specific_jsonrpc_error - ) # Correctly wrap - response_wrapper = build_error_response( - request_id, a2a_error_wrapping, GetTaskResponse + response = build_error_response(request_id, specific_jsonrpc_error) + + self.assertIsInstance(response, dict) + self.assertEqual(response.get('jsonrpc'), '2.0') + self.assertEqual(response.get('id'), request_id) + self.assertIn('error', response) + self.assertEqual(response['error']['code'], -32602) + self.assertEqual( + response['error']['message'], specific_jsonrpc_error.message ) - self.assertIsInstance(response_wrapper, GetTaskResponse) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertEqual(response_wrapper.root.id, request_id) - self.assertEqual(response_wrapper.root.error, specific_jsonrpc_error) def test_build_error_response_with_request_id_string(self) -> None: request_id = 'string_id_test' - # Pass an A2AError-wrapped specific error for consistency with how build_error_response handles A2AError - error = A2AError(root=TaskNotFoundError()) - response_wrapper = build_error_response( - request_id, error, GetTaskResponse - ) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertEqual(response_wrapper.root.id, request_id) + error = TaskNotFoundError() + response = build_error_response(request_id, error) + + self.assertIsInstance(response, dict) + self.assertIn('error', response) + self.assertEqual(response.get('id'), request_id) def test_build_error_response_with_request_id_int(self) -> None: request_id = 456 - error = A2AError(root=TaskNotFoundError()) - response_wrapper = build_error_response( - request_id, error, GetTaskResponse - ) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertEqual(response_wrapper.root.id, request_id) + error = TaskNotFoundError() + response = build_error_response(request_id, error) + + self.assertIsInstance(response, dict) + self.assertIn('error', response) + self.assertEqual(response.get('id'), request_id) def test_build_error_response_with_request_id_none(self) -> None: request_id = None - error = A2AError(root=TaskNotFoundError()) - response_wrapper = build_error_response( - request_id, error, GetTaskResponse - ) - self.assertIsInstance(response_wrapper.root, JSONRPCErrorResponse) - self.assertIsNone(response_wrapper.root.id) + error = TaskNotFoundError() + response = build_error_response(request_id, error) + + self.assertIsInstance(response, dict) + self.assertIn('error', response) + self.assertIsNone(response.get('id')) def _create_sample_task( self, task_id: str = 'task123', context_id: str = 'ctx456' @@ -99,166 +293,59 @@ def _create_sample_task( return Task( id=task_id, context_id=context_id, - status=TaskStatus(state=TaskState.submitted), + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), history=[], ) - def test_prepare_response_object_successful_response(self) -> None: + def test_prepare_response_object_with_proto_message(self) -> None: request_id = 'req_success' task_result = self._create_sample_task() - response_wrapper = prepare_response_object( + response = prepare_response_object( request_id=request_id, response=task_result, success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, ) - self.assertIsInstance(response_wrapper, GetTaskResponse) - self.assertIsInstance(response_wrapper.root, GetTaskSuccessResponse) - self.assertEqual(response_wrapper.root.id, request_id) - self.assertEqual(response_wrapper.root.result, task_result) - - @patch('a2a.server.request_handlers.response_helpers.build_error_response') - def test_prepare_response_object_with_a2a_error_instance( - self, mock_build_error - ) -> None: - request_id = 'req_a2a_err' - specific_error = TaskNotFoundError() - a2a_error_instance = A2AError( - root=specific_error - ) # Correctly wrapped A2AError - - # This is what build_error_response (when called by prepare_response_object) will return - mock_wrapped_error_response = GetTaskResponse( - root=JSONRPCErrorResponse( - id=request_id, error=specific_error, jsonrpc='2.0' - ) - ) - mock_build_error.return_value = mock_wrapped_error_response - response_wrapper = prepare_response_object( - request_id=request_id, - response=a2a_error_instance, # Pass the A2AError instance - success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, - ) - # prepare_response_object should identify A2AError and call build_error_response - mock_build_error.assert_called_once_with( - request_id, a2a_error_instance, GetTaskResponse + # Response is now a dict with JSON-RPC 2.0 structure + self.assertIsInstance(response, dict) + self.assertEqual(response.get('jsonrpc'), '2.0') + self.assertEqual(response.get('id'), request_id) + self.assertIn('result', response) + # Result is the proto message converted to dict + expected_result = MessageToDict( + task_result, preserving_proto_field_name=False ) - self.assertEqual(response_wrapper, mock_wrapped_error_response) - - @patch('a2a.server.request_handlers.response_helpers.build_error_response') - def test_prepare_response_object_with_jsonrpcerror_base_instance( - self, mock_build_error - ) -> None: - request_id = 789 - # Use the base JSONRPCError class instance - json_rpc_base_error = JSONRPCError( - code=-32000, message='Generic JSONRPC error' - ) - - mock_wrapped_error_response = GetTaskResponse( - root=JSONRPCErrorResponse( - id=request_id, error=json_rpc_base_error, jsonrpc='2.0' - ) - ) - mock_build_error.return_value = mock_wrapped_error_response + self.assertEqual(response['result'], expected_result) - response_wrapper = prepare_response_object( + def test_prepare_response_object_with_error(self) -> None: + request_id = 'req_error' + error = TaskNotFoundError() + response = prepare_response_object( request_id=request_id, - response=json_rpc_base_error, # Pass the JSONRPCError instance + response=error, success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, - ) - # prepare_response_object should identify JSONRPCError and call build_error_response - mock_build_error.assert_called_once_with( - request_id, json_rpc_base_error, GetTaskResponse - ) - self.assertEqual(response_wrapper, mock_wrapped_error_response) - - @patch('a2a.server.request_handlers.response_helpers.build_error_response') - def test_prepare_response_object_specific_error_model_as_unexpected( - self, mock_build_error - ) -> None: - request_id = 'req_specific_unexpected' - # Pass a specific error model (like TaskNotFoundError) directly, NOT wrapped in A2AError - # This should be treated as an "unexpected" type by prepare_response_object's current logic - specific_error_direct = TaskNotFoundError() - - # This is the InvalidAgentResponseError that prepare_response_object will generate - generated_error_wrapper = A2AError( - root=InvalidAgentResponseError( - message='Agent returned invalid type response for this method' - ) ) - # This is what build_error_response will be called with (the generated error) - # And this is what it will return (the generated error, wrapped in GetTaskResponse) - mock_final_wrapped_response = GetTaskResponse( - root=JSONRPCErrorResponse( - id=request_id, error=generated_error_wrapper.root, jsonrpc='2.0' - ) - ) - mock_build_error.return_value = mock_final_wrapped_response + self.assertIsInstance(response, dict) + self.assertEqual(response.get('jsonrpc'), '2.0') + self.assertEqual(response.get('id'), request_id) + self.assertIn('error', response) + self.assertEqual(response['error']['code'], -32001) - response_wrapper = prepare_response_object( + def test_prepare_response_object_with_invalid_response(self) -> None: + request_id = 'req_invalid' + invalid_response = object() + response = prepare_response_object( request_id=request_id, - response=specific_error_direct, # Pass TaskNotFoundError() directly + response=invalid_response, # type: ignore success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, ) - self.assertEqual(mock_build_error.call_count, 1) - args, _ = mock_build_error.call_args - self.assertEqual(args[0], request_id) - # Check that the error passed to build_error_response is the generated A2AError(InvalidAgentResponseError) - self.assertIsInstance(args[1], A2AError) - self.assertIsInstance(args[1].root, InvalidAgentResponseError) - self.assertEqual(args[2], GetTaskResponse) - self.assertEqual(response_wrapper, mock_final_wrapped_response) - - def test_prepare_response_object_with_request_id_string(self) -> None: - request_id = 'string_id_prep' - task_result = self._create_sample_task() - response_wrapper = prepare_response_object( - request_id=request_id, - response=task_result, - success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, - ) - self.assertIsInstance(response_wrapper.root, GetTaskSuccessResponse) - self.assertEqual(response_wrapper.root.id, request_id) - - def test_prepare_response_object_with_request_id_int(self) -> None: - request_id = 101112 - task_result = self._create_sample_task() - response_wrapper = prepare_response_object( - request_id=request_id, - response=task_result, - success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, - ) - self.assertIsInstance(response_wrapper.root, GetTaskSuccessResponse) - self.assertEqual(response_wrapper.root.id, request_id) - - def test_prepare_response_object_with_request_id_none(self) -> None: - request_id = None - task_result = self._create_sample_task() - response_wrapper = prepare_response_object( - request_id=request_id, - response=task_result, - success_response_types=(Task,), - success_payload_type=GetTaskSuccessResponse, - response_type=GetTaskResponse, - ) - self.assertIsInstance(response_wrapper.root, GetTaskSuccessResponse) - self.assertIsNone(response_wrapper.root.id) + # Should return an InvalidAgentResponseError + self.assertIsInstance(response, dict) + self.assertIn('error', response) + # Check that it's an InvalidAgentResponseError (code -32006) + self.assertEqual(response['error']['code'], -32006) if __name__ == '__main__': diff --git a/tests/server/routes/__init__.py b/tests/server/routes/__init__.py new file mode 100644 index 000000000..e69de29bb diff --git a/tests/server/routes/test_agent_card_routes.py b/tests/server/routes/test_agent_card_routes.py new file mode 100644 index 000000000..b24438a57 --- /dev/null +++ b/tests/server/routes/test_agent_card_routes.py @@ -0,0 +1,71 @@ +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.testclient import TestClient +from starlette.applications import Starlette + +from a2a.server.routes.agent_card_routes import create_agent_card_routes +from a2a.types.a2a_pb2 import AgentCard + + +@pytest.fixture +def agent_card(): + return AgentCard() + + +def test_get_agent_card_success(agent_card): + """Tests that the agent card route returns the card correctly.""" + routes = create_agent_card_routes(agent_card=agent_card) + + app = Starlette(routes=routes) + client = TestClient(app) + + response = client.get('/.well-known/agent-card.json') + assert response.status_code == 200 + assert response.headers['content-type'] == 'application/json' + assert response.json() == {} # Empty card serializes to empty dict/json + + +def test_get_agent_card_with_modifier(agent_card): + """Tests that card_modifier is called and modifies the response.""" + + # To test modification, let's assume we can mock the dict conversion or just see if the modifier runs. + # Actually card_modifier receives AgentCard and returns AgentCard. + async def modifier(card: AgentCard) -> AgentCard: + # Clone or modify + modified = AgentCard() + # Set some field if possible, or just return a different instance to verify. + # Since Protobuf objects have fields, let's look at one we can set. + # Usually they have fields like 'url' in v0.3 or others. + # Let's just return a MagicMock or set Something that shows up in dict if we know it. + # Wait, if we return a different object, we can verify it. + # Let's try to mock the conversion or just verify it was called. + return card + + mock_modifier = AsyncMock(side_effect=modifier) + routes = create_agent_card_routes( + agent_card=agent_card, card_modifier=mock_modifier + ) + + app = Starlette(routes=routes) + client = TestClient(app) + + response = client.get('/.well-known/agent-card.json') + assert response.status_code == 200 + assert mock_modifier.called + + +def test_agent_card_custom_url(agent_card): + """Tests that custom card_url is respected.""" + custom_url = '/custom/path/agent.json' + routes = create_agent_card_routes( + agent_card=agent_card, card_url=custom_url + ) + + app = Starlette(routes=routes) + client = TestClient(app) + + # Check that default returns 404 + assert client.get('/.well-known/agent-card.json').status_code == 404 + # Check that custom returns 200 + assert client.get(custom_url).status_code == 200 diff --git a/tests/server/routes/test_common.py b/tests/server/routes/test_common.py new file mode 100644 index 000000000..3c4a08d2b --- /dev/null +++ b/tests/server/routes/test_common.py @@ -0,0 +1,156 @@ +from unittest.mock import MagicMock + +import pytest +from starlette.datastructures import Headers + +try: + from starlette.authentication import BaseUser as StarletteBaseUser +except ImportError: + StarletteBaseUser = MagicMock() # type: ignore + +from a2a.auth.user import UnauthenticatedUser +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.server.context import ServerCallContext +from a2a.server.routes.common import ( + StarletteUser, + DefaultServerCallContextBuilder, +) + + +# --- StarletteUser Tests --- + + +class TestStarletteUser: + def test_is_authenticated_true(self): + starlette_user = MagicMock(spec=StarletteBaseUser) + starlette_user.is_authenticated = True + proxy = StarletteUser(starlette_user) + assert proxy.is_authenticated is True + + def test_is_authenticated_false(self): + starlette_user = MagicMock(spec=StarletteBaseUser) + starlette_user.is_authenticated = False + proxy = StarletteUser(starlette_user) + assert proxy.is_authenticated is False + + def test_user_name(self): + starlette_user = MagicMock(spec=StarletteBaseUser) + starlette_user.display_name = 'Test User' + proxy = StarletteUser(starlette_user) + assert proxy.user_name == 'Test User' + + def test_user_name_raises_attribute_error(self): + starlette_user = MagicMock(spec=StarletteBaseUser) + del starlette_user.display_name + proxy = StarletteUser(starlette_user) + with pytest.raises(AttributeError, match='display_name'): + _ = proxy.user_name + + +# --- default_user_builder Tests --- + + +def _make_mock_request(scope=None, headers=None): + request = MagicMock() + request.scope = scope or {} + request.headers = Headers(headers or {}) + return request + + +class TestDefaultContextBuilder: + def test_returns_unauthenticated_user_when_no_user_in_scope(self): + request = _make_mock_request(scope={}) + user = DefaultServerCallContextBuilder().build_user(request) + assert isinstance(user, UnauthenticatedUser) + assert user.is_authenticated is False + assert user.user_name == '' + + def test_returns_proxy_when_user_in_scope(self): + starlette_user = MagicMock() + starlette_user.is_authenticated = True + starlette_user.display_name = 'Alice' + request = _make_mock_request(scope={'user': starlette_user}) + request.user = starlette_user + + user = DefaultServerCallContextBuilder().build_user(request) + assert isinstance(user, StarletteUser) + assert user.is_authenticated is True + assert user.user_name == 'Alice' + + def test_returns_unauthenticated_proxy_when_user_not_authenticated(self): + starlette_user = MagicMock() + starlette_user.is_authenticated = False + starlette_user.display_name = '' + request = _make_mock_request(scope={'user': starlette_user}) + request.user = starlette_user + + user = DefaultServerCallContextBuilder().build_user(request) + assert isinstance(user, StarletteUser) + assert user.is_authenticated is False + + +# --- build_server_call_context Tests --- + + +class TestBuildServerCallContext: + def test_basic_context_with_default_user_builder(self): + request = _make_mock_request( + scope={}, headers={'content-type': 'application/json'} + ) + ctx = DefaultServerCallContextBuilder().build(request) + + assert isinstance(ctx, ServerCallContext) + assert isinstance(ctx.user, UnauthenticatedUser) + assert 'headers' in ctx.state + assert ctx.state['headers']['content-type'] == 'application/json' + assert 'auth' not in ctx.state + + def test_auth_populated_when_in_scope(self): + auth_credentials = MagicMock() + request = _make_mock_request(scope={'auth': auth_credentials}) + request.auth = auth_credentials + + ctx = DefaultServerCallContextBuilder().build(request) + assert ctx.state['auth'] is auth_credentials + + def test_auth_not_populated_when_not_in_scope(self): + request = _make_mock_request(scope={}) + ctx = DefaultServerCallContextBuilder().build(request) + assert 'auth' not in ctx.state + + def test_headers_captured_in_state(self): + request = _make_mock_request( + headers={'x-custom': 'value', 'authorization': 'Bearer tok'} + ) + ctx = DefaultServerCallContextBuilder().build(request) + assert ctx.state['headers']['x-custom'] == 'value' + assert ctx.state['headers']['authorization'] == 'Bearer tok' + + def test_requested_extensions_single(self): + request = _make_mock_request(headers={HTTP_EXTENSION_HEADER: 'foo'}) + ctx = DefaultServerCallContextBuilder().build(request) + assert ctx.requested_extensions == {'foo'} + + def test_requested_extensions_comma_separated(self): + request = _make_mock_request( + headers={HTTP_EXTENSION_HEADER: 'foo, bar'} + ) + ctx = DefaultServerCallContextBuilder().build(request) + assert ctx.requested_extensions == {'foo', 'bar'} + + def test_no_extensions(self): + request = _make_mock_request() + ctx = DefaultServerCallContextBuilder().build(request) + assert ctx.requested_extensions == set() + + def test_custom_user_builder(self): + custom_user = MagicMock(spec=UnauthenticatedUser) + custom_user.is_authenticated = True + + class MyContextBuilder(DefaultServerCallContextBuilder): + def build_user(self, req): + return custom_user + + request = _make_mock_request() + ctx = MyContextBuilder().build(request) + assert ctx.user is custom_user diff --git a/tests/server/routes/test_jsonrpc_dispatcher.py b/tests/server/routes/test_jsonrpc_dispatcher.py new file mode 100644 index 000000000..7ce73eb2e --- /dev/null +++ b/tests/server/routes/test_jsonrpc_dispatcher.py @@ -0,0 +1,598 @@ +import asyncio +import json +from typing import Any +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from starlette.responses import JSONResponse +from starlette.testclient import TestClient + +try: + from starlette.authentication import BaseUser as StarletteBaseUser +except ImportError: + StarletteBaseUser = MagicMock() # type: ignore + +from a2a.extensions.common import HTTP_EXTENSION_HEADER +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + Artifact, + ListTaskPushNotificationConfigsResponse, + ListTasksResponse, + Message, + Part, + Role, + Task, + TaskArtifactUpdateEvent, + TaskPushNotificationConfig, + TaskState, + TaskStatus, +) +from a2a.server.routes import jsonrpc_dispatcher + +from a2a.server.routes.jsonrpc_dispatcher import JsonRpcDispatcher +from a2a.server.routes.jsonrpc_routes import create_jsonrpc_routes +from a2a.server.routes.agent_card_routes import create_agent_card_routes +from a2a.server.jsonrpc_models import JSONRPCError +from a2a.utils.errors import A2AError + + +# --- JsonRpcDispatcher Tests --- + + +@pytest.fixture +def mock_handler(): + handler = AsyncMock(spec=RequestHandler) + handler.on_message_send.return_value = Message( + message_id='test', + role=Role.ROLE_AGENT, + parts=[Part(text='response message')], + ) + return handler + + +@pytest.fixture +def test_app(mock_handler): + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + mock_agent_card.capabilities = MagicMock() + mock_agent_card.capabilities.streaming = False + + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_handler, rpc_url='/' + ) + + from starlette.applications import Starlette + + return Starlette(routes=jsonrpc_routes) + + +@pytest.fixture +def client(test_app): + return TestClient(test_app, headers={'A2A-Version': '1.0'}) + + +def _make_send_message_request( + text: str = 'hi', tenant: str | None = None +) -> dict: + params: dict[str, Any] = { + 'message': { + 'messageId': '1', + 'role': 'ROLE_USER', + 'parts': [{'text': text}], + } + } + if tenant is not None: + params['tenant'] = tenant + + return { + 'jsonrpc': '2.0', + 'id': '1', + 'method': 'SendMessage', + 'params': params, + } + + +class TestJsonRpcDispatcherOptionalDependencies: + @pytest.fixture(scope='class') + def mock_app_params(self) -> dict: + mock_handler = MagicMock(spec=RequestHandler) + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://example.com' + mock_handler._agent_card = mock_agent_card + return {'request_handler': mock_handler} + + @pytest.fixture(scope='class') + def mark_pkg_starlette_not_installed(self): + pkg_starlette_installed_flag = ( + jsonrpc_dispatcher._package_starlette_installed + ) + jsonrpc_dispatcher._package_starlette_installed = False + yield + jsonrpc_dispatcher._package_starlette_installed = ( + pkg_starlette_installed_flag + ) + + def test_create_dispatcher_with_missing_deps_raises_importerror( + self, mock_app_params: dict, mark_pkg_starlette_not_installed: Any + ): + with pytest.raises( + ImportError, + match=( + 'Packages `starlette` and `sse-starlette` are required to use' + ' the `JsonRpcDispatcher`' + ), + ): + JsonRpcDispatcher(**mock_app_params) + + +class TestJsonRpcDispatcherExtensions: + def test_request_with_single_extension(self, client, mock_handler): + headers = {HTTP_EXTENSION_HEADER: 'foo'} + response = client.post( + '/', + headers=headers, + json=_make_send_message_request(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert isinstance(call_context, ServerCallContext) + assert call_context.requested_extensions == {'foo'} + + def test_request_with_comma_separated_extensions( + self, client, mock_handler + ): + headers = {HTTP_EXTENSION_HEADER: 'foo, bar'} + response = client.post( + '/', + headers=headers, + json=_make_send_message_request(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.requested_extensions == {'foo', 'bar'} + + def test_method_added_to_call_context_state(self, client, mock_handler): + response = client.post( + '/', + json=_make_send_message_request(), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert call_context.state['method'] == 'SendMessage' + + +class TestJsonRpcDispatcherTenant: + def test_tenant_extraction_from_params(self, client, mock_handler): + tenant_id = 'my-tenant-123' + response = client.post( + '/', + json=_make_send_message_request(tenant=tenant_id), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert isinstance(call_context, ServerCallContext) + assert call_context.tenant == tenant_id + + def test_no_tenant_extraction(self, client, mock_handler): + response = client.post( + '/', + json=_make_send_message_request(tenant=None), + ) + response.raise_for_status() + + mock_handler.on_message_send.assert_called_once() + call_context = mock_handler.on_message_send.call_args[0][1] + assert isinstance(call_context, ServerCallContext) + assert call_context.tenant == '' + + +class TestJsonRpcDispatcherV03Compat: + def test_v0_3_compat_flag_routes_to_adapter(self, mock_handler): + mock_agent_card = MagicMock(spec=AgentCard) + mock_agent_card.url = 'http://mockurl.com' + mock_agent_card.capabilities = MagicMock() + mock_agent_card.capabilities.streaming = False + + mock_handler._agent_card = mock_agent_card + + from starlette.applications import Starlette + + jsonrpc_routes = create_jsonrpc_routes( + request_handler=mock_handler, enable_v0_3_compat=True, rpc_url='/' + ) + app = Starlette(routes=jsonrpc_routes) + client = TestClient(app) + + request_data = { + 'jsonrpc': '2.0', + 'id': '1', + 'method': 'message/send', + 'params': { + 'message': { + 'messageId': 'msg-1', + 'role': 'ROLE_USER', + 'parts': [{'text': 'Hello'}], + } + }, + } + + dispatcher_instance = jsonrpc_routes[0].endpoint.__self__ + with patch.object( + dispatcher_instance._v03_adapter, + 'handle_request', + new_callable=AsyncMock, + ) as mock_handle: + mock_handle.return_value = JSONResponse( + {'jsonrpc': '2.0', 'id': '1', 'result': {}} + ) + + response = client.post('/', json=request_data) + + response.raise_for_status() + assert mock_handle.called + assert mock_handle.call_args[1]['method'] == 'message/send' + + +def _make_jsonrpc_request(method: str, params: dict | None = None) -> dict: + """Helper to build a JSON-RPC 2.0 request dict.""" + return { + 'jsonrpc': '2.0', + 'id': '1', + 'method': method, + 'params': params or {}, + } + + +class TestJsonRpcDispatcherMethodRouting: + """Tests that each JSON-RPC method name routes to the correct handler.""" + + @pytest.fixture + def handler(self): + handler = AsyncMock(spec=RequestHandler) + handler.on_message_send.return_value = Message( + message_id='test', + role=Role.ROLE_AGENT, + parts=[Part(text='ok')], + ) + handler.on_cancel_task.return_value = Task( + id='task1', + context_id='ctx1', + status=TaskStatus(state=TaskState.TASK_STATE_CANCELED), + ) + handler.on_get_task.return_value = Task( + id='task1', + context_id='ctx1', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + handler.on_list_tasks.return_value = ListTasksResponse() + handler.on_create_task_push_notification_config.return_value = ( + TaskPushNotificationConfig(task_id='t1', url='https://example.com') + ) + handler.on_get_task_push_notification_config.return_value = ( + TaskPushNotificationConfig(task_id='t1', url='https://example.com') + ) + handler.on_list_task_push_notification_configs.return_value = ( + ListTaskPushNotificationConfigsResponse() + ) + handler.on_delete_task_push_notification_config.return_value = None + return handler + + @pytest.fixture + def agent_card(self): + return AgentCard( + capabilities=AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ), + name='TestAgent', + version='1.0', + ) + + @pytest.fixture + def client(self, handler, agent_card): + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + from starlette.applications import Starlette + + app = Starlette(routes=jsonrpc_routes) + return TestClient(app, headers={'A2A-Version': '1.0'}) + + # --- Non-streaming method routing tests --- + + def test_send_message_routes_to_on_message_send(self, client, handler): + response = client.post( + '/', + json=_make_jsonrpc_request( + 'SendMessage', + { + 'message': { + 'messageId': '1', + 'role': 'ROLE_USER', + 'parts': [{'text': 'hello'}], + } + }, + ), + ) + response.raise_for_status() + + handler.on_message_send.assert_called_once() + call_context = handler.on_message_send.call_args[0][1] + assert call_context.state['method'] == 'SendMessage' + + def test_cancel_task_routes_to_on_cancel_task(self, client, handler): + response = client.post( + '/', + json=_make_jsonrpc_request('CancelTask', {'id': 'task1'}), + ) + response.raise_for_status() + + handler.on_cancel_task.assert_called_once() + call_context = handler.on_cancel_task.call_args[0][1] + assert call_context.state['method'] == 'CancelTask' + + def test_get_task_routes_to_on_get_task(self, client, handler): + response = client.post( + '/', + json=_make_jsonrpc_request('GetTask', {'id': 'task1'}), + ) + response.raise_for_status() + + handler.on_get_task.assert_called_once() + call_context = handler.on_get_task.call_args[0][1] + assert call_context.state['method'] == 'GetTask' + + def test_list_tasks_routes_to_on_list_tasks(self, client, handler): + response = client.post( + '/', + json=_make_jsonrpc_request('ListTasks'), + ) + response.raise_for_status() + + handler.on_list_tasks.assert_called_once() + call_context = handler.on_list_tasks.call_args[0][1] + assert call_context.state['method'] == 'ListTasks' + + def test_create_push_notification_config_routes_correctly( + self, client, handler + ): + response = client.post( + '/', + json=_make_jsonrpc_request( + 'CreateTaskPushNotificationConfig', + {'taskId': 't1', 'url': 'https://example.com'}, + ), + ) + response.raise_for_status() + + handler.on_create_task_push_notification_config.assert_called_once() + call_context = ( + handler.on_create_task_push_notification_config.call_args[0][1] + ) + assert ( + call_context.state['method'] == 'CreateTaskPushNotificationConfig' + ) + + def test_get_push_notification_config_routes_correctly( + self, client, handler + ): + response = client.post( + '/', + json=_make_jsonrpc_request( + 'GetTaskPushNotificationConfig', + {'taskId': 't1', 'id': 'config1'}, + ), + ) + response.raise_for_status() + + handler.on_get_task_push_notification_config.assert_called_once() + call_context = handler.on_get_task_push_notification_config.call_args[ + 0 + ][1] + assert call_context.state['method'] == 'GetTaskPushNotificationConfig' + + def test_list_push_notification_configs_routes_correctly( + self, client, handler + ): + response = client.post( + '/', + json=_make_jsonrpc_request( + 'ListTaskPushNotificationConfigs', + {'taskId': 't1'}, + ), + ) + response.raise_for_status() + + handler.on_list_task_push_notification_configs.assert_called_once() + call_context = handler.on_list_task_push_notification_configs.call_args[ + 0 + ][1] + assert call_context.state['method'] == 'ListTaskPushNotificationConfigs' + + def test_delete_push_notification_config_routes_correctly( + self, client, handler + ): + response = client.post( + '/', + json=_make_jsonrpc_request( + 'DeleteTaskPushNotificationConfig', + {'taskId': 't1', 'id': 'config1'}, + ), + ) + response.raise_for_status() + data = response.json() + assert data.get('result') is None + + handler.on_delete_task_push_notification_config.assert_called_once() + call_context = ( + handler.on_delete_task_push_notification_config.call_args[0][1] + ) + assert ( + call_context.state['method'] == 'DeleteTaskPushNotificationConfig' + ) + + def test_get_extended_agent_card_routes_correctly( + self, handler, agent_card + ): + captured: dict[str, Any] = {} + + async def capture_modifier(card, context): + captured['method'] = context.state.get('method') + return card + + handler.on_get_extended_agent_card.return_value = agent_card + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + from starlette.applications import Starlette + + app = Starlette(routes=jsonrpc_routes) + client = TestClient(app, headers={'A2A-Version': '1.0'}) + + response = client.post( + '/', + json=_make_jsonrpc_request('GetExtendedAgentCard'), + ) + response.raise_for_status() + data = response.json() + assert 'result' in data + assert data['result']['name'] == 'TestAgent' + handler.on_get_extended_agent_card.assert_called_once() + + # --- Streaming method routing tests --- + + @pytest.mark.asyncio + async def test_send_streaming_message_routes_to_on_message_send_stream( + self, handler, agent_card + ): + async def stream_generator(): + yield TaskArtifactUpdateEvent( + artifact=Artifact( + artifact_id='a1', + name='result', + parts=[Part(text='streamed')], + ), + task_id='task1', + context_id='ctx1', + append=False, + last_chunk=True, + ) + + handler.on_message_send_stream = MagicMock( + return_value=stream_generator() + ) + + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + from starlette.applications import Starlette + + app = Starlette(routes=jsonrpc_routes) + client = TestClient(app, headers={'A2A-Version': '1.0'}) + + try: + with client.stream( + 'POST', + '/', + json=_make_jsonrpc_request( + 'SendStreamingMessage', + { + 'message': { + 'messageId': '1', + 'role': 'ROLE_USER', + 'parts': [{'text': 'hello'}], + } + }, + ), + ) as response: + assert response.status_code == 200 + assert response.headers['content-type'].startswith( + 'text/event-stream' + ) + content = b'' + for chunk in response.iter_bytes(): + content += chunk + assert b'a1' in content + finally: + client.close() + await asyncio.sleep(0.1) + + handler.on_message_send_stream.assert_called_once() + call_context = handler.on_message_send_stream.call_args[0][1] + assert call_context.state['method'] == 'SendStreamingMessage' + + @pytest.mark.asyncio + async def test_subscribe_to_task_routes_to_on_subscribe_to_task( + self, handler, agent_card + ): + async def stream_generator(): + yield TaskArtifactUpdateEvent( + artifact=Artifact( + artifact_id='a1', + name='result', + parts=[Part(text='streamed')], + ), + task_id='task1', + context_id='ctx1', + append=False, + last_chunk=True, + ) + + handler.on_subscribe_to_task = MagicMock( + return_value=stream_generator() + ) + + jsonrpc_routes = create_jsonrpc_routes( + request_handler=handler, + rpc_url='/', + ) + from starlette.applications import Starlette + + app = Starlette(routes=jsonrpc_routes) + client = TestClient(app, headers={'A2A-Version': '1.0'}) + + try: + with client.stream( + 'POST', + '/', + json=_make_jsonrpc_request( + 'SubscribeToTask', + { + 'id': 'task1', + }, + ), + ) as response: + assert response.status_code == 200 + assert response.headers['content-type'].startswith( + 'text/event-stream' + ) + content = b'' + for chunk in response.iter_bytes(): + content += chunk + assert b'a1' in content + finally: + client.close() + await asyncio.sleep(0.1) + + handler.on_subscribe_to_task.assert_called_once() + call_context = handler.on_subscribe_to_task.call_args[0][1] + assert call_context.state['method'] == 'SubscribeToTask' + + +if __name__ == '__main__': + pytest.main([__file__]) diff --git a/tests/server/routes/test_jsonrpc_routes.py b/tests/server/routes/test_jsonrpc_routes.py new file mode 100644 index 000000000..ff1b81f3f --- /dev/null +++ b/tests/server/routes/test_jsonrpc_routes.py @@ -0,0 +1,59 @@ +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.testclient import TestClient +from starlette.applications import Starlette + +from a2a.server.routes.jsonrpc_routes import create_jsonrpc_routes +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.types.a2a_pb2 import AgentCard + + +@pytest.fixture +def agent_card(): + return AgentCard() + + +@pytest.fixture +def mock_handler(): + return AsyncMock(spec=RequestHandler) + + +def test_routes_creation(agent_card, mock_handler): + """Tests that create_jsonrpc_routes creates Route objects list.""" + routes = create_jsonrpc_routes( + request_handler=mock_handler, rpc_url='/a2a/jsonrpc' + ) + + assert isinstance(routes, list) + assert len(routes) == 1 + + from starlette.routing import Route + + assert isinstance(routes[0], Route) + assert routes[0].methods == {'POST'} + + +def test_jsonrpc_custom_url(agent_card, mock_handler): + """Tests that custom rpc_url is respected for routing.""" + custom_url = '/custom/api/jsonrpc' + routes = create_jsonrpc_routes( + request_handler=mock_handler, rpc_url=custom_url + ) + + app = Starlette(routes=routes) + client = TestClient(app) + + # Check that default path returns 404 + assert client.post('/a2a/jsonrpc', json={}).status_code == 404 + + # Check that custom path routes to dispatcher (which will return JSON-RPC response, even if error) + response = client.post( + custom_url, json={'jsonrpc': '2.0', 'id': '1', 'method': 'foo'} + ) + assert response.status_code == 200 + resp_json = response.json() + assert 'error' in resp_json + # Method not found error from dispatcher + assert resp_json['error']['code'] == -32601 diff --git a/tests/server/routes/test_rest_dispatcher.py b/tests/server/routes/test_rest_dispatcher.py new file mode 100644 index 000000000..a1d2c27cd --- /dev/null +++ b/tests/server/routes/test_rest_dispatcher.py @@ -0,0 +1,295 @@ +import json +from collections.abc import AsyncIterator +from typing import Any +from unittest.mock import AsyncMock, MagicMock + +import pytest +from starlette.requests import Request +from starlette.responses import JSONResponse + +from a2a.server.context import ServerCallContext +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.routes import rest_dispatcher +from a2a.server.routes.rest_dispatcher import ( + RestDispatcher, +) +from a2a.types.a2a_pb2 import ( + AgentCapabilities, + AgentCard, + Message, + SendMessageResponse, + Task, + TaskPushNotificationConfig, + ListTasksResponse, + ListTaskPushNotificationConfigsResponse, +) +from a2a.utils.errors import ( + ExtendedAgentCardNotConfiguredError, + TaskNotFoundError, + UnsupportedOperationError, +) + + +@pytest.fixture +def agent_card(): + card = MagicMock(spec=AgentCard) + card.capabilities = AgentCapabilities( + streaming=True, + push_notifications=True, + extended_agent_card=True, + ) + return card + + +@pytest.fixture +def mock_handler(agent_card): + handler = AsyncMock(spec=RequestHandler) + # Default success cases + handler._agent_card = agent_card + handler.on_message_send.return_value = Message(message_id='test_msg') + handler.on_cancel_task.return_value = Task(id='test_task') + handler.on_get_task.return_value = Task(id='test_task') + handler.on_get_extended_agent_card.return_value = agent_card() + handler.on_list_tasks.return_value = ListTasksResponse() + handler.on_get_task_push_notification_config.return_value = ( + TaskPushNotificationConfig(url='http://test') + ) + handler.on_create_task_push_notification_config.return_value = ( + TaskPushNotificationConfig(url='http://test') + ) + handler.on_list_task_push_notification_configs.return_value = ( + ListTaskPushNotificationConfigsResponse() + ) + + # Streaming mocks + async def mock_stream(*args, **kwargs) -> AsyncIterator[Task]: + yield Task(id='chunk1') + yield Task(id='chunk2') + + handler.on_message_send_stream.side_effect = mock_stream + handler.on_subscribe_to_task.side_effect = mock_stream + return handler + + +@pytest.fixture +def rest_dispatcher_instance(mock_handler): + return RestDispatcher(request_handler=mock_handler) + + +from starlette.datastructures import Headers + + +def make_mock_request( + method: str = 'GET', + path_params: dict | None = None, + query_params: dict | None = None, + headers: dict | None = None, + body: bytes = b'{}', +) -> Request: + mock_req = MagicMock(spec=Request) + mock_req.method = method + mock_req.path_params = path_params or {} + mock_req.query_params = query_params or {} + + # Default valid headers for A2A + default_headers = {'a2a-version': '1.0'} + if headers: + default_headers.update(headers) + + mock_req.headers = Headers(default_headers) + mock_req.body = AsyncMock(return_value=body) + + # Needs to be able to build ServerCallContext, so provide .user and .auth etc. if needed + mock_req.user = MagicMock(is_authenticated=False) + mock_req.auth = None + mock_req.scope = {} + return mock_req + + +class TestRestDispatcherInitialization: + @pytest.fixture(scope='class') + def mark_pkg_starlette_not_installed(self): + pkg_starlette_installed_flag = ( + rest_dispatcher._package_starlette_installed + ) + rest_dispatcher._package_starlette_installed = False + yield + rest_dispatcher._package_starlette_installed = ( + pkg_starlette_installed_flag + ) + + def test_missing_starlette_raises_importerror( + self, mark_pkg_starlette_not_installed, mock_handler + ): + with pytest.raises( + ImportError, + match='Packages `starlette` and `sse-starlette` are required', + ): + RestDispatcher(request_handler=mock_handler) + + +@pytest.mark.asyncio +class TestRestDispatcherContextManagement: + async def test_build_call_context(self, rest_dispatcher_instance): + req = make_mock_request(path_params={'tenant': 'my-tenant'}) + context = rest_dispatcher_instance._build_call_context(req) + + assert isinstance(context, ServerCallContext) + assert context.tenant == 'my-tenant' + assert context.state['headers']['a2a-version'] == '1.0' + + +@pytest.mark.asyncio +class TestRestDispatcherEndpoints: + async def test_on_message_send_throws_error_for_unsupported_version( + self, rest_dispatcher_instance, mock_handler + ): + # 0.3 is currently not supported for direct message sending on RestDispatcher + req = make_mock_request(method='POST', headers={'a2a-version': '0.3.0'}) + response = await rest_dispatcher_instance.on_message_send(req) + + # VersionNotSupportedError maps to 400 Bad Request + assert response.status_code == 400 + + async def test_on_message_send_returns_message( + self, rest_dispatcher_instance, mock_handler + ): + req = make_mock_request(method='POST') + response = await rest_dispatcher_instance.on_message_send(req) + + assert isinstance(response, JSONResponse) + assert response.status_code == 200 + data = json.loads(response.body) + assert 'message' in data + + async def test_on_message_send_returns_task( + self, rest_dispatcher_instance, mock_handler + ): + mock_handler.on_message_send.return_value = Task(id='new_task') + req = make_mock_request(method='POST') + + response = await rest_dispatcher_instance.on_message_send(req) + assert response.status_code == 200 + data = json.loads(response.body) + assert 'task' in data + assert data['task']['id'] == 'new_task' + + async def test_on_cancel_task_success( + self, rest_dispatcher_instance, mock_handler + ): + req = make_mock_request(method='POST', path_params={'id': 'test_task'}) + response = await rest_dispatcher_instance.on_cancel_task(req) + + assert response.status_code == 200 + data = json.loads(response.body) + assert data['id'] == 'test_task' + + async def test_on_cancel_task_not_found( + self, rest_dispatcher_instance, mock_handler + ): + mock_handler.on_cancel_task.return_value = None + req = make_mock_request(method='POST', path_params={'id': 'test_task'}) + + response = await rest_dispatcher_instance.on_cancel_task(req) + assert response.status_code == 404 # TaskNotFoundError maps to 404 + + async def test_on_get_task_success( + self, rest_dispatcher_instance, mock_handler + ): + req = make_mock_request(method='GET', path_params={'id': 'test_task'}) + response = await rest_dispatcher_instance.on_get_task(req) + + assert response.status_code == 200 + data = json.loads(response.body) + assert data['id'] == 'test_task' + + async def test_on_get_task_not_found( + self, rest_dispatcher_instance, mock_handler + ): + mock_handler.on_get_task.return_value = None + req = make_mock_request( + method='GET', path_params={'id': 'missing_task'} + ) + + response = await rest_dispatcher_instance.on_get_task(req) + assert response.status_code == 404 + + async def test_list_tasks(self, rest_dispatcher_instance, mock_handler): + req = make_mock_request(method='GET') + response = await rest_dispatcher_instance.list_tasks(req) + assert response.status_code == 200 + + async def test_get_push_notification( + self, rest_dispatcher_instance, mock_handler + ): + req = make_mock_request( + method='GET', path_params={'id': 'task1', 'push_id': 'push1'} + ) + response = await rest_dispatcher_instance.get_push_notification(req) + assert response.status_code == 200 + data = json.loads(response.body) + assert data['url'] == 'http://test' + + async def test_delete_push_notification( + self, rest_dispatcher_instance, mock_handler + ): + req = make_mock_request( + method='DELETE', path_params={'id': 'task1', 'push_id': 'push1'} + ) + response = await rest_dispatcher_instance.delete_push_notification(req) + assert response.status_code == 200 + + async def test_handle_authenticated_agent_card( + self, rest_dispatcher_instance + ): + req = make_mock_request() + response = ( + await rest_dispatcher_instance.handle_authenticated_agent_card(req) + ) + assert response.status_code == 200 + + +@pytest.mark.asyncio +class TestRestDispatcherStreaming: + async def test_on_message_send_stream_success( + self, rest_dispatcher_instance + ): + req = make_mock_request(method='POST') + response = await rest_dispatcher_instance.on_message_send_stream(req) + + assert response.status_code == 200 + + chunks = [] + async for chunk in response.body_iterator: + chunks.append(chunk) + + assert len(chunks) == 2 + assert 'chunk1' in chunks[0].data + assert 'chunk2' in chunks[1].data + + async def test_on_subscribe_to_task_success(self, rest_dispatcher_instance): + req = make_mock_request(method='GET', path_params={'id': 'test_task'}) + response = await rest_dispatcher_instance.on_subscribe_to_task(req) + + assert response.status_code == 200 + + chunks = [] + async for chunk in response.body_iterator: + chunks.append(chunk) + + assert len(chunks) == 2 + assert 'chunk1' in chunks[0].data + assert 'chunk2' in chunks[1].data + + async def test_on_message_send_stream_handler_error(self, mock_handler): + from a2a.utils.errors import UnsupportedOperationError + + mock_handler.on_message_send_stream.side_effect = ( + UnsupportedOperationError('Mocked error') + ) + + dispatcher = RestDispatcher(request_handler=mock_handler) + req = make_mock_request(method='POST') + + response = await dispatcher.on_message_send_stream(req) + assert response.status_code == 400 diff --git a/tests/server/routes/test_rest_routes.py b/tests/server/routes/test_rest_routes.py new file mode 100644 index 000000000..2b3477c6b --- /dev/null +++ b/tests/server/routes/test_rest_routes.py @@ -0,0 +1,94 @@ +from unittest.mock import AsyncMock + +import pytest +from starlette.applications import Starlette +from starlette.testclient import TestClient +from starlette.routing import BaseRoute, Route + +from a2a.server.request_handlers.request_handler import RequestHandler +from a2a.server.routes.rest_routes import create_rest_routes +from a2a.types.a2a_pb2 import AgentCard, Task, ListTasksResponse + + +@pytest.fixture +def agent_card(): + return AgentCard() + + +@pytest.fixture +def mock_handler(): + return AsyncMock(spec=RequestHandler) + + +def test_routes_creation(agent_card, mock_handler): + """Tests that create_rest_routes creates Route objects list.""" + routes = create_rest_routes(request_handler=mock_handler) + + assert isinstance(routes, list) + assert len(routes) > 0 + assert all((isinstance(r, BaseRoute) for r in routes)) + + +def test_routes_creation_v03_compat(agent_card, mock_handler): + """Tests that create_rest_routes creates more routes with enable_v0_3_compat.""" + mock_handler._agent_card = agent_card + routes_without_compat = create_rest_routes( + request_handler=mock_handler, enable_v0_3_compat=False + ) + routes_with_compat = create_rest_routes( + request_handler=mock_handler, enable_v0_3_compat=True + ) + + assert len(routes_with_compat) > len(routes_without_compat) + + +def test_rest_endpoints_routing(agent_card, mock_handler): + """Tests that mounted routes route to the handler endpoints.""" + mock_handler.on_message_send.return_value = Task(id='123') + + routes = create_rest_routes(request_handler=mock_handler) + app = Starlette(routes=routes) + client = TestClient(app) + + # Test POST /message:send + response = client.post( + '/message:send', json={}, headers={'A2A-Version': '1.0'} + ) + assert response.status_code == 200 + assert response.json()['task']['id'] == '123' + assert mock_handler.on_message_send.called + + +def test_rest_endpoints_routing_tenant(agent_card, mock_handler): + """Tests that mounted routes with {tenant} route to the handler endpoints.""" + mock_handler.on_message_send.return_value = Task(id='123') + + routes = create_rest_routes(request_handler=mock_handler) + app = Starlette(routes=routes) + client = TestClient(app) + + # Test POST /{tenant}/message:send + response = client.post( + '/my-tenant/message:send', json={}, headers={'A2A-Version': '1.0'} + ) + assert response.status_code == 200 + + # Verify that tenant was set in call context + call_args = mock_handler.on_message_send.call_args + assert call_args is not None + # call_args[0] is positional args. In on_message_send(params, context): + context = call_args[0][1] + assert context.tenant == 'my-tenant' + + +def test_rest_list_tasks(agent_card, mock_handler): + """Tests that list tasks endpoint is routed to the handler.""" + mock_handler.on_list_tasks.return_value = ListTasksResponse() + + routes = create_rest_routes(request_handler=mock_handler) + app = Starlette(routes=routes) + client = TestClient(app) + + response = client.get('/tasks', headers={'A2A-Version': '1.0'}) + assert response.status_code == 200 + assert mock_handler.on_list_tasks.called diff --git a/tests/server/tasks/test_copying_task_store.py b/tests/server/tasks/test_copying_task_store.py new file mode 100644 index 000000000..5e07b909b --- /dev/null +++ b/tests/server/tasks/test_copying_task_store.py @@ -0,0 +1,132 @@ +from __future__ import annotations + +import unittest +import pytest + +from unittest.mock import AsyncMock + +from a2a.server.context import ServerCallContext +from a2a.server.tasks.copying_task_store import CopyingTaskStoreAdapter +from a2a.server.tasks.task_store import TaskStore +from a2a.types.a2a_pb2 import ( + ListTasksRequest, + ListTasksResponse, + Task, + TaskState, +) + + +@pytest.mark.asyncio +async def test_copying_task_store_save(): + """Test that the adapter makes a copy of the task when saving.""" + mock_store = AsyncMock(spec=TaskStore) + adapter = CopyingTaskStoreAdapter(mock_store) + + original_task = Task( + id='test_task', status={'state': TaskState.TASK_STATE_WORKING} + ) + context = ServerCallContext() + + await adapter.save(original_task, context) + + # Verify underlying store was called + mock_store.save.assert_awaited_once() + + # Get the saved task + saved_task = mock_store.save.call_args[0][0] + saved_context = mock_store.save.call_args[0][1] + + # Verify context is passed correctly + assert saved_context is context + + # Verify content is identical + assert saved_task.id == original_task.id + assert saved_task.status.state == original_task.status.state + + # Verify it is a COPY, not the same reference + assert saved_task is not original_task + + +@pytest.mark.asyncio +async def test_copying_task_store_get(): + """Test that the adapter returns a copy of the task retrieved.""" + mock_store = AsyncMock(spec=TaskStore) + adapter = CopyingTaskStoreAdapter(mock_store) + + stored_task = Task( + id='test_task', status={'state': TaskState.TASK_STATE_WORKING} + ) + mock_store.get.return_value = stored_task + context = ServerCallContext() + + retrieved_task = await adapter.get('test_task', context) + + # Verify underlying store was called + mock_store.get.assert_awaited_once_with('test_task', context) + + # Verify retrieved task has identical content + assert retrieved_task is not None + assert retrieved_task.id == stored_task.id + assert retrieved_task.status.state == stored_task.status.state + + # Verify it is a COPY, not the same reference + assert retrieved_task is not stored_task + + +@pytest.mark.asyncio +async def test_copying_task_store_get_none(): + """Test that the adapter properly returns None when no task is found.""" + mock_store = AsyncMock(spec=TaskStore) + adapter = CopyingTaskStoreAdapter(mock_store) + + mock_store.get.return_value = None + context = ServerCallContext() + + retrieved_task = await adapter.get('test_task', context) + + # Verify underlying store was called + mock_store.get.assert_awaited_once_with('test_task', context) + assert retrieved_task is None + + +@pytest.mark.asyncio +async def test_copying_task_store_list(): + """Test that the adapter returns a copy of the list response.""" + mock_store = AsyncMock(spec=TaskStore) + adapter = CopyingTaskStoreAdapter(mock_store) + + task1 = Task(id='test_task_1') + task2 = Task(id='test_task_2') + stored_response = ListTasksResponse(tasks=[task1, task2]) + mock_store.list.return_value = stored_response + context = ServerCallContext() + request = ListTasksRequest(page_size=10) + + retrieved_response = await adapter.list(request, context) + + # Verify underlying store was called + mock_store.list.assert_awaited_once_with(request, context) + + # Verify retrieved response has identical content + assert len(retrieved_response.tasks) == 2 + assert retrieved_response.tasks[0].id == 'test_task_1' + assert retrieved_response.tasks[1].id == 'test_task_2' + + # Verify it is a COPY, not the same reference + assert retrieved_response is not stored_response + # Also verify inner tasks are copies + assert retrieved_response.tasks[0] is not task1 + assert retrieved_response.tasks[1] is not task2 + + +@pytest.mark.asyncio +async def test_copying_task_store_delete(): + """Test that the adapter calls delete on underlying store.""" + mock_store = AsyncMock(spec=TaskStore) + adapter = CopyingTaskStoreAdapter(mock_store) + context = ServerCallContext() + + await adapter.delete('test_task', context) + + # Verify underlying store was called + mock_store.delete.assert_awaited_once_with('test_task', context) diff --git a/tests/server/tasks/test_database_push_notification_config_store.py b/tests/server/tasks/test_database_push_notification_config_store.py index 0c3bd4683..b13a5cf55 100644 --- a/tests/server/tasks/test_database_push_notification_config_store.py +++ b/tests/server/tasks/test_database_push_notification_config_store.py @@ -1,8 +1,13 @@ import os +from unittest.mock import MagicMock from collections.abc import AsyncGenerator import pytest +from a2a.server.context import ServerCallContext +from a2a.auth.user import User +from a2a.compat.v0_3 import types as types_v03 +from sqlalchemy import insert # Skip entire test module if SQLAlchemy is not installed @@ -25,17 +30,23 @@ ) from sqlalchemy.inspection import inspect +from google.protobuf.json_format import MessageToJson +from google.protobuf.timestamp_pb2 import Timestamp + from a2a.server.models import ( Base, PushNotificationConfigModel, ) # Important: To get Base.metadata from a2a.server.tasks import DatabasePushNotificationConfigStore -from a2a.types import ( - PushNotificationConfig, +from a2a.types.a2a_pb2 import ( + TaskPushNotificationConfig, Task, TaskState, TaskStatus, ) +from a2a.compat.v0_3.model_conversions import ( + core_to_compat_push_notification_config_model, +) # DSNs for different databases @@ -79,21 +90,44 @@ ) +# Create a proper Timestamp for TaskStatus +def _create_timestamp() -> Timestamp: + """Create a Timestamp from ISO format string.""" + ts = Timestamp() + ts.FromJsonString('2023-01-01T00:00:00Z') + return ts + + # Minimal Task object for testing - remains the same task_status_submitted = TaskStatus( - state=TaskState.submitted, timestamp='2023-01-01T00:00:00Z' + state=TaskState.TASK_STATE_SUBMITTED, timestamp=_create_timestamp() ) MINIMAL_TASK_OBJ = Task( id='task-abc', context_id='session-xyz', status=task_status_submitted, - kind='task', metadata={'test_key': 'test_value'}, - artifacts=[], - history=[], ) +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +MINIMAL_CALL_CONTEXT = ServerCallContext(user=SampleUser(user_name='user')) + + @pytest_asyncio.fixture(params=DB_CONFIGS) async def db_store_parameterized( request, @@ -171,10 +205,12 @@ async def test_set_and_get_info_single_config( ): """Test setting and retrieving a single configuration.""" task_id = 'task-1' - config = PushNotificationConfig(id='config-1', url='http://example.com') + config = TaskPushNotificationConfig(id='config-1', url='http://example.com') - await db_store_parameterized.set_info(task_id, config) - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.set_info(task_id, config, MINIMAL_CALL_CONTEXT) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 1 assert retrieved_configs[0] == config @@ -187,12 +223,22 @@ async def test_set_and_get_info_multiple_configs( """Test setting and retrieving multiple configurations for a single task.""" task_id = 'task-1' - config1 = PushNotificationConfig(id='config-1', url='http://example.com/1') - config2 = PushNotificationConfig(id='config-2', url='http://example.com/2') + config1 = TaskPushNotificationConfig( + id='config-1', task_id=task_id, url='http://example.com/1' + ) + config2 = TaskPushNotificationConfig( + id='config-2', task_id=task_id, url='http://example.com/2' + ) - await db_store_parameterized.set_info(task_id, config1) - await db_store_parameterized.set_info(task_id, config2) - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.set_info( + task_id, config1, MINIMAL_CALL_CONTEXT + ) + await db_store_parameterized.set_info( + task_id, config2, MINIMAL_CALL_CONTEXT + ) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 2 assert config1 in retrieved_configs @@ -206,16 +252,22 @@ async def test_set_info_updates_existing_config( """Test that setting an existing config ID updates the record.""" task_id = 'task-1' config_id = 'config-1' - initial_config = PushNotificationConfig( + initial_config = TaskPushNotificationConfig( id=config_id, url='http://initial.url' ) - updated_config = PushNotificationConfig( + updated_config = TaskPushNotificationConfig( id=config_id, url='http://updated.url' ) - await db_store_parameterized.set_info(task_id, initial_config) - await db_store_parameterized.set_info(task_id, updated_config) - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.set_info( + task_id, initial_config, MINIMAL_CALL_CONTEXT + ) + await db_store_parameterized.set_info( + task_id, updated_config, MINIMAL_CALL_CONTEXT + ) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 1 assert retrieved_configs[0].url == 'http://updated.url' @@ -227,10 +279,12 @@ async def test_set_info_defaults_config_id_to_task_id( ): """Test that config.id defaults to task_id if not provided.""" task_id = 'task-1' - config = PushNotificationConfig(url='http://example.com') # id is None + config = TaskPushNotificationConfig(url='http://example.com') # id is None - await db_store_parameterized.set_info(task_id, config) - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.set_info(task_id, config, MINIMAL_CALL_CONTEXT) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 1 assert retrieved_configs[0].id == task_id @@ -242,7 +296,7 @@ async def test_get_info_not_found( ): """Test getting info for a task with no configs returns an empty list.""" retrieved_configs = await db_store_parameterized.get_info( - 'non-existent-task' + 'non-existent-task', MINIMAL_CALL_CONTEXT ) assert retrieved_configs == [] @@ -253,14 +307,22 @@ async def test_delete_info_specific_config( ): """Test deleting a single, specific configuration.""" task_id = 'task-1' - config1 = PushNotificationConfig(id='config-1', url='http://a.com') - config2 = PushNotificationConfig(id='config-2', url='http://b.com') + config1 = TaskPushNotificationConfig(id='config-1', url='http://a.com') + config2 = TaskPushNotificationConfig(id='config-2', url='http://b.com') - await db_store_parameterized.set_info(task_id, config1) - await db_store_parameterized.set_info(task_id, config2) + await db_store_parameterized.set_info( + task_id, config1, MINIMAL_CALL_CONTEXT + ) + await db_store_parameterized.set_info( + task_id, config2, MINIMAL_CALL_CONTEXT + ) - await db_store_parameterized.delete_info(task_id, 'config-1') - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.delete_info( + task_id, MINIMAL_CALL_CONTEXT, 'config-1' + ) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 1 assert retrieved_configs[0] == config2 @@ -273,14 +335,22 @@ async def test_delete_info_all_for_task( """Test deleting all configurations for a task when config_id is None.""" task_id = 'task-1' - config1 = PushNotificationConfig(id='config-1', url='http://a.com') - config2 = PushNotificationConfig(id='config-2', url='http://b.com') + config1 = TaskPushNotificationConfig(id='config-1', url='http://a.com') + config2 = TaskPushNotificationConfig(id='config-2', url='http://b.com') - await db_store_parameterized.set_info(task_id, config1) - await db_store_parameterized.set_info(task_id, config2) + await db_store_parameterized.set_info( + task_id, config1, MINIMAL_CALL_CONTEXT + ) + await db_store_parameterized.set_info( + task_id, config2, MINIMAL_CALL_CONTEXT + ) - await db_store_parameterized.delete_info(task_id, None) - retrieved_configs = await db_store_parameterized.get_info(task_id) + await db_store_parameterized.delete_info( + task_id, MINIMAL_CALL_CONTEXT, None + ) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert retrieved_configs == [] @@ -291,7 +361,9 @@ async def test_delete_info_not_found( ): """Test that deleting a non-existent config does not raise an error.""" # Should not raise - await db_store_parameterized.delete_info('task-1', 'non-existent-config') + await db_store_parameterized.delete_info( + 'task-1', MINIMAL_CALL_CONTEXT, 'non-existent-config' + ) @pytest.mark.asyncio @@ -300,12 +372,12 @@ async def test_data_is_encrypted_in_db( ): """Verify that the data stored in the database is actually encrypted.""" task_id = 'encrypted-task' - config = PushNotificationConfig( + config = TaskPushNotificationConfig( id='config-1', url='http://secret.url', token='secret-token' ) - plain_json = config.model_dump_json() + plain_json = MessageToJson(config) - await db_store_parameterized.set_info(task_id, config) + await db_store_parameterized.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # Directly query the database to inspect the raw data async_session = async_sessionmaker( @@ -334,8 +406,8 @@ async def test_decryption_error_with_wrong_key( # 1. Store with one key task_id = 'wrong-key-task' - config = PushNotificationConfig(id='config-1', url='http://secret.url') - await db_store_parameterized.set_info(task_id, config) + config = TaskPushNotificationConfig(id='config-1', url='http://secret.url') + await db_store_parameterized.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # 2. Try to read with a different key # Directly query the database to inspect the raw data @@ -344,7 +416,7 @@ async def test_decryption_error_with_wrong_key( db_store_parameterized.engine, encryption_key=wrong_key ) - retrieved_configs = await store2.get_info(task_id) + retrieved_configs = await store2.get_info(task_id, MINIMAL_CALL_CONTEXT) assert retrieved_configs == [] # _from_orm should raise a ValueError @@ -368,14 +440,14 @@ async def test_decryption_error_with_no_key( # 1. Store with one key task_id = 'wrong-key-task' - config = PushNotificationConfig(id='config-1', url='http://secret.url') - await db_store_parameterized.set_info(task_id, config) + config = TaskPushNotificationConfig(id='config-1', url='http://secret.url') + await db_store_parameterized.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # 2. Try to read with no key set # Directly query the database to inspect the raw data store2 = DatabasePushNotificationConfigStore(db_store_parameterized.engine) - retrieved_configs = await store2.get_info(task_id) + retrieved_configs = await store2.get_info(task_id, MINIMAL_CALL_CONTEXT) assert retrieved_configs == [] # _from_orm should raise a ValueError @@ -409,11 +481,15 @@ async def test_custom_table_name( ) task_id = 'custom-table-task' - config = PushNotificationConfig(id='config-1', url='http://custom.url') + config = TaskPushNotificationConfig( + id='config-1', url='http://custom.url' + ) # This will create the table on first use - await custom_store.set_info(task_id, config) - retrieved_configs = await custom_store.get_info(task_id) + await custom_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) + retrieved_configs = await custom_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert len(retrieved_configs) == 1 assert retrieved_configs[0] == config @@ -454,12 +530,16 @@ async def test_set_and_get_info_multiple_configs_no_key( await store.initialize() task_id = 'task-1' - config1 = PushNotificationConfig(id='config-1', url='http://example.com/1') - config2 = PushNotificationConfig(id='config-2', url='http://example.com/2') + config1 = TaskPushNotificationConfig( + id='config-1', url='http://example.com/1' + ) + config2 = TaskPushNotificationConfig( + id='config-2', url='http://example.com/2' + ) - await store.set_info(task_id, config1) - await store.set_info(task_id, config2) - retrieved_configs = await store.get_info(task_id) + await store.set_info(task_id, config1, MINIMAL_CALL_CONTEXT) + await store.set_info(task_id, config2, MINIMAL_CALL_CONTEXT) + retrieved_configs = await store.get_info(task_id, MINIMAL_CALL_CONTEXT) assert len(retrieved_configs) == 2 assert config1 in retrieved_configs @@ -480,10 +560,12 @@ async def test_data_is_not_encrypted_in_db_if_no_key_is_set( await store.initialize() task_id = 'task-1' - config = PushNotificationConfig(id='config-1', url='http://example.com/1') - plain_json = config.model_dump_json() + config = TaskPushNotificationConfig( + id='config-1', url='http://example.com/1' + ) + plain_json = MessageToJson(config) - await store.set_info(task_id, config) + await store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # Directly query the database to inspect the raw data async_session = async_sessionmaker( @@ -513,11 +595,13 @@ async def test_decryption_fallback_for_unencrypted_data( await unencrypted_store.initialize() task_id = 'mixed-encryption-task' - config = PushNotificationConfig(id='config-1', url='http://plain.url') - await unencrypted_store.set_info(task_id, config) + config = TaskPushNotificationConfig(id='config-1', url='http://plain.url') + await unencrypted_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # 2. Try to read with the encryption-enabled store from the fixture - retrieved_configs = await db_store_parameterized.get_info(task_id) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) # Should fall back to parsing as plain JSON and not fail assert len(retrieved_configs) == 1 @@ -547,12 +631,15 @@ async def test_parsing_error_after_successful_decryption( task_id=task_id, config_id=config_id, config_data=encrypted_data, + owner='user', ) session.add(db_model) await session.commit() # 3. get_info should log an error and return an empty list - retrieved_configs = await db_store_parameterized.get_info(task_id) + retrieved_configs = await db_store_parameterized.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert retrieved_configs == [] # 4. _from_orm should raise a ValueError @@ -563,3 +650,221 @@ async def test_parsing_error_after_successful_decryption( with pytest.raises(ValueError): db_store_parameterized._from_orm(db_model_retrieved) # type: ignore + + +@pytest.mark.asyncio +async def test_owner_resource_scoping( + db_store_parameterized: DatabasePushNotificationConfigStore, +) -> None: + """Test that operations are scoped to the correct owner.""" + config_store = db_store_parameterized + + context_user1 = ServerCallContext(user=SampleUser(user_name='user1')) + context_user2 = ServerCallContext(user=SampleUser(user_name='user2')) + + # Create configs for different owners + task1_u1_config1 = TaskPushNotificationConfig( + id='t1-u1-c1', url='http://u1.com/1' + ) + task1_u1_config2 = TaskPushNotificationConfig( + id='t1-u1-c2', url='http://u1.com/2' + ) + task1_u2_config1 = TaskPushNotificationConfig( + id='t1-u2-c1', url='http://u2.com/1' + ) + task2_u1_config1 = TaskPushNotificationConfig( + id='t2-u1-c1', url='http://u1.com/3' + ) + + await config_store.set_info('task1', task1_u1_config1, context_user1) + await config_store.set_info('task1', task1_u1_config2, context_user1) + await config_store.set_info('task1', task1_u2_config1, context_user2) + await config_store.set_info('task2', task2_u1_config1, context_user1) + + # Test GET_INFO + # User 1 should get only their configs for task1 + u1_task1_configs = await config_store.get_info('task1', context_user1) + assert len(u1_task1_configs) == 2 + assert {c.id for c in u1_task1_configs} == {'t1-u1-c1', 't1-u1-c2'} + + # User 2 should get only their configs for task1 + u2_task1_configs = await config_store.get_info('task1', context_user2) + assert len(u2_task1_configs) == 1 + assert u2_task1_configs[0].id == 't1-u2-c1' + + # User 2 should get no configs for task2 + u2_task2_configs = await config_store.get_info('task2', context_user2) + assert len(u2_task2_configs) == 0 + + # User 1 should get their config for task2 + u1_task2_configs = await config_store.get_info('task2', context_user1) + assert len(u1_task2_configs) == 1 + assert u1_task2_configs[0].id == 't2-u1-c1' + + # Test DELETE_INFO + # User 2 deleting User 1's config should not work + await config_store.delete_info('task1', context_user2, 't1-u1-c1') + u1_task1_configs = await config_store.get_info('task1', context_user1) + assert len(u1_task1_configs) == 2 + + # User 1 deleting their own config + await config_store.delete_info( + 'task1', + context_user1, + 't1-u1-c1', + ) + u1_task1_configs = await config_store.get_info('task1', context_user1) + assert len(u1_task1_configs) == 1 + assert u1_task1_configs[0].id == 't1-u1-c2' + + # User 1 deleting all configs for task2 + await config_store.delete_info('task2', context=context_user1) + u1_task2_configs = await config_store.get_info('task2', context_user1) + assert len(u1_task2_configs) == 0 + + # Cleanup remaining + await config_store.delete_info('task1', context=context_user1) + await config_store.delete_info('task1', context=context_user2) + + +@pytest.mark.asyncio +async def test_get_0_3_push_notification_config_detailed( + db_store_parameterized: DatabasePushNotificationConfigStore, +) -> None: + """Test retrieving a legacy v0.3 push notification config from the database. + + This test simulates a database that already contains legacy v0.3 JSON data + and verifies that the store correctly converts it to the modern Protobuf model. + """ + task_id = 'legacy-push-1' + config_id = 'config-legacy-1' + owner = 'legacy_user' + context_user = ServerCallContext(user=SampleUser(user_name=owner)) + + # 1. Create a legacy PushNotificationConfig using v0.3 models + legacy_config = types_v03.PushNotificationConfig( + id=config_id, + url='https://example.com/push', + token='legacy-token', + authentication=types_v03.PushNotificationAuthenticationInfo( + schemes=['bearer'], + credentials='legacy-creds', + ), + ) + + # 2. Manually insert the legacy data into the database + # For PushNotificationConfigStore, the data is stored in the config_data column. + async with db_store_parameterized.async_session_maker.begin() as session: + # Pydantic model_dump_json() produces the JSON that we'll store. + # Note: DatabasePushNotificationConfigStore normally encrypts this, but here + # we'll store it as plain JSON bytes to simulate legacy data. + legacy_json = legacy_config.model_dump_json() + + stmt = insert(db_store_parameterized.config_model).values( + task_id=task_id, + config_id=config_id, + owner=owner, + config_data=legacy_json.encode('utf-8'), + ) + await session.execute(stmt) + + # 3. Retrieve the config using the standard store.get_info() + # This will trigger the DatabasePushNotificationConfigStore._from_orm legacy conversion + retrieved_configs = await db_store_parameterized.get_info( + task_id, context_user + ) + + # 4. Verify the conversion to modern Protobuf + assert len(retrieved_configs) == 1 + retrieved = retrieved_configs[0] + assert retrieved.task_id == task_id + assert retrieved.id == config_id + assert retrieved.url == 'https://example.com/push' + assert retrieved.token == 'legacy-token' + assert retrieved.authentication.scheme == 'bearer' + assert retrieved.authentication.credentials == 'legacy-creds' + + +@pytest.mark.asyncio +async def test_custom_conversion(): + engine = MagicMock() + + # Custom callables + mock_to_orm = MagicMock( + return_value=PushNotificationConfigModel(task_id='t1', config_id='c1') + ) + mock_from_orm = MagicMock( + return_value=TaskPushNotificationConfig(id='custom_config') + ) + store = DatabasePushNotificationConfigStore( + engine=engine, + core_to_model_conversion=mock_to_orm, + model_to_core_conversion=mock_from_orm, + ) + + config = TaskPushNotificationConfig(id='orig') + model = store._to_orm('t1', config, 'owner') + assert model.config_id == 'c1' + mock_to_orm.assert_called_once_with('t1', config, 'owner', None) + + model_instance = PushNotificationConfigModel(task_id='t1', config_id='c1') + loaded_config = store._from_orm(model_instance) + assert loaded_config.id == 'custom_config' + mock_from_orm.assert_called_once_with(model_instance) + + +@pytest.mark.asyncio +async def test_core_to_0_3_model_conversion( + db_store_parameterized: DatabasePushNotificationConfigStore, +) -> None: + """Test storing and retrieving push notification configs in v0.3 format using conversion utilities. + + Tests both class-level and instance-level assignment of the conversion function. + Setting the model_to_core_conversion to compat_push_notification_config_model_to_core would be redundant as + it is always called when retrieving 0.3 PushNotificationConfigs. + """ + store = db_store_parameterized + + # Set the v0.3 persistence utilities + store.core_to_model_conversion = ( + core_to_compat_push_notification_config_model + ) + + task_id = 'v03-persistence-task' + config_id = 'c1' + original_config = TaskPushNotificationConfig( + id=config_id, + url='https://example.com/push', + token='legacy-token', + ) + # 1. Save the config (will use core_to_compat_push_notification_config_model) + await store.set_info(task_id, original_config, MINIMAL_CALL_CONTEXT) + + # 2. Verify it's stored in v0.3 format directly in DB + async with store.async_session_maker() as session: + db_model = await session.get(store.config_model, (task_id, config_id)) + assert db_model is not None + assert db_model.protocol_version == '0.3' + # v0.3 JSON structure for PushNotificationConfig (unwrapped) + import json + + raw_data = db_model.config_data + if store._fernet: + raw_data = store._fernet.decrypt(raw_data) + data = json.loads(raw_data.decode('utf-8')) + assert data['url'] == 'https://example.com/push' + assert data['id'] == 'c1' + assert data['token'] == 'legacy-token' + assert 'taskId' not in data + + # 3. Retrieve the config (will use compat_push_notification_config_model_to_core) + retrieved_configs = await store.get_info(task_id, MINIMAL_CALL_CONTEXT) + assert len(retrieved_configs) == 1 + retrieved = retrieved_configs[0] + assert retrieved.id == original_config.id + assert retrieved.url == original_config.url + assert retrieved.token == original_config.token + + # Reset conversion attributes + store.core_to_model_conversion = None + await store.delete_info(task_id, MINIMAL_CALL_CONTEXT) diff --git a/tests/server/tasks/test_database_task_store.py b/tests/server/tasks/test_database_task_store.py index 87069be46..021345a7e 100644 --- a/tests/server/tasks/test_database_task_store.py +++ b/tests/server/tasks/test_database_task_store.py @@ -1,4 +1,6 @@ import os +from datetime import datetime, timezone +from unittest.mock import MagicMock from collections.abc import AsyncGenerator @@ -6,6 +8,9 @@ import pytest_asyncio from _pytest.mark.structures import ParameterSet +from a2a.types.a2a_pb2 import ListTasksRequest +from a2a.compat.v0_3 import types as types_v03 +from sqlalchemy import insert # Skip entire test module if SQLAlchemy is not installed @@ -15,18 +20,43 @@ from sqlalchemy.ext.asyncio import create_async_engine from sqlalchemy.inspection import inspect +from google.protobuf.json_format import MessageToDict + from a2a.server.models import Base, TaskModel # Important: To get Base.metadata from a2a.server.tasks.database_task_store import DatabaseTaskStore -from a2a.types import ( +from a2a.compat.v0_3.model_conversions import core_to_compat_task_model +from a2a.types.a2a_pb2 import ( Artifact, + ListTasksRequest, Message, Part, Role, Task, TaskState, TaskStatus, - TextPart, ) +from a2a.auth.user import User +from a2a.server.context import ServerCallContext +from a2a.utils.constants import DEFAULT_LIST_TASKS_PAGE_SIZE +from a2a.utils.errors import InvalidParamsError + + +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +TEST_CONTEXT = ServerCallContext(user=SampleUser('test_user')) # DSNs for different databases @@ -71,17 +101,11 @@ # Minimal Task object for testing - remains the same -task_status_submitted = TaskStatus( - state=TaskState.submitted, timestamp='2023-01-01T00:00:00Z' -) +task_status_submitted = TaskStatus(state=TaskState.TASK_STATE_SUBMITTED) MINIMAL_TASK_OBJ = Task( id='task-abc', context_id='session-xyz', status=task_status_submitted, - kind='task', - metadata={'test_key': 'test_value'}, - artifacts=[], - history=[], ) @@ -142,33 +166,242 @@ def has_table_sync(sync_conn): @pytest.mark.asyncio async def test_save_task(db_store_parameterized: DatabaseTaskStore) -> None: """Test saving a task to the DatabaseTaskStore.""" - task_to_save = MINIMAL_TASK_OBJ.model_copy(deep=True) + # Create a copy of the minimal task with a unique ID + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) # Ensure unique ID for parameterized tests if needed, or rely on table isolation task_to_save.id = ( f'save-task-{db_store_parameterized.engine.url.drivername}' ) - await db_store_parameterized.save(task_to_save) + await db_store_parameterized.save(task_to_save, TEST_CONTEXT) - retrieved_task = await db_store_parameterized.get(task_to_save.id) + retrieved_task = await db_store_parameterized.get( + task_to_save.id, TEST_CONTEXT + ) assert retrieved_task is not None assert retrieved_task.id == task_to_save.id - assert retrieved_task.model_dump() == task_to_save.model_dump() - await db_store_parameterized.delete(task_to_save.id) # Cleanup + assert MessageToDict(retrieved_task) == MessageToDict(task_to_save) + await db_store_parameterized.delete( + task_to_save.id, TEST_CONTEXT + ) # Cleanup @pytest.mark.asyncio async def test_get_task(db_store_parameterized: DatabaseTaskStore) -> None: """Test retrieving a task from the DatabaseTaskStore.""" task_id = f'get-test-task-{db_store_parameterized.engine.url.drivername}' - task_to_save = MINIMAL_TASK_OBJ.model_copy(update={'id': task_id}) - await db_store_parameterized.save(task_to_save) + task_to_save = Task() + task_to_save.CopyFrom(MINIMAL_TASK_OBJ) + task_to_save.id = task_id + await db_store_parameterized.save(task_to_save, TEST_CONTEXT) - retrieved_task = await db_store_parameterized.get(task_to_save.id) + retrieved_task = await db_store_parameterized.get( + task_to_save.id, TEST_CONTEXT + ) assert retrieved_task is not None assert retrieved_task.id == task_to_save.id assert retrieved_task.context_id == task_to_save.context_id - assert retrieved_task.status.state == TaskState.submitted - await db_store_parameterized.delete(task_to_save.id) # Cleanup + assert retrieved_task.status.state == TaskState.TASK_STATE_SUBMITTED + await db_store_parameterized.delete( + task_to_save.id, TEST_CONTEXT + ) # Cleanup + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'params, expected_ids, total_count, next_page_token', + [ + # No parameters, should return all tasks + ( + ListTasksRequest(), + ['task-2', 'task-1', 'task-0', 'task-4', 'task-3'], + 5, + None, + ), + # Unknown context + ( + ListTasksRequest(context_id='nonexistent'), + [], + 0, + None, + ), + # Pagination (first page) + ( + ListTasksRequest(page_size=2), + ['task-2', 'task-1'], + 5, + 'dGFzay0w', # base64 for 'task-0' + ), + # Pagination (same timestamp) + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0x', # base64 for 'task-1' + ), + ['task-1', 'task-0'], + 5, + 'dGFzay00', # base64 for 'task-4' + ), + # Pagination (final page) + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0z', # base64 for 'task-3' + ), + ['task-3'], + 5, + None, + ), + # Filtering by context_id + ( + ListTasksRequest(context_id='context-1'), + ['task-1', 'task-3'], + 2, + None, + ), + # Filtering by status + ( + ListTasksRequest(status=TaskState.TASK_STATE_WORKING), + ['task-1', 'task-3'], + 2, + None, + ), + # Combined filtering (context_id and status) + ( + ListTasksRequest( + context_id='context-0', status=TaskState.TASK_STATE_SUBMITTED + ), + ['task-2', 'task-0'], + 2, + None, + ), + # Combined filtering and pagination + ( + ListTasksRequest( + context_id='context-0', + page_size=1, + ), + ['task-2'], + 3, + 'dGFzay0w', # base64 for 'task-0' + ), + ], +) +async def test_list_tasks( + db_store_parameterized: DatabaseTaskStore, + params: ListTasksRequest, + expected_ids: list[str], + total_count: int, + next_page_token: str, +) -> None: + """Test listing tasks with various filters and pagination.""" + tasks_to_create = [ + Task( + id='task-0', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-1', + context_id='context-1', + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-2', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 2, tzinfo=timezone.utc), + ), + ), + Task( + id='task-3', + context_id='context-1', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ), + Task( + id='task-4', + context_id='context-0', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ), + ] + for task in tasks_to_create: + await db_store_parameterized.save(task, TEST_CONTEXT) + + page = await db_store_parameterized.list(params, TEST_CONTEXT) + + retrieved_ids = [task.id for task in page.tasks] + assert retrieved_ids == expected_ids + assert page.total_size == total_count + assert page.next_page_token == (next_page_token or '') + assert page.page_size == (params.page_size or DEFAULT_LIST_TASKS_PAGE_SIZE) + + # Cleanup + for task in tasks_to_create: + await db_store_parameterized.delete(task.id, TEST_CONTEXT) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'params, expected_error_message', + [ + ( + ListTasksRequest( + page_size=2, + page_token='invalid', + ), + 'Token is not a valid base64-encoded cursor.', + ), + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0xMDA=', # base64 for 'task-100' + ), + 'Invalid page token: dGFzay0xMDA=', + ), + ], +) +async def test_list_tasks_fails( + db_store_parameterized: DatabaseTaskStore, + params: ListTasksRequest, + expected_error_message: str, +) -> None: + """Test listing tasks with invalid parameters that should fail.""" + tasks_to_create = [ + Task( + id='task-0', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-1', + context_id='context-1', + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + ] + for task in tasks_to_create: + await db_store_parameterized.save(task, TEST_CONTEXT) + + with pytest.raises(InvalidParamsError) as excinfo: + await db_store_parameterized.list(params, TEST_CONTEXT) + + assert expected_error_message in str(excinfo.value) + + # Cleanup + for task in tasks_to_create: + await db_store_parameterized.delete(task.id, TEST_CONTEXT) @pytest.mark.asyncio @@ -176,7 +409,9 @@ async def test_get_nonexistent_task( db_store_parameterized: DatabaseTaskStore, ) -> None: """Test retrieving a nonexistent task.""" - retrieved_task = await db_store_parameterized.get('nonexistent-task-id') + retrieved_task = await db_store_parameterized.get( + 'nonexistent-task-id', TEST_CONTEXT + ) assert retrieved_task is None @@ -184,16 +419,26 @@ async def test_get_nonexistent_task( async def test_delete_task(db_store_parameterized: DatabaseTaskStore) -> None: """Test deleting a task from the DatabaseTaskStore.""" task_id = f'delete-test-task-{db_store_parameterized.engine.url.drivername}' - task_to_save_and_delete = MINIMAL_TASK_OBJ.model_copy( - update={'id': task_id} - ) - await db_store_parameterized.save(task_to_save_and_delete) + task_to_save_and_delete = Task() + task_to_save_and_delete.CopyFrom(MINIMAL_TASK_OBJ) + task_to_save_and_delete.id = task_id + await db_store_parameterized.save(task_to_save_and_delete, TEST_CONTEXT) assert ( - await db_store_parameterized.get(task_to_save_and_delete.id) is not None + await db_store_parameterized.get( + task_to_save_and_delete.id, TEST_CONTEXT + ) + is not None + ) + await db_store_parameterized.delete( + task_to_save_and_delete.id, TEST_CONTEXT + ) + assert ( + await db_store_parameterized.get( + task_to_save_and_delete.id, TEST_CONTEXT + ) + is None ) - await db_store_parameterized.delete(task_to_save_and_delete.id) - assert await db_store_parameterized.get(task_to_save_and_delete.id) is None @pytest.mark.asyncio @@ -201,7 +446,9 @@ async def test_delete_nonexistent_task( db_store_parameterized: DatabaseTaskStore, ) -> None: """Test deleting a nonexistent task. Should not error.""" - await db_store_parameterized.delete('nonexistent-delete-task-id') + await db_store_parameterized.delete( + 'nonexistent-delete-task-id', TEST_CONTEXT + ) @pytest.mark.asyncio @@ -210,131 +457,149 @@ async def test_save_and_get_detailed_task( ) -> None: """Test saving and retrieving a task with more fields populated.""" task_id = f'detailed-task-{db_store_parameterized.engine.url.drivername}' + test_timestamp = datetime(2023, 1, 1, 12, 0, 0, tzinfo=timezone.utc) test_task = Task( id=task_id, context_id='test-session-1', status=TaskStatus( - state=TaskState.working, timestamp='2023-01-01T12:00:00Z' + state=TaskState.TASK_STATE_WORKING, timestamp=test_timestamp ), - kind='task', metadata={'key1': 'value1', 'key2': 123}, artifacts=[ Artifact( artifact_id='artifact-1', - parts=[Part(root=TextPart(text='hello'))], + parts=[Part(text='hello')], ) ], history=[ Message( message_id='msg-1', - role=Role.user, - parts=[Part(root=TextPart(text='user input'))], + role=Role.ROLE_USER, + parts=[Part(text='user input')], ) ], ) - await db_store_parameterized.save(test_task) - retrieved_task = await db_store_parameterized.get(test_task.id) + await db_store_parameterized.save(test_task, TEST_CONTEXT) + retrieved_task = await db_store_parameterized.get( + test_task.id, TEST_CONTEXT + ) assert retrieved_task is not None assert retrieved_task.id == test_task.id assert retrieved_task.context_id == test_task.context_id - assert retrieved_task.status.state == TaskState.working - assert retrieved_task.status.timestamp == '2023-01-01T12:00:00Z' - assert retrieved_task.metadata == {'key1': 'value1', 'key2': 123} + assert retrieved_task.status.state == TaskState.TASK_STATE_WORKING + # Compare timestamps - proto Timestamp has ToDatetime() method + assert ( + retrieved_task.status.timestamp.ToDatetime() + == test_timestamp.replace(tzinfo=None) + ) + assert dict(retrieved_task.metadata) == {'key1': 'value1', 'key2': 123} - # Pydantic models handle their own serialization for comparison if model_dump is used + # Use MessageToDict for proto serialization comparisons assert ( - retrieved_task.model_dump()['artifacts'] - == test_task.model_dump()['artifacts'] + MessageToDict(retrieved_task)['artifacts'] + == MessageToDict(test_task)['artifacts'] ) assert ( - retrieved_task.model_dump()['history'] - == test_task.model_dump()['history'] + MessageToDict(retrieved_task)['history'] + == MessageToDict(test_task)['history'] ) - await db_store_parameterized.delete(test_task.id) - assert await db_store_parameterized.get(test_task.id) is None + await db_store_parameterized.delete(test_task.id, TEST_CONTEXT) + assert await db_store_parameterized.get(test_task.id, TEST_CONTEXT) is None @pytest.mark.asyncio async def test_update_task(db_store_parameterized: DatabaseTaskStore) -> None: """Test updating an existing task.""" task_id = f'update-test-task-{db_store_parameterized.engine.url.drivername}' + original_timestamp = datetime(2023, 1, 2, 10, 0, 0, tzinfo=timezone.utc) original_task = Task( id=task_id, context_id='session-update', status=TaskStatus( - state=TaskState.submitted, timestamp='2023-01-02T10:00:00Z' + state=TaskState.TASK_STATE_SUBMITTED, timestamp=original_timestamp ), - kind='task', - metadata=None, # Explicitly None + # Proto metadata is a Struct, can't be None - leave empty artifacts=[], history=[], ) - await db_store_parameterized.save(original_task) + await db_store_parameterized.save(original_task, TEST_CONTEXT) - retrieved_before_update = await db_store_parameterized.get(task_id) + retrieved_before_update = await db_store_parameterized.get( + task_id, TEST_CONTEXT + ) assert retrieved_before_update is not None - assert retrieved_before_update.status.state == TaskState.submitted - assert retrieved_before_update.metadata is None + assert ( + retrieved_before_update.status.state == TaskState.TASK_STATE_SUBMITTED + ) + assert ( + len(retrieved_before_update.metadata) == 0 + ) # Proto map is empty, not None - updated_task = original_task.model_copy(deep=True) - updated_task.status.state = TaskState.completed - updated_task.status.timestamp = '2023-01-02T11:00:00Z' - updated_task.metadata = {'update_key': 'update_value'} + updated_timestamp = datetime(2023, 1, 2, 11, 0, 0, tzinfo=timezone.utc) + updated_task = Task() + updated_task.CopyFrom(original_task) + updated_task.status.state = TaskState.TASK_STATE_COMPLETED + updated_task.status.timestamp.FromDatetime(updated_timestamp) + updated_task.metadata['update_key'] = 'update_value' - await db_store_parameterized.save(updated_task) + await db_store_parameterized.save(updated_task, TEST_CONTEXT) - retrieved_after_update = await db_store_parameterized.get(task_id) + retrieved_after_update = await db_store_parameterized.get( + task_id, TEST_CONTEXT + ) assert retrieved_after_update is not None - assert retrieved_after_update.status.state == TaskState.completed - assert retrieved_after_update.metadata == {'update_key': 'update_value'} + assert retrieved_after_update.status.state == TaskState.TASK_STATE_COMPLETED + assert dict(retrieved_after_update.metadata) == { + 'update_key': 'update_value' + } - await db_store_parameterized.delete(task_id) + await db_store_parameterized.delete(task_id, TEST_CONTEXT) @pytest.mark.asyncio async def test_metadata_field_mapping( db_store_parameterized: DatabaseTaskStore, ) -> None: - """Test that metadata field is correctly mapped between Pydantic and SQLAlchemy. + """Test that metadata field is correctly mapped between Proto and SQLAlchemy. This test verifies: - 1. Metadata can be None + 1. Metadata can be empty (proto Struct can't be None) 2. Metadata can be a simple dict 3. Metadata can contain nested structures 4. Metadata is correctly saved and retrieved 5. The mapping between task.metadata and task_metadata column works """ - # Test 1: Task with no metadata (None) + # Test 1: Task with no metadata (empty Struct in proto) task_no_metadata = Task( id='task-metadata-test-1', context_id='session-meta-1', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - await db_store_parameterized.save(task_no_metadata) + await db_store_parameterized.save(task_no_metadata, TEST_CONTEXT) retrieved_no_metadata = await db_store_parameterized.get( - 'task-metadata-test-1' + 'task-metadata-test-1', TEST_CONTEXT ) assert retrieved_no_metadata is not None - assert retrieved_no_metadata.metadata is None + # Proto Struct is empty, not None + assert len(retrieved_no_metadata.metadata) == 0 # Test 2: Task with simple metadata simple_metadata = {'key': 'value', 'number': 42, 'boolean': True} task_simple_metadata = Task( id='task-metadata-test-2', context_id='session-meta-2', - status=TaskStatus(state=TaskState.working), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), metadata=simple_metadata, ) - await db_store_parameterized.save(task_simple_metadata) - retrieved_simple = await db_store_parameterized.get('task-metadata-test-2') + await db_store_parameterized.save(task_simple_metadata, TEST_CONTEXT) + retrieved_simple = await db_store_parameterized.get( + 'task-metadata-test-2', TEST_CONTEXT + ) assert retrieved_simple is not None - assert retrieved_simple.metadata == simple_metadata + assert dict(retrieved_simple.metadata) == simple_metadata # Test 3: Task with complex nested metadata complex_metadata = { @@ -347,54 +612,327 @@ async def test_metadata_field_mapping( }, 'special_chars': 'Hello\nWorld\t!', 'unicode': '🚀 Unicode test 你好', - 'null_value': None, } task_complex_metadata = Task( id='task-metadata-test-3', context_id='session-meta-3', - status=TaskStatus(state=TaskState.completed), - kind='task', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), metadata=complex_metadata, ) - await db_store_parameterized.save(task_complex_metadata) - retrieved_complex = await db_store_parameterized.get('task-metadata-test-3') + await db_store_parameterized.save(task_complex_metadata, TEST_CONTEXT) + retrieved_complex = await db_store_parameterized.get( + 'task-metadata-test-3', TEST_CONTEXT + ) assert retrieved_complex is not None - assert retrieved_complex.metadata == complex_metadata + # Convert proto Struct to dict for comparison + retrieved_meta = MessageToDict(retrieved_complex.metadata) + assert retrieved_meta == complex_metadata - # Test 4: Update metadata from None to dict + # Test 4: Update metadata from empty to dict task_update_metadata = Task( id='task-metadata-test-4', context_id='session-meta-4', - status=TaskStatus(state=TaskState.submitted), - kind='task', - metadata=None, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - await db_store_parameterized.save(task_update_metadata) + await db_store_parameterized.save(task_update_metadata, TEST_CONTEXT) # Update metadata - task_update_metadata.metadata = {'updated': True, 'timestamp': '2024-01-01'} - await db_store_parameterized.save(task_update_metadata) + task_update_metadata.metadata['updated'] = True + task_update_metadata.metadata['timestamp'] = '2024-01-01' + await db_store_parameterized.save(task_update_metadata, TEST_CONTEXT) - retrieved_updated = await db_store_parameterized.get('task-metadata-test-4') + retrieved_updated = await db_store_parameterized.get( + 'task-metadata-test-4', TEST_CONTEXT + ) assert retrieved_updated is not None - assert retrieved_updated.metadata == { + assert dict(retrieved_updated.metadata) == { 'updated': True, 'timestamp': '2024-01-01', } - # Test 5: Update metadata from dict to None - task_update_metadata.metadata = None - await db_store_parameterized.save(task_update_metadata) + # Test 5: Clear metadata (set to empty) + task_update_metadata.metadata.Clear() + await db_store_parameterized.save(task_update_metadata, TEST_CONTEXT) - retrieved_none = await db_store_parameterized.get('task-metadata-test-4') + retrieved_none = await db_store_parameterized.get( + 'task-metadata-test-4', TEST_CONTEXT + ) assert retrieved_none is not None - assert retrieved_none.metadata is None + assert len(retrieved_none.metadata) == 0 # Cleanup - await db_store_parameterized.delete('task-metadata-test-1') - await db_store_parameterized.delete('task-metadata-test-2') - await db_store_parameterized.delete('task-metadata-test-3') - await db_store_parameterized.delete('task-metadata-test-4') + await db_store_parameterized.delete('task-metadata-test-1', TEST_CONTEXT) + await db_store_parameterized.delete('task-metadata-test-2', TEST_CONTEXT) + await db_store_parameterized.delete('task-metadata-test-3', TEST_CONTEXT) + await db_store_parameterized.delete('task-metadata-test-4', TEST_CONTEXT) + + +@pytest.mark.asyncio +async def test_owner_resource_scoping( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test that operations are scoped to the correct owner.""" + task_store = db_store_parameterized + + context_user1 = ServerCallContext(user=SampleUser(user_name='user1')) + context_user2 = ServerCallContext(user=SampleUser(user_name='user2')) + context_user3 = ServerCallContext( + user=SampleUser(user_name='user3') + ) # user with no tasks + + # Create tasks for different owners + task1_user1, task2_user1, task1_user2 = Task(), Task(), Task() + task1_user1.CopyFrom(MINIMAL_TASK_OBJ) + task1_user1.id = 'u1-task1' + task2_user1.CopyFrom(MINIMAL_TASK_OBJ) + task2_user1.id = 'u1-task2' + task1_user2.CopyFrom(MINIMAL_TASK_OBJ) + task1_user2.id = 'u2-task1' + + await task_store.save(task1_user1, context_user1) + await task_store.save(task2_user1, context_user1) + await task_store.save(task1_user2, context_user2) + + # Test GET + assert await task_store.get('u1-task1', context_user1) is not None + assert await task_store.get('u1-task1', context_user2) is None + assert await task_store.get('u2-task1', context_user1) is None + assert await task_store.get('u2-task1', context_user2) is not None + + # Test LIST + params = ListTasksRequest() + page_user1 = await task_store.list(params, context_user1) + assert len(page_user1.tasks) == 2 + assert {t.id for t in page_user1.tasks} == {'u1-task1', 'u1-task2'} + assert page_user1.total_size == 2 + + page_user2 = await task_store.list(params, context_user2) + assert len(page_user2.tasks) == 1 + assert {t.id for t in page_user2.tasks} == {'u2-task1'} + assert page_user2.total_size == 1 + + page_user3 = await task_store.list(params, context_user3) + assert len(page_user3.tasks) == 0 + assert page_user3.total_size == 0 + + # Test DELETE + await task_store.delete('u1-task1', context_user2) # Should not delete + assert await task_store.get('u1-task1', context_user1) is not None + + await task_store.delete('u1-task1', context_user1) # Should delete + assert await task_store.get('u1-task1', context_user1) is None + + # Cleanup remaining tasks + await task_store.delete('u1-task2', context_user1) + await task_store.delete('u2-task1', context_user2) + + +@pytest.mark.asyncio +async def test_get_0_3_task_detailed( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test retrieving a detailed legacy v0.3 task from the database. + + This test simulates a database that already contains legacy v0.3 JSON data + (string-based enums, different field names) and verifies that the store + correctly converts it to the modern Protobuf-based Task model. + """ + + task_id = 'legacy-detailed-1' + owner = 'legacy_user' + context_user = ServerCallContext(user=SampleUser(user_name=owner)) + + # 1. Create a detailed legacy Task using v0.3 models + legacy_task = types_v03.Task( + id=task_id, + context_id='legacy-ctx-1', + status=types_v03.TaskStatus( + state=types_v03.TaskState.working, + message=types_v03.Message( + message_id='msg-status', + role=types_v03.Role.agent, + parts=[ + types_v03.Part( + root=types_v03.TextPart(text='Legacy status message') + ) + ], + ), + timestamp='2023-10-27T10:00:00Z', + ), + history=[ + types_v03.Message( + message_id='msg-1', + role=types_v03.Role.user, + parts=[ + types_v03.Part(root=types_v03.TextPart(text='Hello legacy')) + ], + ), + types_v03.Message( + message_id='msg-2', + role=types_v03.Role.agent, + parts=[ + types_v03.Part( + root=types_v03.DataPart(data={'legacy_key': 'value'}) + ) + ], + ), + ], + artifacts=[ + types_v03.Artifact( + artifact_id='art-1', + name='Legacy Artifact', + parts=[ + types_v03.Part( + root=types_v03.FilePart( + file=types_v03.FileWithUri( + uri='https://example.com/legacy.txt', + mime_type='text/plain', + ) + ) + ) + ], + ) + ], + metadata={'meta_key': 'meta_val'}, + ) + + # 2. Manually insert the legacy data into the database + # We must bypass the store's save() method because it expects Protobuf objects. + async with db_store_parameterized.async_session_maker.begin() as session: + # Pydantic model_dump(mode='json') produces exactly what would be in the legacy DB + legacy_data = legacy_task.model_dump(mode='json') + + stmt = insert(db_store_parameterized.task_model).values( + id=task_id, + context_id=legacy_task.context_id, + owner=owner, + status=legacy_data['status'], + history=legacy_data['history'], + artifacts=legacy_data['artifacts'], + task_metadata=legacy_data['metadata'], + kind='task', + last_updated=None, + ) + await session.execute(stmt) + + # 3. Retrieve the task using the standard store.get() + # This will trigger conversion from legacy to 1.0 format in the _from_orm method + retrieved_task = await db_store_parameterized.get(task_id, context_user) + + # 4. Verify the conversion to modern Protobuf + assert retrieved_task is not None + assert retrieved_task.id == task_id + assert retrieved_task.context_id == 'legacy-ctx-1' + + # Check Status & State (The most critical part: string 'working' -> enum TASK_STATE_WORKING) + assert retrieved_task.status.state == TaskState.TASK_STATE_WORKING + assert retrieved_task.status.message.message_id == 'msg-status' + assert retrieved_task.status.message.role == Role.ROLE_AGENT + assert ( + retrieved_task.status.message.parts[0].text == 'Legacy status message' + ) + + # Check History + assert len(retrieved_task.history) == 2 + assert retrieved_task.history[0].message_id == 'msg-1' + assert retrieved_task.history[0].role == Role.ROLE_USER + assert retrieved_task.history[0].parts[0].text == 'Hello legacy' + + assert retrieved_task.history[1].message_id == 'msg-2' + assert retrieved_task.history[1].role == Role.ROLE_AGENT + assert ( + MessageToDict(retrieved_task.history[1].parts[0].data)['legacy_key'] + == 'value' + ) + + # Check Artifacts + assert len(retrieved_task.artifacts) == 1 + assert retrieved_task.artifacts[0].artifact_id == 'art-1' + assert retrieved_task.artifacts[0].name == 'Legacy Artifact' + assert ( + retrieved_task.artifacts[0].parts[0].url + == 'https://example.com/legacy.txt' + ) + + # Check Metadata + assert dict(retrieved_task.metadata) == {'meta_key': 'meta_val'} + + retrieved_tasks = await db_store_parameterized.list( + ListTasksRequest(), context_user + ) + assert retrieved_tasks is not None + assert retrieved_tasks.tasks == [retrieved_task] + + await db_store_parameterized.delete(task_id, context_user) + + +@pytest.mark.asyncio +async def test_custom_conversion(): + engine = MagicMock() + # Custom callables + mock_to_orm = MagicMock( + return_value=TaskModel(id='custom_id', protocol_version='custom') + ) + mock_from_orm = MagicMock(return_value=Task(id='custom_id')) + store = DatabaseTaskStore( + engine=engine, + core_to_model_conversion=mock_to_orm, + model_to_core_conversion=mock_from_orm, + ) + + task = Task(id='123') + model = store._to_orm(task, 'owner') + assert model.id == 'custom_id' + mock_to_orm.assert_called_once_with(task, 'owner') + model_instance = TaskModel(id='dummy') + loaded_task = store._from_orm(model_instance) + assert loaded_task.id == 'custom_id' + mock_from_orm.assert_called_once_with(model_instance) + + +@pytest.mark.asyncio +async def test_core_to_0_3_model_conversion( + db_store_parameterized: DatabaseTaskStore, +) -> None: + """Test storing and retrieving tasks in v0.3 format using conversion utilities. + + Tests both class-level and instance-level assignment of the conversion function. + Setting the model_to_core_conversion class variables to compat_task_model_to_core would be redundant + as it is always called when retrieving 0.3 tasks. + """ + store = db_store_parameterized + + # Set the v0.3 persistence utilities + store.core_to_model_conversion = core_to_compat_task_model + task_id = 'v03-persistence-task' + original_task = Task( + id=task_id, + context_id='v03-context', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + metadata={'key': 'value'}, + ) + + # 1. Save the task (will use core_to_compat_task_model) + await store.save(original_task, TEST_CONTEXT) + + # 2. Verify it's stored in v0.3 format directly in DB + async with store.async_session_maker() as session: + db_task = await session.get(TaskModel, task_id) + assert db_task is not None + assert db_task.protocol_version == '0.3' + # v0.3 status JSON uses string for state + assert isinstance(db_task.status, dict) + assert db_task.status['state'] == 'working' + + # 3. Retrieve the task (will use compat_task_model_to_core) + retrieved_task = await store.get(task_id, context=TEST_CONTEXT) + assert retrieved_task is not None + assert retrieved_task.id == original_task.id + assert retrieved_task.status.state == TaskState.TASK_STATE_WORKING + assert dict(retrieved_task.metadata) == {'key': 'value'} + # Reset conversion attributes + store.core_to_model_conversion = None + await store.delete('v03-persistence-task', TEST_CONTEXT) # Ensure aiosqlite, asyncpg, and aiomysql are installed in the test environment (added to pyproject.toml). diff --git a/tests/server/tasks/test_id_generator.py b/tests/server/tasks/test_id_generator.py index 11bfff2b9..1812c0ab8 100644 --- a/tests/server/tasks/test_id_generator.py +++ b/tests/server/tasks/test_id_generator.py @@ -52,7 +52,7 @@ def test_context_mutability(self): def test_context_validation(self): """Test that context raises validation error for invalid types.""" with pytest.raises(ValidationError): - IDGeneratorContext(task_id={'not': 'a string'}) + IDGeneratorContext(task_id={'not': 'a string'}) # type: ignore[arg-type] class TestIDGenerator: @@ -61,7 +61,7 @@ class TestIDGenerator: def test_cannot_instantiate_abstract_class(self): """Test that IDGenerator cannot be instantiated directly.""" with pytest.raises(TypeError): - IDGenerator() + IDGenerator() # type: ignore[abstract] def test_subclass_must_implement_generate(self): """Test that subclasses must implement the generate method.""" @@ -70,7 +70,7 @@ class IncompleteGenerator(IDGenerator): pass with pytest.raises(TypeError): - IncompleteGenerator() + IncompleteGenerator() # type: ignore[abstract] def test_valid_subclass_implementation(self): """Test that a valid subclass can be instantiated.""" diff --git a/tests/server/tasks/test_inmemory_push_notifications.py b/tests/server/tasks/test_inmemory_push_notifications.py index 375ed97ca..d8b560aae 100644 --- a/tests/server/tasks/test_inmemory_push_notifications.py +++ b/tests/server/tasks/test_inmemory_push_notifications.py @@ -3,22 +3,32 @@ from unittest.mock import AsyncMock, MagicMock, patch import httpx +from google.protobuf.json_format import MessageToDict +from a2a.auth.user import User +from a2a.server.context import ServerCallContext from a2a.server.tasks.base_push_notification_sender import ( BasePushNotificationSender, ) from a2a.server.tasks.inmemory_push_notification_config_store import ( InMemoryPushNotificationConfigStore, ) -from a2a.types import PushNotificationConfig, Task, TaskState, TaskStatus +from a2a.types.a2a_pb2 import ( + TaskPushNotificationConfig, + StreamResponse, + Task, + TaskState, + TaskStatus, +) # Suppress logging for cleaner test output, can be enabled for debugging # logging.disable(logging.CRITICAL) -def create_sample_task( - task_id: str = 'task123', status_state: TaskState = TaskState.completed +def _create_sample_task( + task_id: str = 'task123', + status_state: TaskState = TaskState.TASK_STATE_COMPLETED, ) -> Task: return Task( id=task_id, @@ -27,12 +37,30 @@ def create_sample_task( ) -def create_sample_push_config( +def _create_sample_push_config( url: str = 'http://example.com/callback', config_id: str = 'cfg1', token: str | None = None, -) -> PushNotificationConfig: - return PushNotificationConfig(id=config_id, url=url, token=token) +) -> TaskPushNotificationConfig: + return TaskPushNotificationConfig(id=config_id, url=url, token=token) + + +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +MINIMAL_CALL_CONTEXT = ServerCallContext(user=SampleUser(user_name='user')) class TestInMemoryPushNotifier(unittest.IsolatedAsyncioTestCase): @@ -40,7 +68,9 @@ def setUp(self) -> None: self.mock_httpx_client = AsyncMock(spec=httpx.AsyncClient) self.config_store = InMemoryPushNotificationConfigStore() self.notifier = BasePushNotificationSender( - httpx_client=self.mock_httpx_client, config_store=self.config_store + httpx_client=self.mock_httpx_client, + config_store=self.config_store, + context=MINIMAL_CALL_CONTEXT, ) # Corrected argument name def test_constructor_stores_client(self) -> None: @@ -48,114 +78,135 @@ def test_constructor_stores_client(self) -> None: async def test_set_info_adds_new_config(self) -> None: task_id = 'task_new' - config = create_sample_push_config(url='http://new.url/callback') + config = _create_sample_push_config(url='http://new.url/callback') - await self.config_store.set_info(task_id, config) + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) - self.assertIn(task_id, self.config_store._push_notification_infos) - self.assertEqual( - self.config_store._push_notification_infos[task_id], [config] + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT ) + self.assertEqual(retrieved, [config]) async def test_set_info_appends_to_existing_config(self) -> None: task_id = 'task_update' - initial_config = create_sample_push_config( + initial_config = _create_sample_push_config( url='http://initial.url/callback', config_id='cfg_initial' ) - await self.config_store.set_info(task_id, initial_config) + await self.config_store.set_info( + task_id, initial_config, MINIMAL_CALL_CONTEXT + ) - updated_config = create_sample_push_config( + updated_config = _create_sample_push_config( url='http://updated.url/callback', config_id='cfg_updated' ) - await self.config_store.set_info(task_id, updated_config) - - self.assertIn(task_id, self.config_store._push_notification_infos) - self.assertEqual( - self.config_store._push_notification_infos[task_id][0], - initial_config, + await self.config_store.set_info( + task_id, updated_config, MINIMAL_CALL_CONTEXT ) - self.assertEqual( - self.config_store._push_notification_infos[task_id][1], - updated_config, + + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT ) + self.assertEqual(len(retrieved), 2) + self.assertEqual(retrieved[0], initial_config) + self.assertEqual(retrieved[1], updated_config) async def test_set_info_without_config_id(self) -> None: task_id = 'task1' - initial_config = PushNotificationConfig( + initial_config = TaskPushNotificationConfig( url='http://initial.url/callback' ) - await self.config_store.set_info(task_id, initial_config) + await self.config_store.set_info( + task_id, initial_config, MINIMAL_CALL_CONTEXT + ) - assert ( - self.config_store._push_notification_infos[task_id][0].id == task_id + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT ) + assert retrieved[0].id == task_id - updated_config = PushNotificationConfig( + updated_config = TaskPushNotificationConfig( url='http://initial.url/callback_new' ) - await self.config_store.set_info(task_id, updated_config) + await self.config_store.set_info( + task_id, updated_config, MINIMAL_CALL_CONTEXT + ) - self.assertIn(task_id, self.config_store._push_notification_infos) - assert len(self.config_store._push_notification_infos[task_id]) == 1 - self.assertEqual( - self.config_store._push_notification_infos[task_id][0].url, - updated_config.url, + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT ) + assert len(retrieved) == 1 + self.assertEqual(retrieved[0].url, updated_config.url) async def test_get_info_existing_config(self) -> None: task_id = 'task_get_exist' - config = create_sample_push_config(url='http://get.this/callback') - await self.config_store.set_info(task_id, config) + config = _create_sample_push_config(url='http://get.this/callback') + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) - retrieved_config = await self.config_store.get_info(task_id) + retrieved_config = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) self.assertEqual(retrieved_config, [config]) async def test_get_info_non_existent_config(self) -> None: task_id = 'task_get_non_exist' - retrieved_config = await self.config_store.get_info(task_id) + retrieved_config = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) assert retrieved_config == [] async def test_delete_info_existing_config(self) -> None: task_id = 'task_delete_exist' - config = create_sample_push_config(url='http://delete.this/callback') - await self.config_store.set_info(task_id, config) + config = _create_sample_push_config(url='http://delete.this/callback') + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) - self.assertIn(task_id, self.config_store._push_notification_infos) - await self.config_store.delete_info(task_id, config_id=config.id) - self.assertNotIn(task_id, self.config_store._push_notification_infos) + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) + self.assertEqual(len(retrieved), 1) + + await self.config_store.delete_info( + task_id, config_id=config.id, context=MINIMAL_CALL_CONTEXT + ) + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) + self.assertEqual(len(retrieved), 0) async def test_delete_info_non_existent_config(self) -> None: task_id = 'task_delete_non_exist' # Ensure it doesn't raise an error try: - await self.config_store.delete_info(task_id) + await self.config_store.delete_info( + task_id, context=MINIMAL_CALL_CONTEXT + ) except Exception as e: self.fail( f'delete_info raised {e} unexpectedly for nonexistent task_id' ) - self.assertNotIn( - task_id, self.config_store._push_notification_infos - ) # Should still not be there + retrieved = await self.config_store.get_info( + task_id, MINIMAL_CALL_CONTEXT + ) + self.assertEqual(len(retrieved), 0) async def test_send_notification_success(self) -> None: task_id = 'task_send_success' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/here') - await self.config_store.set_info(task_id, config) + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/here') + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # Mock the post call to simulate success mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_awaited_once() called_args, called_kwargs = self.mock_httpx_client.post.call_args self.assertEqual(called_args[0], config.url) self.assertEqual( called_kwargs['json'], - task_data.model_dump(mode='json', exclude_none=True), + MessageToDict(StreamResponse(task=task_data)), ) self.assertNotIn( 'auth', called_kwargs @@ -164,25 +215,25 @@ async def test_send_notification_success(self) -> None: async def test_send_notification_with_token_success(self) -> None: task_id = 'task_send_success' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config( + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config( url='http://notify.me/here', token='unique_token' ) - await self.config_store.set_info(task_id, config) + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) # Mock the post call to simulate success mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_awaited_once() called_args, called_kwargs = self.mock_httpx_client.post.call_args self.assertEqual(called_args[0], config.url) self.assertEqual( called_kwargs['json'], - task_data.model_dump(mode='json', exclude_none=True), + MessageToDict(StreamResponse(task=task_data)), ) self.assertEqual( called_kwargs['headers'], @@ -195,9 +246,9 @@ async def test_send_notification_with_token_success(self) -> None: async def test_send_notification_no_config(self) -> None: task_id = 'task_send_no_config' - task_data = create_sample_task(task_id=task_id) + task_data = _create_sample_task(task_id=task_id) - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_not_called() @@ -206,9 +257,9 @@ async def test_send_notification_http_status_error( self, mock_logger: MagicMock ) -> None: task_id = 'task_send_http_err' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/http_error') - await self.config_store.set_info(task_id, config) + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/http_error') + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) mock_response = MagicMock( spec=httpx.Response @@ -221,7 +272,7 @@ async def test_send_notification_http_status_error( self.mock_httpx_client.post.side_effect = http_error # The method should catch the error and log it, not re-raise - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_awaited_once() mock_logger.exception.assert_called_once() @@ -236,14 +287,14 @@ async def test_send_notification_request_error( self, mock_logger: MagicMock ) -> None: task_id = 'task_send_req_err' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/req_error') - await self.config_store.set_info(task_id, config) + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/req_error') + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) request_error = httpx.RequestError('Network issue', request=MagicMock()) self.mock_httpx_client.post.side_effect = request_error - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_awaited_once() mock_logger.exception.assert_called_once() @@ -256,43 +307,126 @@ async def test_send_notification_request_error( async def test_send_notification_with_auth( self, mock_logger: MagicMock ) -> None: + """Test that auth field is not used by current implementation. + + The current BasePushNotificationSender only supports token-based auth, + not the authentication field. This test verifies that the notification + still works even if the config has an authentication field set. + """ task_id = 'task_send_auth' - task_data = create_sample_task(task_id=task_id) - auth_info = ('user', 'pass') - config = create_sample_push_config(url='http://notify.me/auth') - config.authentication = MagicMock() # Mocking the structure for auth - config.authentication.schemes = ['basic'] # Assume basic for simplicity - config.authentication.credentials = ( - auth_info # This might need to be a specific model - ) - # For now, let's assume it's a tuple for basic auth - # The actual PushNotificationAuthenticationInfo is more complex - # For this test, we'll simplify and assume InMemoryPushNotifier - # directly uses tuple for httpx's `auth` param if basic. - # A more accurate test would construct the real auth model. - # Given the current implementation of InMemoryPushNotifier, - # it only supports basic auth via tuple. - - await self.config_store.set_info(task_id, config) + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/auth') + # The current implementation doesn't use the authentication field + # It only supports token-based auth via the token field + await self.config_store.set_info(task_id, config, MINIMAL_CALL_CONTEXT) mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.notifier.send_notification(task_data) # Pass only task_data + await self.notifier.send_notification(task_id, task_data) self.mock_httpx_client.post.assert_awaited_once() called_args, called_kwargs = self.mock_httpx_client.post.call_args self.assertEqual(called_args[0], config.url) self.assertEqual( called_kwargs['json'], - task_data.model_dump(mode='json', exclude_none=True), + MessageToDict(StreamResponse(task=task_data)), ) self.assertNotIn( 'auth', called_kwargs ) # auth is not passed by current implementation mock_response.raise_for_status.assert_called_once() + async def test_owner_resource_scoping(self) -> None: + """Test that operations are scoped to the correct owner.""" + context_user1 = ServerCallContext(user=SampleUser(user_name='user1')) + context_user2 = ServerCallContext(user=SampleUser(user_name='user2')) + + # Create configs for different owners + task1_u1_config1 = TaskPushNotificationConfig( + id='t1-u1-c1', url='http://u1.com/1' + ) + task1_u1_config2 = TaskPushNotificationConfig( + id='t1-u1-c2', url='http://u1.com/2' + ) + task1_u2_config1 = TaskPushNotificationConfig( + id='t1-u2-c1', url='http://u2.com/1' + ) + task2_u1_config1 = TaskPushNotificationConfig( + id='t2-u1-c1', url='http://u1.com/3' + ) + + await self.config_store.set_info( + 'task1', task1_u1_config1, context_user1 + ) + await self.config_store.set_info( + 'task1', task1_u1_config2, context_user1 + ) + await self.config_store.set_info( + 'task1', task1_u2_config1, context_user2 + ) + await self.config_store.set_info( + 'task2', task2_u1_config1, context_user1 + ) + + # Test GET_INFO + # User 1 should get only their configs for task1 + u1_task1_configs = await self.config_store.get_info( + 'task1', context_user1 + ) + self.assertEqual(len(u1_task1_configs), 2) + self.assertEqual( + {c.id for c in u1_task1_configs}, {'t1-u1-c1', 't1-u1-c2'} + ) + + # User 2 should get only their configs for task1 + u2_task1_configs = await self.config_store.get_info( + 'task1', context_user2 + ) + self.assertEqual(len(u2_task1_configs), 1) + self.assertEqual(u2_task1_configs[0].id, 't1-u2-c1') + + # User 2 should get no configs for task2 + u2_task2_configs = await self.config_store.get_info( + 'task2', context_user2 + ) + self.assertEqual(len(u2_task2_configs), 0) + + # User 1 should get their config for task2 + u1_task2_configs = await self.config_store.get_info( + 'task2', context_user1 + ) + self.assertEqual(len(u1_task2_configs), 1) + self.assertEqual(u1_task2_configs[0].id, 't2-u1-c1') + + # Test DELETE_INFO + # User 2 deleting User 1's config should not work + await self.config_store.delete_info('task1', context_user2, 't1-u1-c1') + u1_task1_configs = await self.config_store.get_info( + 'task1', context_user1 + ) + self.assertEqual(len(u1_task1_configs), 2) + + # User 1 deleting their own config + await self.config_store.delete_info('task1', context_user1, 't1-u1-c1') + u1_task1_configs = await self.config_store.get_info( + 'task1', context_user1 + ) + self.assertEqual(len(u1_task1_configs), 1) + self.assertEqual(u1_task1_configs[0].id, 't1-u1-c2') + + # User 1 deleting all configs for task2 + await self.config_store.delete_info('task2', context=context_user1) + u1_task2_configs = await self.config_store.get_info( + 'task2', context_user1 + ) + self.assertEqual(len(u1_task2_configs), 0) + + # Cleanup remaining + await self.config_store.delete_info('task1', context=context_user1) + await self.config_store.delete_info('task1', context=context_user2) + if __name__ == '__main__': unittest.main() diff --git a/tests/server/tasks/test_inmemory_task_store.py b/tests/server/tasks/test_inmemory_task_store.py index c41e3559f..f04a69170 100644 --- a/tests/server/tasks/test_inmemory_task_store.py +++ b/tests/server/tasks/test_inmemory_task_store.py @@ -1,26 +1,51 @@ -from typing import Any - +from a2a.server.context import ServerCallContext import pytest +from datetime import datetime, timezone from a2a.server.tasks import InMemoryTaskStore -from a2a.types import Task +from a2a.types.a2a_pb2 import Task, TaskState, TaskStatus, ListTasksRequest +from a2a.utils.constants import DEFAULT_LIST_TASKS_PAGE_SIZE +from a2a.utils.errors import InvalidParamsError + +from a2a.auth.user import User + + +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + @property + def user_name(self) -> str: + return self._user_name -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'context_id': 'session-xyz', - 'status': {'state': 'submitted'}, - 'kind': 'task', -} + +TEST_CONTEXT = ServerCallContext(user=SampleUser('test_user')) + + +def create_minimal_task( + task_id: str = 'task-abc', context_id: str = 'session-xyz' +) -> Task: + """Create a minimal task for testing.""" + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) @pytest.mark.asyncio async def test_in_memory_task_store_save_and_get() -> None: """Test saving and retrieving a task from the in-memory store.""" store = InMemoryTaskStore() - task = Task(**MINIMAL_TASK) - await store.save(task) - retrieved_task = await store.get(MINIMAL_TASK['id']) + task = create_minimal_task() + await store.save(task, TEST_CONTEXT) + retrieved_task = await store.get('task-abc', TEST_CONTEXT) assert retrieved_task == task @@ -28,18 +53,214 @@ async def test_in_memory_task_store_save_and_get() -> None: async def test_in_memory_task_store_get_nonexistent() -> None: """Test retrieving a nonexistent task.""" store = InMemoryTaskStore() - retrieved_task = await store.get('nonexistent') + retrieved_task = await store.get('nonexistent', TEST_CONTEXT) assert retrieved_task is None +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'params, expected_ids, total_count, next_page_token', + [ + # No parameters, should return all tasks + ( + ListTasksRequest(), + ['task-2', 'task-1', 'task-0', 'task-4', 'task-3'], + 5, + None, + ), + # Unknown context + ( + ListTasksRequest(context_id='nonexistent'), + [], + 0, + None, + ), + # Pagination (first page) + ( + ListTasksRequest(page_size=2), + ['task-2', 'task-1'], + 5, + 'dGFzay0w', # base64 for 'task-0' + ), + # Pagination (same timestamp) + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0x', # base64 for 'task-1' + ), + ['task-1', 'task-0'], + 5, + 'dGFzay00', # base64 for 'task-4' + ), + # Pagination (final page) + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0z', # base64 for 'task-3' + ), + ['task-3'], + 5, + None, + ), + # Filtering by context_id + ( + ListTasksRequest(context_id='context-1'), + ['task-1', 'task-3'], + 2, + None, + ), + # Filtering by status + ( + ListTasksRequest(status=TaskState.TASK_STATE_WORKING), + ['task-1', 'task-3'], + 2, + None, + ), + # Combined filtering (context_id and status) + ( + ListTasksRequest( + context_id='context-0', status=TaskState.TASK_STATE_SUBMITTED + ), + ['task-2', 'task-0'], + 2, + None, + ), + # Combined filtering and pagination + ( + ListTasksRequest( + context_id='context-0', + page_size=1, + ), + ['task-2'], + 3, + 'dGFzay0w', # base64 for 'task-0' + ), + ], +) +async def test_list_tasks( + params: ListTasksRequest, + expected_ids: list[str], + total_count: int, + next_page_token: str, +) -> None: + """Test listing tasks with various filters and pagination.""" + store = InMemoryTaskStore() + tasks_to_create = [ + Task( + id='task-0', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-1', + context_id='context-1', + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-2', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 2, tzinfo=timezone.utc), + ), + ), + Task( + id='task-3', + context_id='context-1', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ), + Task( + id='task-4', + context_id='context-0', + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), + ), + ] + for task in tasks_to_create: + await store.save(task, TEST_CONTEXT) + + page = await store.list(params, TEST_CONTEXT) + + retrieved_ids = [task.id for task in page.tasks] + assert retrieved_ids == expected_ids + assert page.total_size == total_count + assert page.next_page_token == (next_page_token or '') + assert page.page_size == (params.page_size or DEFAULT_LIST_TASKS_PAGE_SIZE) + + # Cleanup + for task in tasks_to_create: + await store.delete(task.id, TEST_CONTEXT) + + +@pytest.mark.asyncio +@pytest.mark.parametrize( + 'params, expected_error_message', + [ + ( + ListTasksRequest( + page_size=2, + page_token='invalid', + ), + 'Token is not a valid base64-encoded cursor.', + ), + ( + ListTasksRequest( + page_size=2, + page_token='dGFzay0xMDA=', # base64 for 'task-100' + ), + 'Invalid page token: dGFzay0xMDA=', + ), + ], +) +async def test_list_tasks_fails( + params: ListTasksRequest, expected_error_message: str +) -> None: + """Test listing tasks with invalid parameters that should fail.""" + store = InMemoryTaskStore() + tasks_to_create = [ + Task( + id='task-0', + context_id='context-0', + status=TaskStatus( + state=TaskState.TASK_STATE_SUBMITTED, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + Task( + id='task-1', + context_id='context-1', + status=TaskStatus( + state=TaskState.TASK_STATE_WORKING, + timestamp=datetime(2025, 1, 1, tzinfo=timezone.utc), + ), + ), + ] + for task in tasks_to_create: + await store.save(task, TEST_CONTEXT) + + with pytest.raises(InvalidParamsError) as excinfo: + await store.list(params, TEST_CONTEXT) + + assert expected_error_message in str(excinfo.value) + + # Cleanup + for task in tasks_to_create: + await store.delete(task.id, TEST_CONTEXT) + + @pytest.mark.asyncio async def test_in_memory_task_store_delete() -> None: """Test deleting a task from the store.""" store = InMemoryTaskStore() - task = Task(**MINIMAL_TASK) - await store.save(task) - await store.delete(MINIMAL_TASK['id']) - retrieved_task = await store.get(MINIMAL_TASK['id']) + task = create_minimal_task() + await store.save(task, TEST_CONTEXT) + await store.delete('task-abc', TEST_CONTEXT) + retrieved_task = await store.get('task-abc', TEST_CONTEXT) assert retrieved_task is None @@ -47,4 +268,103 @@ async def test_in_memory_task_store_delete() -> None: async def test_in_memory_task_store_delete_nonexistent() -> None: """Test deleting a nonexistent task.""" store = InMemoryTaskStore() - await store.delete('nonexistent') + await store.delete('nonexistent', TEST_CONTEXT) + + +@pytest.mark.asyncio +async def test_owner_resource_scoping() -> None: + """Test that operations are scoped to the correct owner.""" + store = InMemoryTaskStore() + task = create_minimal_task() + + context_user1 = ServerCallContext(user=SampleUser(user_name='user1')) + context_user2 = ServerCallContext(user=SampleUser(user_name='user2')) + context_user3 = ServerCallContext( + user=SampleUser(user_name='user3') + ) # For testing non-existent user + + # Create tasks for different owners + task1_user1 = Task() + task1_user1.CopyFrom(task) + task1_user1.id = 'u1-task1' + + task2_user1 = Task() + task2_user1.CopyFrom(task) + task2_user1.id = 'u1-task2' + + task1_user2 = Task() + task1_user2.CopyFrom(task) + task1_user2.id = 'u2-task1' + + await store.save(task1_user1, context_user1) + await store.save(task2_user1, context_user1) + await store.save(task1_user2, context_user2) + + # Test GET + assert await store.get('u1-task1', context_user1) is not None + assert await store.get('u1-task1', context_user2) is None + assert await store.get('u2-task1', context_user1) is None + assert await store.get('u2-task1', context_user2) is not None + assert await store.get('u2-task1', context_user3) is None + + # Test LIST + params = ListTasksRequest() + page_user1 = await store.list(params, context_user1) + assert len(page_user1.tasks) == 2 + assert {t.id for t in page_user1.tasks} == {'u1-task1', 'u1-task2'} + assert page_user1.total_size == 2 + + page_user2 = await store.list(params, context_user2) + assert len(page_user2.tasks) == 1 + assert {t.id for t in page_user2.tasks} == {'u2-task1'} + assert page_user2.total_size == 1 + + page_user3 = await store.list(params, context_user3) + assert len(page_user3.tasks) == 0 + assert page_user3.total_size == 0 + + # Test DELETE + await store.delete('u1-task1', context_user2) # Should not delete + assert await store.get('u1-task1', context_user1) is not None + + await store.delete('u1-task1', context_user1) # Should delete + assert await store.get('u1-task1', context_user1) is None + + # Cleanup remaining tasks + await store.delete('u1-task2', context_user1) + await store.delete('u2-task1', context_user2) + + +@pytest.mark.asyncio +@pytest.mark.parametrize('use_copying', [True, False]) +async def test_inmemory_task_store_copying_behavior(use_copying: bool): + """Verify that tasks are copied (or not) based on use_copying parameter.""" + store = InMemoryTaskStore(use_copying=use_copying) + + original_task = Task( + id='test_task', status=TaskStatus(state=TaskState.TASK_STATE_WORKING) + ) + await store.save(original_task, TEST_CONTEXT) + + # Retrieve it + retrieved_task = await store.get('test_task', TEST_CONTEXT) + assert retrieved_task is not None + + if use_copying: + assert retrieved_task is not original_task + else: + assert retrieved_task is original_task + + # Modify retrieved task + retrieved_task.status.state = TaskState.TASK_STATE_COMPLETED + + # Retrieve it again, it should NOT be modified in the store if use_copying=True + retrieved_task_2 = await store.get('test_task', TEST_CONTEXT) + assert retrieved_task_2 is not None + + if use_copying: + assert retrieved_task_2.status.state == TaskState.TASK_STATE_WORKING + assert retrieved_task_2 is not retrieved_task + else: + assert retrieved_task_2.status.state == TaskState.TASK_STATE_COMPLETED + assert retrieved_task_2 is retrieved_task diff --git a/tests/server/tasks/test_push_notification_sender.py b/tests/server/tasks/test_push_notification_sender.py index a3272c2c1..783e1f413 100644 --- a/tests/server/tasks/test_push_notification_sender.py +++ b/tests/server/tasks/test_push_notification_sender.py @@ -4,19 +4,45 @@ import httpx +from google.protobuf.json_format import MessageToDict + +from a2a.auth.user import User +from a2a.server.context import ServerCallContext from a2a.server.tasks.base_push_notification_sender import ( BasePushNotificationSender, ) -from a2a.types import ( - PushNotificationConfig, +from a2a.types.a2a_pb2 import ( + TaskPushNotificationConfig, + StreamResponse, Task, + TaskArtifactUpdateEvent, TaskState, TaskStatus, + TaskStatusUpdateEvent, ) -def create_sample_task( - task_id: str = 'task123', status_state: TaskState = TaskState.completed +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +MINIMAL_CALL_CONTEXT = ServerCallContext(user=SampleUser(user_name='user')) + + +def _create_sample_task( + task_id: str = 'task123', + status_state: TaskState = TaskState.TASK_STATE_COMPLETED, ) -> Task: return Task( id=task_id, @@ -25,12 +51,12 @@ def create_sample_task( ) -def create_sample_push_config( +def _create_sample_push_config( url: str = 'http://example.com/callback', config_id: str = 'cfg1', token: str | None = None, -) -> PushNotificationConfig: - return PushNotificationConfig(id=config_id, url=url, token=token) +) -> TaskPushNotificationConfig: + return TaskPushNotificationConfig(id=config_id, url=url, token=token) class TestBasePushNotificationSender(unittest.IsolatedAsyncioTestCase): @@ -40,6 +66,7 @@ def setUp(self) -> None: self.sender = BasePushNotificationSender( httpx_client=self.mock_httpx_client, config_store=self.mock_config_store, + context=MINIMAL_CALL_CONTEXT, ) def test_constructor_stores_client_and_config_store(self) -> None: @@ -48,30 +75,32 @@ def test_constructor_stores_client_and_config_store(self) -> None: async def test_send_notification_success(self) -> None: task_id = 'task_send_success' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/here') + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/here') self.mock_config_store.get_info.return_value = [config] mock_response = AsyncMock(spec=httpx.Response) mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.sender.send_notification(task_data) + await self.sender.send_notification(task_id, task_data) - self.mock_config_store.get_info.assert_awaited_once_with + self.mock_config_store.get_info.assert_awaited_once_with( + task_data.id, MINIMAL_CALL_CONTEXT + ) # assert httpx_client post method got invoked with right parameters self.mock_httpx_client.post.assert_awaited_once_with( config.url, - json=task_data.model_dump(mode='json', exclude_none=True), + json=MessageToDict(StreamResponse(task=task_data)), headers=None, ) mock_response.raise_for_status.assert_called_once() async def test_send_notification_with_token_success(self) -> None: task_id = 'task_send_success' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config( + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config( url='http://notify.me/here', token='unique_token' ) self.mock_config_store.get_info.return_value = [config] @@ -80,26 +109,30 @@ async def test_send_notification_with_token_success(self) -> None: mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.sender.send_notification(task_data) + await self.sender.send_notification(task_id, task_data) - self.mock_config_store.get_info.assert_awaited_once_with + self.mock_config_store.get_info.assert_awaited_once_with( + task_data.id, MINIMAL_CALL_CONTEXT + ) # assert httpx_client post method got invoked with right parameters self.mock_httpx_client.post.assert_awaited_once_with( config.url, - json=task_data.model_dump(mode='json', exclude_none=True), + json=MessageToDict(StreamResponse(task=task_data)), headers={'X-A2A-Notification-Token': 'unique_token'}, ) mock_response.raise_for_status.assert_called_once() async def test_send_notification_no_config(self) -> None: task_id = 'task_send_no_config' - task_data = create_sample_task(task_id=task_id) + task_data = _create_sample_task(task_id=task_id) self.mock_config_store.get_info.return_value = [] - await self.sender.send_notification(task_data) + await self.sender.send_notification(task_id, task_data) - self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.mock_config_store.get_info.assert_awaited_once_with( + task_id, MINIMAL_CALL_CONTEXT + ) self.mock_httpx_client.post.assert_not_called() @patch('a2a.server.tasks.base_push_notification_sender.logger') @@ -107,8 +140,8 @@ async def test_send_notification_http_status_error( self, mock_logger: MagicMock ) -> None: task_id = 'task_send_http_err' - task_data = create_sample_task(task_id=task_id) - config = create_sample_push_config(url='http://notify.me/http_error') + task_data = _create_sample_task(task_id=task_id) + config = _create_sample_push_config(url='http://notify.me/http_error') self.mock_config_store.get_info.return_value = [config] mock_response = MagicMock(spec=httpx.Response) @@ -119,23 +152,25 @@ async def test_send_notification_http_status_error( ) self.mock_httpx_client.post.side_effect = http_error - await self.sender.send_notification(task_data) + await self.sender.send_notification(task_id, task_data) - self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.mock_config_store.get_info.assert_awaited_once_with( + task_id, MINIMAL_CALL_CONTEXT + ) self.mock_httpx_client.post.assert_awaited_once_with( config.url, - json=task_data.model_dump(mode='json', exclude_none=True), + json=MessageToDict(StreamResponse(task=task_data)), headers=None, ) mock_logger.exception.assert_called_once() async def test_send_notification_multiple_configs(self) -> None: task_id = 'task_multiple_configs' - task_data = create_sample_task(task_id=task_id) - config1 = create_sample_push_config( + task_data = _create_sample_task(task_id=task_id) + config1 = _create_sample_push_config( url='http://notify.me/cfg1', config_id='cfg1' ) - config2 = create_sample_push_config( + config2 = _create_sample_push_config( url='http://notify.me/cfg2', config_id='cfg2' ) self.mock_config_store.get_info.return_value = [config1, config2] @@ -144,21 +179,71 @@ async def test_send_notification_multiple_configs(self) -> None: mock_response.status_code = 200 self.mock_httpx_client.post.return_value = mock_response - await self.sender.send_notification(task_data) + await self.sender.send_notification(task_id, task_data) - self.mock_config_store.get_info.assert_awaited_once_with(task_id) + self.mock_config_store.get_info.assert_awaited_once_with( + task_id, MINIMAL_CALL_CONTEXT + ) self.assertEqual(self.mock_httpx_client.post.call_count, 2) # Check calls for config1 self.mock_httpx_client.post.assert_any_call( config1.url, - json=task_data.model_dump(mode='json', exclude_none=True), + json=MessageToDict(StreamResponse(task=task_data)), headers=None, ) # Check calls for config2 self.mock_httpx_client.post.assert_any_call( config2.url, - json=task_data.model_dump(mode='json', exclude_none=True), + json=MessageToDict(StreamResponse(task=task_data)), headers=None, ) mock_response.raise_for_status.call_count = 2 + + async def test_send_notification_status_update_event(self) -> None: + task_id = 'task_status_update' + event = TaskStatusUpdateEvent( + task_id=task_id, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), + ) + config = _create_sample_push_config(url='http://notify.me/status') + self.mock_config_store.get_info.return_value = [config] + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.sender.send_notification(task_id, event) + + self.mock_config_store.get_info.assert_awaited_once_with( + task_id, MINIMAL_CALL_CONTEXT + ) + self.mock_httpx_client.post.assert_awaited_once_with( + config.url, + json=MessageToDict(StreamResponse(status_update=event)), + headers=None, + ) + + async def test_send_notification_artifact_update_event(self) -> None: + task_id = 'task_artifact_update' + event = TaskArtifactUpdateEvent( + task_id=task_id, + append=True, + ) + config = _create_sample_push_config(url='http://notify.me/artifact') + self.mock_config_store.get_info.return_value = [config] + + mock_response = AsyncMock(spec=httpx.Response) + mock_response.status_code = 200 + self.mock_httpx_client.post.return_value = mock_response + + await self.sender.send_notification(task_id, event) + + self.mock_config_store.get_info.assert_awaited_once_with( + task_id, MINIMAL_CALL_CONTEXT + ) + self.mock_httpx_client.post.assert_awaited_once_with( + config.url, + json=MessageToDict(StreamResponse(artifact_update=event)), + headers=None, + ) diff --git a/tests/server/tasks/test_result_aggregator.py b/tests/server/tasks/test_result_aggregator.py index 7b29ea4c8..9e1ce1f91 100644 --- a/tests/server/tasks/test_result_aggregator.py +++ b/tests/server/tasks/test_result_aggregator.py @@ -2,14 +2,14 @@ import unittest from collections.abc import AsyncIterator -from unittest.mock import AsyncMock, MagicMock, patch +from unittest.mock import ANY, AsyncMock, MagicMock, patch from typing_extensions import override from a2a.server.events.event_consumer import EventConsumer from a2a.server.tasks.result_aggregator import ResultAggregator from a2a.server.tasks.task_manager import TaskManager -from a2a.types import ( +from a2a.types.a2a_pb2 import ( Message, Part, Role, @@ -17,25 +17,26 @@ TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) # Helper to create a simple message def create_sample_message( - content: str = 'test message', msg_id: str = 'msg1', role: Role = Role.user + content: str = 'test message', + msg_id: str = 'msg1', + role: Role = Role.ROLE_USER, ) -> Message: return Message( message_id=msg_id, role=role, - parts=[Part(root=TextPart(text=content))], + parts=[Part(text=content)], ) # Helper to create a simple task def create_sample_task( task_id: str = 'task1', - status_state: TaskState = TaskState.submitted, + status_state: TaskState = TaskState.TASK_STATE_SUBMITTED, context_id: str = 'ctx1', ) -> Task: return Task( @@ -48,14 +49,14 @@ def create_sample_task( # Helper to create a TaskStatusUpdateEvent def create_sample_status_update( task_id: str = 'task1', - status_state: TaskState = TaskState.working, + status_state: TaskState = TaskState.TASK_STATE_WORKING, context_id: str = 'ctx1', ) -> TaskStatusUpdateEvent: return TaskStatusUpdateEvent( task_id=task_id, context_id=context_id, status=TaskStatus(state=status_state), - final=False, # Typically false unless it's the very last update + # Typically false unless it's the very last update ) @@ -92,10 +93,10 @@ async def test_current_result_property_with_message_none(self) -> None: async def test_consume_and_emit(self) -> None: event1 = create_sample_message(content='event one', msg_id='e1') event2 = create_sample_task( - task_id='task_event', status_state=TaskState.working + task_id='task_event', status_state=TaskState.TASK_STATE_WORKING ) event3 = create_sample_status_update( - task_id='task_event', status_state=TaskState.completed + task_id='task_event', status_state=TaskState.TASK_STATE_COMPLETED ) # Mock event_consumer.consume() to be an async generator @@ -146,10 +147,12 @@ async def mock_consume_generator(): async def test_consume_all_other_event_types(self) -> None: task_event = create_sample_task(task_id='task_other_event') status_update_event = create_sample_status_update( - task_id='task_other_event', status_state=TaskState.completed + task_id='task_other_event', + status_state=TaskState.TASK_STATE_COMPLETED, ) final_task_state = create_sample_task( - task_id='task_other_event', status_state=TaskState.completed + task_id='task_other_event', + status_state=TaskState.TASK_STATE_COMPLETED, ) async def mock_consume_generator(): @@ -209,7 +212,7 @@ async def raiser_gen(): # Ensure process was called for the event before the exception self.mock_task_manager.process.assert_called_once_with( - unittest.mock.ANY # Check it was called, arg is the task + ANY # Check it was called, arg is the task ) self.mock_task_manager.get_task.assert_not_called() @@ -245,7 +248,7 @@ async def test_consume_and_break_on_auth_required_task_event( self, mock_create_task: MagicMock ) -> None: auth_task = create_sample_task( - task_id='auth_task', status_state=TaskState.auth_required + task_id='auth_task', status_state=TaskState.TASK_STATE_AUTH_REQUIRED ) event_after_auth = create_sample_message('after auth') @@ -261,7 +264,7 @@ async def mock_consume_generator(): ) # Mock _continue_consuming to check if it's called by create_task - self.aggregator._continue_consuming = AsyncMock() + self.aggregator._continue_consuming = AsyncMock() # type: ignore[method-assign] mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) ( @@ -299,10 +302,12 @@ async def test_consume_and_break_on_auth_required_status_update_event( self, mock_create_task: MagicMock ) -> None: auth_status_update = create_sample_status_update( - task_id='auth_status_task', status_state=TaskState.auth_required + task_id='auth_status_task', + status_state=TaskState.TASK_STATE_AUTH_REQUIRED, ) current_task_state_after_update = create_sample_task( - task_id='auth_status_task', status_state=TaskState.auth_required + task_id='auth_status_task', + status_state=TaskState.TASK_STATE_AUTH_REQUIRED, ) async def mock_consume_generator(): @@ -315,7 +320,7 @@ async def mock_consume_generator(): self.mock_task_manager.get_task.return_value = ( current_task_state_after_update ) - self.aggregator._continue_consuming = AsyncMock() + self.aggregator._continue_consuming = AsyncMock() # type: ignore[method-assign] mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) ( @@ -342,7 +347,7 @@ async def test_consume_and_break_completes_normally(self) -> None: event1 = create_sample_message('event one normal', msg_id='n1') event2 = create_sample_task('normal_task') final_task_state = create_sample_task( - 'normal_task', status_state=TaskState.completed + 'normal_task', status_state=TaskState.TASK_STATE_COMPLETED ) async def mock_consume_generator(): @@ -395,7 +400,7 @@ async def raiser_gen_interrupt(): ) self.mock_task_manager.process.assert_called_once_with( - unittest.mock.ANY # Check it was called, arg is the task + ANY # Check it was called, arg is the task ) self.mock_task_manager.get_task.assert_not_called() @@ -415,9 +420,9 @@ async def mock_consume_generator(): mock_consume_generator() ) # After processing `first_event`, the current result will be that task. - self.aggregator.task_manager.get_task.return_value = first_event + self.mock_task_manager.get_task.return_value = first_event - self.aggregator._continue_consuming = AsyncMock() + self.aggregator._continue_consuming = AsyncMock() # type: ignore[method-assign] mock_create_task.side_effect = lambda coro: asyncio.ensure_future(coro) ( @@ -447,7 +452,8 @@ async def test_continue_consuming_processes_remaining_events( # the events *after* the interrupting one are processed by _continue_consuming. auth_event = create_sample_task( - 'task_auth_for_continue', status_state=TaskState.auth_required + 'task_auth_for_continue', + status_state=TaskState.TASK_STATE_AUTH_REQUIRED, ) event_after_auth1 = create_sample_message( 'after auth 1', msg_id='cont1' diff --git a/tests/server/tasks/test_task_manager.py b/tests/server/tasks/test_task_manager.py index 8208ca780..eba8d2f14 100644 --- a/tests/server/tasks/test_task_manager.py +++ b/tests/server/tasks/test_task_manager.py @@ -3,10 +3,12 @@ import pytest +from a2a.auth.user import User +from a2a.server.context import ServerCallContext from a2a.server.tasks import TaskManager -from a2a.types import ( +from a2a.server.tasks.task_manager import append_artifact_to_task +from a2a.types.a2a_pb2 import ( Artifact, - InvalidParamsError, Message, Part, Role, @@ -15,17 +17,42 @@ TaskState, TaskStatus, TaskStatusUpdateEvent, - TextPart, ) -from a2a.utils.errors import ServerError +from a2a.utils.errors import InvalidParamsError -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'context_id': 'session-xyz', - 'status': {'state': 'submitted'}, - 'kind': 'task', -} +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +TEST_CONTEXT = ServerCallContext(user=SampleUser('test_user')) + + +# Create proto task instead of dict +def create_minimal_task( + task_id: str = 'task-abc', + context_id: str = 'session-xyz', +) -> Task: + return Task( + id=task_id, + context_id=context_id, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) + + +MINIMAL_TASK_ID = 'task-abc' +MINIMAL_CONTEXT_ID = 'session-xyz' @pytest.fixture @@ -38,10 +65,11 @@ def mock_task_store() -> AsyncMock: def task_manager(mock_task_store: AsyncMock) -> TaskManager: """Fixture for a TaskManager with a mock TaskStore.""" return TaskManager( - task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['context_id'], + task_id=MINIMAL_TASK_ID, + context_id=MINIMAL_CONTEXT_ID, task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, ) @@ -56,6 +84,7 @@ def test_task_manager_invalid_task_id( context_id='test_context', task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, ) @@ -64,11 +93,11 @@ async def test_get_task_existing( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test getting an existing task.""" - expected_task = Task(**MINIMAL_TASK) + expected_task = create_minimal_task() mock_task_store.get.return_value = expected_task retrieved_task = await task_manager.get_task() assert retrieved_task == expected_task - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK_ID, TEST_CONTEXT) @pytest.mark.asyncio @@ -79,7 +108,7 @@ async def test_get_task_nonexistent( mock_task_store.get.return_value = None retrieved_task = await task_manager.get_task() assert retrieved_task is None - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK_ID, TEST_CONTEXT) @pytest.mark.asyncio @@ -87,9 +116,9 @@ async def test_save_task_event_new_task( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test saving a new task.""" - task = Task(**MINIMAL_TASK) + task = create_minimal_task() await task_manager.save_task_event(task) - mock_task_store.save.assert_called_once_with(task, None) + mock_task_store.save.assert_called_once_with(task, TEST_CONTEXT) @pytest.mark.asyncio @@ -97,26 +126,27 @@ async def test_save_task_event_status_update( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test saving a status update for an existing task.""" - initial_task = Task(**MINIMAL_TASK) + initial_task = create_minimal_task() mock_task_store.get.return_value = initial_task new_status = TaskStatus( - state=TaskState.working, + state=TaskState.TASK_STATE_WORKING, message=Message( - role=Role.agent, - parts=[Part(TextPart(text='content'))], + role=Role.ROLE_AGENT, + parts=[Part(text='content')], message_id='message-id', ), ) event = TaskStatusUpdateEvent( - task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['context_id'], + task_id=MINIMAL_TASK_ID, + context_id=MINIMAL_CONTEXT_ID, status=new_status, - final=False, ) await task_manager.save_task_event(event) - updated_task = initial_task - updated_task.status = new_status - mock_task_store.save.assert_called_once_with(updated_task, None) + # Verify save was called and the task has updated status + call_args = mock_task_store.save.call_args + assert call_args is not None + saved_task = call_args[0][0] + assert saved_task.status.state == TaskState.TASK_STATE_WORKING @pytest.mark.asyncio @@ -124,22 +154,25 @@ async def test_save_task_event_artifact_update( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test saving an artifact update for an existing task.""" - initial_task = Task(**MINIMAL_TASK) + initial_task = create_minimal_task() mock_task_store.get.return_value = initial_task new_artifact = Artifact( artifact_id='artifact-id', name='artifact1', - parts=[Part(TextPart(text='content'))], + parts=[Part(text='content')], ) event = TaskArtifactUpdateEvent( - task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['context_id'], + task_id=MINIMAL_TASK_ID, + context_id=MINIMAL_CONTEXT_ID, artifact=new_artifact, ) await task_manager.save_task_event(event) - updated_task = initial_task - updated_task.artifacts = [new_artifact] - mock_task_store.save.assert_called_once_with(updated_task, None) + # Verify save was called and the task has the artifact + call_args = mock_task_store.save.call_args + assert call_args is not None + saved_task = call_args[0][0] + assert len(saved_task.artifacts) == 1 + assert saved_task.artifacts[0].artifact_id == 'artifact-id' @pytest.mark.asyncio @@ -147,16 +180,15 @@ async def test_save_task_event_metadata_update( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test saving an updated metadata for an existing task.""" - initial_task = Task(**MINIMAL_TASK) + initial_task = create_minimal_task() mock_task_store.get.return_value = initial_task new_metadata = {'meta_key_test': 'meta_value_test'} event = TaskStatusUpdateEvent( - task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['context_id'], + task_id=MINIMAL_TASK_ID, + context_id=MINIMAL_CONTEXT_ID, metadata=new_metadata, - status=TaskStatus(state=TaskState.working), - final=False, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) await task_manager.save_task_event(event) @@ -169,17 +201,16 @@ async def test_ensure_task_existing( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test ensuring an existing task.""" - expected_task = Task(**MINIMAL_TASK) + expected_task = create_minimal_task() mock_task_store.get.return_value = expected_task event = TaskStatusUpdateEvent( - task_id=MINIMAL_TASK['id'], - context_id=MINIMAL_TASK['context_id'], - status=TaskStatus(state=TaskState.working), - final=False, + task_id=MINIMAL_TASK_ID, + context_id=MINIMAL_CONTEXT_ID, + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) retrieved_task = await task_manager.ensure_task(event) assert retrieved_task == expected_task - mock_task_store.get.assert_called_once_with(MINIMAL_TASK['id'], None) + mock_task_store.get.assert_called_once_with(MINIMAL_TASK_ID, TEST_CONTEXT) @pytest.mark.asyncio @@ -193,18 +224,18 @@ async def test_ensure_task_nonexistent( context_id=None, task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, ) event = TaskStatusUpdateEvent( task_id='new-task', context_id='some-context', - status=TaskStatus(state=TaskState.submitted), - final=False, + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) new_task = await task_manager_without_id.ensure_task(event) assert new_task.id == 'new-task' assert new_task.context_id == 'some-context' - assert new_task.status.state == TaskState.submitted - mock_task_store.save.assert_called_once_with(new_task, None) + assert new_task.status.state == TaskState.TASK_STATE_SUBMITTED + mock_task_store.save.assert_called_once_with(new_task, TEST_CONTEXT) assert task_manager_without_id.task_id == 'new-task' assert task_manager_without_id.context_id == 'some-context' @@ -214,7 +245,7 @@ def test_init_task_obj(task_manager: TaskManager) -> None: new_task = task_manager._init_task_obj('new-task', 'new-context') # type: ignore assert new_task.id == 'new-task' assert new_task.context_id == 'new-context' - assert new_task.status.state == TaskState.submitted + assert new_task.status.state == TaskState.TASK_STATE_SUBMITTED assert new_task.history == [] @@ -223,26 +254,26 @@ async def test_save_task( task_manager: TaskManager, mock_task_store: AsyncMock ) -> None: """Test saving a task.""" - task = Task(**MINIMAL_TASK) + task = create_minimal_task() await task_manager._save_task(task) # type: ignore - mock_task_store.save.assert_called_once_with(task, None) + mock_task_store.save.assert_called_once_with(task, TEST_CONTEXT) @pytest.mark.asyncio async def test_save_task_event_mismatched_id_raises_error( task_manager: TaskManager, ) -> None: - """Test that save_task_event raises ServerError on task ID mismatch.""" + """Test that save_task_event raises InvalidParamsError on task ID mismatch.""" # The task_manager is initialized with 'task-abc' mismatched_task = Task( id='wrong-id', context_id='session-xyz', - status=TaskStatus(state=TaskState.submitted), + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - with pytest.raises(ServerError) as exc_info: + with pytest.raises(InvalidParamsError) as exc_info: await task_manager.save_task_event(mismatched_task) - assert isinstance(exc_info.value.error, InvalidParamsError) + assert exc_info.value is not None @pytest.mark.asyncio @@ -255,20 +286,19 @@ async def test_save_task_event_new_task_no_task_id( context_id=None, task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, + ) + task = Task( + id='new-task-id', + context_id='some-context', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) - task_data: dict[str, Any] = { - 'id': 'new-task-id', - 'context_id': 'some-context', - 'status': {'state': 'working'}, - 'kind': 'task', - } - task = Task(**task_data) await task_manager_without_id.save_task_event(task) - mock_task_store.save.assert_called_once_with(task, None) + mock_task_store.save.assert_called_once_with(task, TEST_CONTEXT) assert task_manager_without_id.task_id == 'new-task-id' assert task_manager_without_id.context_id == 'some-context' # initial submit should be updated to working - assert task.status.state == TaskState.working + assert task.status.state == TaskState.TASK_STATE_WORKING @pytest.mark.asyncio @@ -281,6 +311,7 @@ async def test_get_task_no_task_id( context_id='some-context', task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, ) retrieved_task = await task_manager_without_id.get_task() assert retrieved_task is None @@ -297,13 +328,13 @@ async def test_save_task_event_no_task_existing( context_id=None, task_store=mock_task_store, initial_message=None, + context=TEST_CONTEXT, ) mock_task_store.get.return_value = None event = TaskStatusUpdateEvent( task_id='event-task-id', context_id='some-context', - status=TaskStatus(state=TaskState.completed), - final=True, + status=TaskStatus(state=TaskState.TASK_STATE_COMPLETED), ) await task_manager_without_id.save_task_event(event) # Check if a new task was created and saved @@ -312,6 +343,102 @@ async def test_save_task_event_no_task_existing( saved_task = call_args[0][0] assert saved_task.id == 'event-task-id' assert saved_task.context_id == 'some-context' - assert saved_task.status.state == TaskState.completed + assert saved_task.status.state == TaskState.TASK_STATE_COMPLETED assert task_manager_without_id.task_id == 'event-task-id' assert task_manager_without_id.context_id == 'some-context' + + +def test_append_artifact_to_task(): + # Prepare base task + task = create_minimal_task() + assert task.id == 'task-abc' + assert task.context_id == 'session-xyz' + assert task.status.state == TaskState.TASK_STATE_SUBMITTED + assert len(task.history) == 0 # proto repeated fields are empty, not None + assert len(task.artifacts) == 0 + + # Prepare appending artifact and event + artifact_1 = Artifact( + artifact_id='artifact-123', parts=[Part(text='Hello')] + ) + append_event_1 = TaskArtifactUpdateEvent( + artifact=artifact_1, append=False, task_id='123', context_id='123' + ) + + # Test adding a new artifact (not appending) + append_artifact_to_task(task, append_event_1) + assert len(task.artifacts) == 1 + assert task.artifacts[0].artifact_id == 'artifact-123' + assert task.artifacts[0].name == '' # proto default for string + assert len(task.artifacts[0].parts) == 1 + assert task.artifacts[0].parts[0].text == 'Hello' + + # Test replacing the artifact + artifact_2 = Artifact( + artifact_id='artifact-123', + name='updated name', + parts=[Part(text='Updated')], + metadata={'existing_key': 'existing_value'}, + ) + append_event_2 = TaskArtifactUpdateEvent( + artifact=artifact_2, append=False, task_id='123', context_id='123' + ) + append_artifact_to_task(task, append_event_2) + assert len(task.artifacts) == 1 # Should still have one artifact + assert task.artifacts[0].artifact_id == 'artifact-123' + assert task.artifacts[0].name == 'updated name' + assert len(task.artifacts[0].parts) == 1 + assert task.artifacts[0].parts[0].text == 'Updated' + assert task.artifacts[0].metadata['existing_key'] == 'existing_value' + + # Test appending parts to an existing artifact + artifact_with_parts = Artifact( + artifact_id='artifact-123', + parts=[Part(text='Part 2')], + metadata={'new_key': 'new_value'}, + ) + append_event_3 = TaskArtifactUpdateEvent( + artifact=artifact_with_parts, + append=True, + task_id='123', + context_id='123', + ) + append_artifact_to_task(task, append_event_3) + assert len(task.artifacts[0].parts) == 2 + assert task.artifacts[0].parts[0].text == 'Updated' + assert task.artifacts[0].parts[1].text == 'Part 2' + assert task.artifacts[0].metadata['existing_key'] == 'existing_value' + assert task.artifacts[0].metadata['new_key'] == 'new_value' + + # Test adding another new artifact + another_artifact_with_parts = Artifact( + artifact_id='new_artifact', + parts=[Part(text='new artifact Part 1')], + ) + append_event_4 = TaskArtifactUpdateEvent( + artifact=another_artifact_with_parts, + append=False, + task_id='123', + context_id='123', + ) + append_artifact_to_task(task, append_event_4) + assert len(task.artifacts) == 2 + assert task.artifacts[0].artifact_id == 'artifact-123' + assert task.artifacts[1].artifact_id == 'new_artifact' + assert len(task.artifacts[0].parts) == 2 + assert len(task.artifacts[1].parts) == 1 + + # Test appending part to a task that does not have a matching artifact + non_existing_artifact_with_parts = Artifact( + artifact_id='artifact-456', parts=[Part(text='Part 1')] + ) + append_event_5 = TaskArtifactUpdateEvent( + artifact=non_existing_artifact_with_parts, + append=True, + task_id='123', + context_id='123', + ) + append_artifact_to_task(task, append_event_5) + assert len(task.artifacts) == 2 + assert len(task.artifacts[0].parts) == 2 + assert len(task.artifacts[1].parts) == 1 diff --git a/tests/server/tasks/test_task_updater.py b/tests/server/tasks/test_task_updater.py index 891f8a10b..49d9dee43 100644 --- a/tests/server/tasks/test_task_updater.py +++ b/tests/server/tasks/test_task_updater.py @@ -8,14 +8,13 @@ from a2a.server.events import EventQueue from a2a.server.id_generator import IDGenerator from a2a.server.tasks import TaskUpdater -from a2a.types import ( +from a2a.types.a2a_pb2 import ( Message, Part, Role, TaskArtifactUpdateEvent, TaskState, TaskStatusUpdateEvent, - TextPart, ) @@ -39,18 +38,18 @@ def task_updater(event_queue: AsyncMock) -> TaskUpdater: def sample_message() -> Message: """Create a sample message for testing.""" return Message( - role=Role.agent, + role=Role.ROLE_AGENT, task_id='test-task-id', context_id='test-context-id', message_id='test-message-id', - parts=[Part(root=TextPart(text='Test message'))], + parts=[Part(text='Test message')], ) @pytest.fixture def sample_parts() -> list[Part]: """Create sample parts for testing.""" - return [Part(root=TextPart(text='Test part'))] + return [Part(text='Test part')] def test_init(event_queue: AsyncMock) -> None: @@ -71,7 +70,7 @@ async def test_update_status_without_message( task_updater: TaskUpdater, event_queue: AsyncMock ) -> None: """Test updating status without a message.""" - await task_updater.update_status(TaskState.working) + await task_updater.update_status(TaskState.TASK_STATE_WORKING) event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] @@ -79,9 +78,8 @@ async def test_update_status_without_message( assert isinstance(event, TaskStatusUpdateEvent) assert event.task_id == 'test-task-id' assert event.context_id == 'test-context-id' - assert event.final is False - assert event.status.state == TaskState.working - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_WORKING + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -89,7 +87,9 @@ async def test_update_status_with_message( task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message ) -> None: """Test updating status with a message.""" - await task_updater.update_status(TaskState.working, message=sample_message) + await task_updater.update_status( + TaskState.TASK_STATE_WORKING, message=sample_message + ) event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] @@ -97,8 +97,7 @@ async def test_update_status_with_message( assert isinstance(event, TaskStatusUpdateEvent) assert event.task_id == 'test-task-id' assert event.context_id == 'test-context-id' - assert event.final is False - assert event.status.state == TaskState.working + assert event.status.state == TaskState.TASK_STATE_WORKING assert event.status.message == sample_message @@ -106,15 +105,14 @@ async def test_update_status_with_message( async def test_update_status_final( task_updater: TaskUpdater, event_queue: AsyncMock ) -> None: - """Test updating status with final=True.""" - await task_updater.update_status(TaskState.completed, final=True) + """Test updating status with .""" + await task_updater.update_status(TaskState.TASK_STATE_COMPLETED) event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.final is True - assert event.status.state == TaskState.completed + assert event.status.state == TaskState.TASK_STATE_COMPLETED @pytest.mark.asyncio @@ -152,8 +150,8 @@ async def test_add_artifact_generates_id( assert isinstance(event, TaskArtifactUpdateEvent) assert event.artifact.artifact_id == str(known_uuid) assert event.artifact.parts == sample_parts - assert event.append is None - assert event.last_chunk is None + assert event.append is False + assert event.last_chunk is False @pytest.mark.asyncio @@ -224,9 +222,8 @@ async def test_complete_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.completed - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_COMPLETED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -240,8 +237,7 @@ async def test_complete_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.completed - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_COMPLETED assert event.status.message == sample_message @@ -256,9 +252,8 @@ async def test_submit_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.submitted - assert event.final is False - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_SUBMITTED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -272,8 +267,7 @@ async def test_submit_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.submitted - assert event.final is False + assert event.status.state == TaskState.TASK_STATE_SUBMITTED assert event.status.message == sample_message @@ -288,9 +282,8 @@ async def test_start_work_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.working - assert event.final is False - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_WORKING + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -304,8 +297,7 @@ async def test_start_work_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.working - assert event.final is False + assert event.status.state == TaskState.TASK_STATE_WORKING assert event.status.message == sample_message @@ -319,18 +311,18 @@ def test_new_agent_message( ): message = task_updater.new_agent_message(parts=sample_parts) - assert message.role == Role.agent + assert message.role == Role.ROLE_AGENT assert message.task_id == 'test-task-id' assert message.context_id == 'test-context-id' assert message.message_id == '12345678-1234-5678-1234-567812345678' assert message.parts == sample_parts - assert message.metadata is None + assert not message.HasField('metadata') def test_new_agent_message_with_metadata( task_updater: TaskUpdater, sample_parts: list[Part] ) -> None: - """Test creating a new agent message with metadata and final=True.""" + """Test creating a new agent message with metadata and .""" metadata = {'key': 'value'} with patch( @@ -341,7 +333,7 @@ def test_new_agent_message_with_metadata( parts=sample_parts, metadata=metadata ) - assert message.role == Role.agent + assert message.role == Role.ROLE_AGENT assert message.task_id == 'test-task-id' assert message.context_id == 'test-context-id' assert message.message_id == '12345678-1234-5678-1234-567812345678' @@ -378,9 +370,8 @@ async def test_failed_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.failed - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_FAILED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -394,8 +385,7 @@ async def test_failed_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.failed - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_FAILED assert event.status.message == sample_message @@ -410,9 +400,8 @@ async def test_reject_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.rejected - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_REJECTED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -426,8 +415,7 @@ async def test_reject_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.rejected - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_REJECTED assert event.status.message == sample_message @@ -442,9 +430,8 @@ async def test_requires_input_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.input_required - assert event.final is False - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_INPUT_REQUIRED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -458,8 +445,7 @@ async def test_requires_input_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.input_required - assert event.final is False + assert event.status.state == TaskState.TASK_STATE_INPUT_REQUIRED assert event.status.message == sample_message @@ -467,31 +453,29 @@ async def test_requires_input_with_message( async def test_requires_input_final_true( task_updater: TaskUpdater, event_queue: AsyncMock ) -> None: - """Test marking a task as input required with final=True.""" - await task_updater.requires_input(final=True) + """Test marking a task as input required with .""" + await task_updater.requires_input() event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.input_required - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_INPUT_REQUIRED + assert not event.status.HasField('message') @pytest.mark.asyncio async def test_requires_input_with_message_and_final( task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message ) -> None: - """Test marking a task as input required with message and final=True.""" - await task_updater.requires_input(message=sample_message, final=True) + """Test marking a task as input required with message and .""" + await task_updater.requires_input(message=sample_message) event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.input_required - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_INPUT_REQUIRED assert event.status.message == sample_message @@ -506,9 +490,8 @@ async def test_requires_auth_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.auth_required - assert event.final is False - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_AUTH_REQUIRED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -522,8 +505,7 @@ async def test_requires_auth_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.auth_required - assert event.final is False + assert event.status.state == TaskState.TASK_STATE_AUTH_REQUIRED assert event.status.message == sample_message @@ -531,31 +513,29 @@ async def test_requires_auth_with_message( async def test_requires_auth_final_true( task_updater: TaskUpdater, event_queue: AsyncMock ) -> None: - """Test marking a task as auth required with final=True.""" - await task_updater.requires_auth(final=True) + """Test marking a task as auth required with .""" + await task_updater.requires_auth() event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.auth_required - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_AUTH_REQUIRED + assert not event.status.HasField('message') @pytest.mark.asyncio async def test_requires_auth_with_message_and_final( task_updater: TaskUpdater, event_queue: AsyncMock, sample_message: Message ) -> None: - """Test marking a task as auth required with message and final=True.""" - await task_updater.requires_auth(message=sample_message, final=True) + """Test marking a task as auth required with message and .""" + await task_updater.requires_auth(message=sample_message) event_queue.enqueue_event.assert_called_once() event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.auth_required - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_AUTH_REQUIRED assert event.status.message == sample_message @@ -570,9 +550,8 @@ async def test_cancel_without_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.canceled - assert event.final is True - assert event.status.message is None + assert event.status.state == TaskState.TASK_STATE_CANCELED + assert not event.status.HasField('message') @pytest.mark.asyncio @@ -586,8 +565,7 @@ async def test_cancel_with_message( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.status.state == TaskState.canceled - assert event.final is True + assert event.status.state == TaskState.TASK_STATE_CANCELED assert event.status.message == sample_message @@ -651,5 +629,7 @@ async def test_reject_concurrently_with_complete( event = event_queue.enqueue_event.call_args[0][0] assert isinstance(event, TaskStatusUpdateEvent) - assert event.final is True - assert event.status.state in [TaskState.rejected, TaskState.completed] + assert event.status.state in [ + TaskState.TASK_STATE_REJECTED, + TaskState.TASK_STATE_COMPLETED, + ] diff --git a/tests/server/test_integration.py b/tests/server/test_integration.py index 8080136c1..56663e7e9 100644 --- a/tests/server/test_integration.py +++ b/tests/server/test_integration.py @@ -1,10 +1,7 @@ import asyncio - -from typing import Any from unittest import mock import pytest - from starlette.authentication import ( AuthCredentials, AuthenticationBackend, @@ -18,112 +15,115 @@ from starlette.routing import Route from starlette.testclient import TestClient -from a2a.server.apps import ( - A2AFastAPIApplication, - A2AStarletteApplication, +from a2a.server.jsonrpc_models import ( + InternalError, + InvalidParamsError, + InvalidRequestError, + JSONParseError, + MethodNotFoundError, ) -from a2a.server.context import ServerCallContext +from a2a.server.routes import create_agent_card_routes, create_jsonrpc_routes from a2a.types import ( + UnsupportedOperationError, +) +from a2a.types.a2a_pb2 import ( AgentCapabilities, AgentCard, + AgentInterface, + AgentSkill, Artifact, - DataPart, - InternalError, - InvalidParamsError, - InvalidRequestError, - JSONParseError, Message, - MethodNotFoundError, Part, - PushNotificationConfig, Role, - SendMessageResponse, - SendMessageSuccessResponse, Task, TaskArtifactUpdateEvent, TaskPushNotificationConfig, TaskState, TaskStatus, - TextPart, - UnsupportedOperationError, ) from a2a.utils import ( AGENT_CARD_WELL_KNOWN_PATH, - EXTENDED_AGENT_CARD_PATH, - PREV_AGENT_CARD_WELL_KNOWN_PATH, ) -from a2a.utils.errors import MethodNotImplementedError # === TEST SETUP === -MINIMAL_AGENT_SKILL: dict[str, Any] = { - 'id': 'skill-123', - 'name': 'Recipe Finder', - 'description': 'Finds recipes', - 'tags': ['cooking'], -} +MINIMAL_AGENT_SKILL = AgentSkill( + id='skill-123', + name='Recipe Finder', + description='Finds recipes', + tags=['cooking'], +) -MINIMAL_AGENT_AUTH: dict[str, Any] = {'schemes': ['Bearer']} +AGENT_CAPS = AgentCapabilities(push_notifications=True, streaming=True) + +MINIMAL_AGENT_CARD_DATA = AgentCard( + capabilities=AGENT_CAPS, + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + description='Test Agent', + name='TestAgent', + skills=[MINIMAL_AGENT_SKILL], + supported_interfaces=[ + AgentInterface( + url='http://example.com/agent', protocol_binding='HTTP+JSON' + ) + ], + version='1.0', +) -AGENT_CAPS = AgentCapabilities( - push_notifications=True, state_transition_history=False, streaming=True +EXTENDED_AGENT_SKILL = AgentSkill( + id='skill-extended', + name='Extended Skill', + description='Does more things', + tags=['extended'], ) -MINIMAL_AGENT_CARD: dict[str, Any] = { - 'authentication': MINIMAL_AGENT_AUTH, - 'capabilities': AGENT_CAPS, # AgentCapabilities is required but can be empty - 'defaultInputModes': ['text/plain'], - 'defaultOutputModes': ['application/json'], - 'description': 'Test Agent', - 'name': 'TestAgent', - 'skills': [MINIMAL_AGENT_SKILL], - 'url': 'http://example.com/agent', - 'version': '1.0', -} - -EXTENDED_AGENT_CARD_DATA: dict[str, Any] = { - **MINIMAL_AGENT_CARD, - 'name': 'TestAgent Extended', - 'description': 'Test Agent with more details', - 'skills': [ - MINIMAL_AGENT_SKILL, - { - 'id': 'skill-extended', - 'name': 'Extended Skill', - 'description': 'Does more things', - 'tags': ['extended'], - }, +EXTENDED_AGENT_CARD_DATA = AgentCard( + capabilities=AGENT_CAPS, + default_input_modes=['text/plain'], + default_output_modes=['application/json'], + description='Test Agent with more details', + name='TestAgent Extended', + skills=[MINIMAL_AGENT_SKILL, EXTENDED_AGENT_SKILL], + supported_interfaces=[ + AgentInterface( + url='http://example.com/agent', protocol_binding='HTTP+JSON' + ) ], -} -TEXT_PART_DATA: dict[str, Any] = {'kind': 'text', 'text': 'Hello'} + version='1.0', +) +from google.protobuf.struct_pb2 import Struct, Value + +TEXT_PART_DATA = Part(text='Hello') -DATA_PART_DATA: dict[str, Any] = {'kind': 'data', 'data': {'key': 'value'}} +# For proto, Part.data takes a Value(struct_value=Struct) +_struct = Struct() +_struct.update({'key': 'value'}) +DATA_PART = Part(data=Value(struct_value=_struct)) -MINIMAL_MESSAGE_USER: dict[str, Any] = { - 'role': 'user', - 'parts': [TEXT_PART_DATA], - 'message_id': 'msg-123', - 'kind': 'message', -} +MINIMAL_MESSAGE_USER = Message( + role=Role.ROLE_USER, + parts=[TEXT_PART_DATA], + message_id='msg-123', +) -MINIMAL_TASK_STATUS: dict[str, Any] = {'state': 'submitted'} +MINIMAL_TASK_STATUS = TaskStatus(state=TaskState.TASK_STATE_SUBMITTED) -FULL_TASK_STATUS: dict[str, Any] = { - 'state': 'working', - 'message': MINIMAL_MESSAGE_USER, - 'timestamp': '2023-10-27T10:00:00Z', -} +FULL_TASK_STATUS = TaskStatus( + state=TaskState.TASK_STATE_WORKING, + message=MINIMAL_MESSAGE_USER, +) @pytest.fixture def agent_card(): - return AgentCard(**MINIMAL_AGENT_CARD) + return MINIMAL_AGENT_CARD_DATA @pytest.fixture def extended_agent_card_fixture(): - return AgentCard(**EXTENDED_AGENT_CARD_DATA) + return EXTENDED_AGENT_CARD_DATA @pytest.fixture @@ -135,19 +135,51 @@ def handler(): handler.set_push_notification = mock.AsyncMock() handler.get_push_notification = mock.AsyncMock() handler.on_message_send_stream = mock.Mock() - handler.on_resubscribe_to_task = mock.Mock() + handler.on_subscribe_to_task = mock.Mock() return handler +class AppBuilder: + def __init__(self, agent_card, handler, card_modifier=None): + self.agent_card = agent_card + self.handler = handler + self.card_modifier = card_modifier + + def build( + self, + rpc_url='/', + agent_card_url=AGENT_CARD_WELL_KNOWN_PATH, + middleware=None, + routes=None, + ): + from starlette.applications import Starlette + + app_instance = Starlette(middleware=middleware, routes=routes or []) + + # Agent card router + card_routes = create_agent_card_routes( + self.agent_card, + card_url=agent_card_url, + card_modifier=self.card_modifier, + ) + app_instance.routes.extend(card_routes) + + # JSON-RPC router + rpc_routes = create_jsonrpc_routes(self.handler, rpc_url=rpc_url) + app_instance.routes.extend(rpc_routes) + + return app_instance + + @pytest.fixture def app(agent_card: AgentCard, handler: mock.AsyncMock): - return A2AStarletteApplication(agent_card, handler) + return AppBuilder(agent_card, handler) @pytest.fixture -def client(app: A2AStarletteApplication, **kwargs): - """Create a test client with the Starlette app.""" - return TestClient(app.build(**kwargs)) +def client(app, **kwargs): + """Create a test client with the app builder.""" + return TestClient(app.build(**kwargs), headers={'A2A-Version': '1.0'}) # === BASIC FUNCTIONALITY TESTS === @@ -163,120 +195,7 @@ def test_agent_card_endpoint(client: TestClient, agent_card: AgentCard): assert 'streaming' in data['capabilities'] -def test_authenticated_extended_agent_card_endpoint_not_supported( - agent_card: AgentCard, handler: mock.AsyncMock -): - """Test extended card endpoint returns 404 if not supported by main card.""" - # Ensure supportsAuthenticatedExtendedCard is False or None - agent_card.supports_authenticated_extended_card = False - app_instance = A2AStarletteApplication(agent_card, handler) - # The route should not even be added if supportsAuthenticatedExtendedCard is false - # So, building the app and trying to hit it should result in 404 from Starlette itself - client = TestClient(app_instance.build()) - response = client.get('/agent/authenticatedExtendedCard') - assert response.status_code == 404 # Starlette's default for no route - - -def test_agent_card_default_endpoint_has_deprecated_route( - agent_card: AgentCard, handler: mock.AsyncMock -): - """Test agent card deprecated route is available for default route.""" - app_instance = A2AStarletteApplication(agent_card, handler) - client = TestClient(app_instance.build()) - response = client.get(AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 200 - data = response.json() - assert data['name'] == agent_card.name - response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 200 - data = response.json() - assert data['name'] == agent_card.name - - -def test_agent_card_custom_endpoint_has_no_deprecated_route( - agent_card: AgentCard, handler: mock.AsyncMock -): - """Test agent card deprecated route is not available for custom route.""" - app_instance = A2AStarletteApplication(agent_card, handler) - client = TestClient(app_instance.build(agent_card_url='/my-agent')) - response = client.get('/my-agent') - assert response.status_code == 200 - data = response.json() - assert data['name'] == agent_card.name - response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 404 - - -def test_authenticated_extended_agent_card_endpoint_not_supported_fastapi( - agent_card: AgentCard, handler: mock.AsyncMock -): - """Test extended card endpoint returns 404 if not supported by main card.""" - # Ensure supportsAuthenticatedExtendedCard is False or None - agent_card.supports_authenticated_extended_card = False - app_instance = A2AFastAPIApplication(agent_card, handler) - # The route should not even be added if supportsAuthenticatedExtendedCard is false - # So, building the app and trying to hit it should result in 404 from FastAPI itself - client = TestClient(app_instance.build()) - response = client.get('/agent/authenticatedExtendedCard') - assert response.status_code == 404 # FastAPI's default for no route - - -def test_authenticated_extended_agent_card_endpoint_supported_with_specific_extended_card_starlette( - agent_card: AgentCard, - extended_agent_card_fixture: AgentCard, - handler: mock.AsyncMock, -): - """Test extended card endpoint returns the specific extended card when provided.""" - agent_card.supports_authenticated_extended_card = ( - True # Main card must support it - ) - - app_instance = A2AStarletteApplication( - agent_card, handler, extended_agent_card=extended_agent_card_fixture - ) - client = TestClient(app_instance.build()) - - response = client.get('/agent/authenticatedExtendedCard') - assert response.status_code == 200 - data = response.json() - # Verify it's the extended card's data - assert data['name'] == extended_agent_card_fixture.name - assert data['version'] == extended_agent_card_fixture.version - assert len(data['skills']) == len(extended_agent_card_fixture.skills) - assert any(skill['id'] == 'skill-extended' for skill in data['skills']), ( - 'Extended skill not found in served card' - ) - - -def test_authenticated_extended_agent_card_endpoint_supported_with_specific_extended_card_fastapi( - agent_card: AgentCard, - extended_agent_card_fixture: AgentCard, - handler: mock.AsyncMock, -): - """Test extended card endpoint returns the specific extended card when provided.""" - agent_card.supports_authenticated_extended_card = ( - True # Main card must support it - ) - app_instance = A2AFastAPIApplication( - agent_card, handler, extended_agent_card=extended_agent_card_fixture - ) - client = TestClient(app_instance.build()) - - response = client.get('/agent/authenticatedExtendedCard') - assert response.status_code == 200 - data = response.json() - # Verify it's the extended card's data - assert data['name'] == extended_agent_card_fixture.name - assert data['version'] == extended_agent_card_fixture.version - assert len(data['skills']) == len(extended_agent_card_fixture.skills) - assert any(skill['id'] == 'skill-extended' for skill in data['skills']), ( - 'Extended skill not found in served card' - ) - - -def test_agent_card_custom_url( - app: A2AStarletteApplication, agent_card: AgentCard -): +def test_agent_card_custom_url(app, agent_card: AgentCard): """Test the agent card endpoint with a custom URL.""" client = TestClient(app.build(agent_card_url='/my-agent')) response = client.get('/my-agent') @@ -285,21 +204,21 @@ def test_agent_card_custom_url( assert data['name'] == agent_card.name -def test_starlette_rpc_endpoint_custom_url( - app: A2AStarletteApplication, handler: mock.AsyncMock -): +def test_starlette_rpc_endpoint_custom_url(app, handler: mock.AsyncMock): """Test the RPC endpoint with a custom URL.""" # Provide a valid Task object as the return value - task_status = TaskStatus(**MINIMAL_TASK_STATUS) + task_status = MINIMAL_TASK_STATUS task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_get_task.return_value = task - client = TestClient(app.build(rpc_url='/api/rpc')) + client = TestClient( + app.build(rpc_url='/api/rpc'), headers={'A2A-Version': '1.0'} + ) response = client.post( '/api/rpc', json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/get', + 'method': 'GetTask', 'params': {'id': 'task1'}, }, ) @@ -308,21 +227,21 @@ def test_starlette_rpc_endpoint_custom_url( assert data['result']['id'] == 'task1' -def test_fastapi_rpc_endpoint_custom_url( - app: A2AFastAPIApplication, handler: mock.AsyncMock -): +def test_fastapi_rpc_endpoint_custom_url(app, handler: mock.AsyncMock): """Test the RPC endpoint with a custom URL.""" # Provide a valid Task object as the return value - task_status = TaskStatus(**MINIMAL_TASK_STATUS) + task_status = MINIMAL_TASK_STATUS task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_get_task.return_value = task - client = TestClient(app.build(rpc_url='/api/rpc')) + client = TestClient( + app.build(rpc_url='/api/rpc'), headers={'A2A-Version': '1.0'} + ) response = client.post( '/api/rpc', json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/get', + 'method': 'GetTask', 'params': {'id': 'task1'}, }, ) @@ -331,9 +250,7 @@ def test_fastapi_rpc_endpoint_custom_url( assert data['result']['id'] == 'task1' -def test_starlette_build_with_extra_routes( - app: A2AStarletteApplication, agent_card: AgentCard -): +def test_starlette_build_with_extra_routes(app, agent_card: AgentCard): """Test building the app with additional routes.""" def custom_handler(request): @@ -341,7 +258,7 @@ def custom_handler(request): extra_route = Route('/hello', custom_handler, methods=['GET']) test_app = app.build(routes=[extra_route]) - client = TestClient(test_app) + client = TestClient(test_app, headers={'A2A-Version': '1.0'}) # Test the added route response = client.get('/hello') @@ -355,9 +272,7 @@ def custom_handler(request): assert data['name'] == agent_card.name -def test_fastapi_build_with_extra_routes( - app: A2AFastAPIApplication, agent_card: AgentCard -): +def test_fastapi_build_with_extra_routes(app, agent_card: AgentCard): """Test building the app with additional routes.""" def custom_handler(request): @@ -378,16 +293,8 @@ def custom_handler(request): data = response.json() assert data['name'] == agent_card.name - # check if deprecated agent card path route is available with default well-known path - response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 200 - data = response.json() - assert data['name'] == agent_card.name - -def test_fastapi_build_custom_agent_card_path( - app: A2AFastAPIApplication, agent_card: AgentCard -): +def test_fastapi_build_custom_agent_card_path(app, agent_card: AgentCard): """Test building the app with a custom agent card path.""" test_app = app.build(agent_card_url='/agent-card') @@ -399,13 +306,9 @@ def test_fastapi_build_custom_agent_card_path( data = response.json() assert data['name'] == agent_card.name - # Ensure default agent card location is not available - response = client.get(AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 404 - - # check if deprecated agent card path route is not available - response = client.get(PREV_AGENT_CARD_WELL_KNOWN_PATH) - assert response.status_code == 404 + # Ensure default path returns 404 + default_response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert default_response.status_code == 404 # === REQUEST METHODS TESTS === @@ -414,7 +317,7 @@ def test_fastapi_build_custom_agent_card_path( def test_send_message(client: TestClient, handler: mock.AsyncMock): """Test sending a message.""" # Prepare mock response - task_status = TaskStatus(**MINIMAL_TASK_STATUS) + task_status = MINIMAL_TASK_STATUS mock_task = Task( id='task1', context_id='session-xyz', @@ -428,15 +331,14 @@ def test_send_message(client: TestClient, handler: mock.AsyncMock): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'message/send', + 'method': 'SendMessage', 'params': { 'message': { - 'role': 'agent', - 'parts': [{'kind': 'text', 'text': 'Hello'}], - 'message_id': '111', - 'kind': 'message', - 'task_id': 'task1', - 'context_id': 'session-xyz', + 'role': 'ROLE_AGENT', + 'parts': [{'text': 'Hello'}], + 'messageId': '111', + 'taskId': 'task1', + 'contextId': 'session-xyz', } }, }, @@ -446,8 +348,9 @@ def test_send_message(client: TestClient, handler: mock.AsyncMock): assert response.status_code == 200 data = response.json() assert 'result' in data - assert data['result']['id'] == 'task1' - assert data['result']['status']['state'] == 'submitted' + # Result is wrapped in SendMessageResponse with task field + assert data['result']['task']['id'] == 'task1' + assert data['result']['task']['status']['state'] == 'TASK_STATE_SUBMITTED' # Verify handler was called handler.on_message_send.assert_awaited_once() @@ -456,8 +359,8 @@ def test_send_message(client: TestClient, handler: mock.AsyncMock): def test_cancel_task(client: TestClient, handler: mock.AsyncMock): """Test cancelling a task.""" # Setup mock response - task_status = TaskStatus(**MINIMAL_TASK_STATUS) - task_status.state = TaskState.canceled # 'cancelled' # + task_status = MINIMAL_TASK_STATUS + task_status.state = TaskState.TASK_STATE_CANCELED # 'cancelled' # task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_cancel_task.return_value = task @@ -467,7 +370,7 @@ def test_cancel_task(client: TestClient, handler: mock.AsyncMock): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/cancel', + 'method': 'CancelTask', 'params': {'id': 'task1'}, }, ) @@ -476,7 +379,7 @@ def test_cancel_task(client: TestClient, handler: mock.AsyncMock): assert response.status_code == 200 data = response.json() assert data['result']['id'] == 'task1' - assert data['result']['status']['state'] == 'canceled' + assert data['result']['status']['state'] == 'TASK_STATE_CANCELED' # Verify handler was called handler.on_cancel_task.assert_awaited_once() @@ -485,7 +388,7 @@ def test_cancel_task(client: TestClient, handler: mock.AsyncMock): def test_get_task(client: TestClient, handler: mock.AsyncMock): """Test getting a task.""" # Setup mock response - task_status = TaskStatus(**MINIMAL_TASK_STATUS) + task_status = MINIMAL_TASK_STATUS task = Task(id='task1', context_id='ctx1', status=task_status) handler.on_get_task.return_value = task # JSONRPCResponse(root=task) @@ -495,7 +398,7 @@ def test_get_task(client: TestClient, handler: mock.AsyncMock): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/get', + 'method': 'GetTask', 'params': {'id': 'task1'}, }, ) @@ -515,12 +418,11 @@ def test_set_push_notification_config( """Test setting push notification configuration.""" # Setup mock response task_push_config = TaskPushNotificationConfig( - task_id='t2', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='secret-token' - ), + task_id='t2', url='https://example.com', token='secret-token' + ) + handler.on_create_task_push_notification_config.return_value = ( + task_push_config ) - handler.on_set_task_push_notification_config.return_value = task_push_config # Send request response = client.post( @@ -528,13 +430,11 @@ def test_set_push_notification_config( json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/pushNotificationConfig/set', + 'method': 'CreateTaskPushNotificationConfig', 'params': { 'task_id': 't2', - 'pushNotificationConfig': { - 'url': 'https://example.com', - 'token': 'secret-token', - }, + 'url': 'https://example.com', + 'token': 'secret-token', }, }, ) @@ -542,10 +442,10 @@ def test_set_push_notification_config( # Verify response assert response.status_code == 200 data = response.json() - assert data['result']['pushNotificationConfig']['token'] == 'secret-token' + assert data['result']['token'] == 'secret-token' # Verify handler was called - handler.on_set_task_push_notification_config.assert_awaited_once() + handler.on_create_task_push_notification_config.assert_awaited_once() def test_get_push_notification_config( @@ -554,10 +454,7 @@ def test_get_push_notification_config( """Test getting push notification configuration.""" # Setup mock response task_push_config = TaskPushNotificationConfig( - task_id='task1', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='secret-token' - ), + task_id='task1', url='https://example.com', token='secret-token' ) handler.on_get_task_push_notification_config.return_value = task_push_config @@ -568,21 +465,24 @@ def test_get_push_notification_config( json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/pushNotificationConfig/get', - 'params': {'id': 'task1'}, + 'method': 'GetTaskPushNotificationConfig', + 'params': { + 'task_id': 'task1', + 'id': 'pushNotificationConfig', + }, }, ) # Verify response assert response.status_code == 200 data = response.json() - assert data['result']['pushNotificationConfig']['token'] == 'secret-token' + assert data['result']['token'] == 'secret-token' # Verify handler was called handler.on_get_task_push_notification_config.assert_awaited_once() -def test_server_auth(app: A2AStarletteApplication, handler: mock.AsyncMock): +def test_server_auth(app, handler: mock.AsyncMock): class TestAuthMiddleware(AuthenticationBackend): async def authenticate( self, conn: HTTPConnection @@ -597,16 +497,17 @@ async def authenticate( AuthenticationMiddleware, backend=TestAuthMiddleware() ) ] - ) + ), + headers={'A2A-Version': '1.0'}, ) # Set the output message to be the authenticated user name handler.on_message_send.side_effect = lambda params, context: Message( context_id='session-xyz', message_id='112', - role=Role.agent, + role=Role.ROLE_AGENT, parts=[ - Part(TextPart(text=context.user.user_name)), + Part(text=context.user.user_name), ], ) @@ -616,15 +517,14 @@ async def authenticate( json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'message/send', + 'method': 'SendMessage', 'params': { 'message': { - 'role': 'agent', - 'parts': [{'kind': 'text', 'text': 'Hello'}], - 'message_id': '111', - 'kind': 'message', - 'task_id': 'task1', - 'context_id': 'session-xyz', + 'role': 'ROLE_AGENT', + 'parts': [{'text': 'Hello'}], + 'messageId': '111', + 'taskId': 'task1', + 'contextId': 'session-xyz', } }, }, @@ -632,12 +532,10 @@ async def authenticate( # Verify response assert response.status_code == 200 - result = SendMessageResponse.model_validate(response.json()) - assert isinstance(result.root, SendMessageSuccessResponse) - assert isinstance(result.root.result, Message) - message = result.root.result - assert isinstance(message.parts[0].root, TextPart) - assert message.parts[0].root.text == 'test_user' + data = response.json() + assert 'result' in data + # Result is wrapped in SendMessageResponse with message field + assert data['result']['message']['parts'][0]['text'] == 'test_user' # Verify handler was called handler.on_message_send.assert_awaited_once() @@ -647,33 +545,24 @@ async def authenticate( @pytest.mark.asyncio -async def test_message_send_stream( - app: A2AStarletteApplication, handler: mock.AsyncMock -) -> None: +async def test_message_send_stream(app, handler: mock.AsyncMock) -> None: """Test streaming message sending.""" # Setup mock streaming response async def stream_generator(): for i in range(3): - text_part = TextPart(**TEXT_PART_DATA) - data_part = DataPart(**DATA_PART_DATA) artifact = Artifact( artifact_id=f'artifact-{i}', name='result_data', - parts=[Part(root=text_part), Part(root=data_part)], + parts=[TEXT_PART_DATA, DATA_PART], ) last = [False, False, True] - task_artifact_update_event_data: dict[str, Any] = { - 'artifact': artifact, - 'task_id': 'task_id', - 'context_id': 'session-xyz', - 'append': False, - 'lastChunk': last[i], - 'kind': 'artifact-update', - } - - yield TaskArtifactUpdateEvent.model_validate( - task_artifact_update_event_data + yield TaskArtifactUpdateEvent( + artifact=artifact, + task_id='task_id', + context_id='session-xyz', + append=False, + last_chunk=last[i], ) handler.on_message_send_stream.return_value = stream_generator() @@ -681,7 +570,11 @@ async def stream_generator(): client = None try: # Create client - client = TestClient(app.build(), raise_server_exceptions=False) + client = TestClient( + app.build(), + raise_server_exceptions=False, + headers={'A2A-Version': '1.0'}, + ) # Send request with client.stream( 'POST', @@ -689,15 +582,14 @@ async def stream_generator(): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'message/stream', + 'method': 'SendStreamingMessage', 'params': { 'message': { - 'role': 'agent', - 'parts': [{'kind': 'text', 'text': 'Hello'}], - 'message_id': '111', - 'kind': 'message', - 'task_id': 'task_id', - 'context_id': 'session-xyz', + 'role': 'ROLE_AGENT', + 'parts': [{'text': 'Hello'}], + 'messageId': '111', + 'taskId': 'task_id', + 'contextId': 'session-xyz', } }, }, @@ -718,15 +610,9 @@ async def stream_generator(): event_count += 1 # Check content has event data (e.g., part of the first event) - assert ( - b'"artifactId":"artifact-0"' in content - ) # Check for the actual JSON payload - assert ( - b'"artifactId":"artifact-1"' in content - ) # Check for the actual JSON payload - assert ( - b'"artifactId":"artifact-2"' in content - ) # Check for the actual JSON payload + assert b'artifact-0' in content # Check for the actual JSON payload + assert b'artifact-1' in content # Check for the actual JSON payload + assert b'artifact-2' in content # Check for the actual JSON payload assert event_count > 0 finally: # Ensure the client is closed @@ -737,38 +623,34 @@ async def stream_generator(): @pytest.mark.asyncio -async def test_task_resubscription( - app: A2AStarletteApplication, handler: mock.AsyncMock -) -> None: +async def test_task_resubscription(app, handler: mock.AsyncMock) -> None: """Test task resubscription streaming.""" # Setup mock streaming response async def stream_generator(): for i in range(3): - text_part = TextPart(**TEXT_PART_DATA) - data_part = DataPart(**DATA_PART_DATA) artifact = Artifact( artifact_id=f'artifact-{i}', name='result_data', - parts=[Part(root=text_part), Part(root=data_part)], + parts=[TEXT_PART_DATA, DATA_PART], ) last = [False, False, True] - task_artifact_update_event_data: dict[str, Any] = { - 'artifact': artifact, - 'task_id': 'task_id', - 'context_id': 'session-xyz', - 'append': False, - 'lastChunk': last[i], - 'kind': 'artifact-update', - } - yield TaskArtifactUpdateEvent.model_validate( - task_artifact_update_event_data + yield TaskArtifactUpdateEvent( + artifact=artifact, + task_id='task_id', + context_id='session-xyz', + append=False, + last_chunk=last[i], ) - handler.on_resubscribe_to_task.return_value = stream_generator() + handler.on_subscribe_to_task.return_value = stream_generator() # Create client - client = TestClient(app.build(), raise_server_exceptions=False) + client = TestClient( + app.build(), + raise_server_exceptions=False, + headers={'A2A-Version': '1.0'}, + ) try: # Send request using client.stream() context manager @@ -779,7 +661,7 @@ async def stream_generator(): json={ 'jsonrpc': '2.0', 'id': '123', # This ID is used in the success_event above - 'method': 'tasks/resubscribe', + 'method': 'SubscribeToTask', 'params': {'id': 'task1'}, }, ) as response: @@ -804,15 +686,9 @@ async def stream_generator(): break # Check content has event data (e.g., part of the first event) - assert ( - b'"artifactId":"artifact-0"' in content - ) # Check for the actual JSON payload - assert ( - b'"artifactId":"artifact-1"' in content - ) # Check for the actual JSON payload - assert ( - b'"artifactId":"artifact-2"' in content - ) # Check for the actual JSON payload + assert b'artifact-0' in content # Check for the actual JSON payload + assert b'artifact-1' in content # Check for the actual JSON payload + assert b'artifact-2' in content # Check for the actual JSON payload assert event_count > 0 finally: # Ensure the client is closed @@ -847,9 +723,27 @@ def test_invalid_request_structure(client: TestClient): assert response.status_code == 200 data = response.json() assert 'error' in data + # The jsonrpc library returns InvalidRequestError for invalid requests format assert data['error']['code'] == InvalidRequestError().code +def test_invalid_request_method(client: TestClient): + """Test handling an invalid request method.""" + response = client.post( + '/', + json={ + 'jsonrpc': '2.0', # Missing or wrong required fields + 'id': '123', + 'method': 'foo/bar', + }, + ) + assert response.status_code == 200 + data = response.json() + assert 'error' in data + # The jsonrpc library returns MethodNotFoundError for invalid request method + assert data['error']['code'] == MethodNotFoundError().code + + # === DYNAMIC CARD MODIFIER TESTS === @@ -859,13 +753,12 @@ def test_dynamic_agent_card_modifier( """Test that the card_modifier dynamically alters the public agent card.""" async def modifier(card: AgentCard) -> AgentCard: - modified_card = card.model_copy(deep=True) + modified_card = AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Dynamically Modified Agent' return modified_card - app_instance = A2AStarletteApplication( - agent_card, handler, card_modifier=modifier - ) + app_instance = AppBuilder(agent_card, handler, card_modifier=modifier) client = TestClient(app_instance.build()) response = client.get(AGENT_CARD_WELL_KNOWN_PATH) @@ -882,14 +775,13 @@ def test_dynamic_agent_card_modifier_sync( ): """Test that a synchronous card_modifier dynamically alters the public agent card.""" - def modifier(card: AgentCard) -> AgentCard: - modified_card = card.model_copy(deep=True) + async def modifier(card: AgentCard) -> AgentCard: + modified_card = AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Dynamically Modified Agent' return modified_card - app_instance = A2AStarletteApplication( - agent_card, handler, card_modifier=modifier - ) + app_instance = AppBuilder(agent_card, handler, card_modifier=modifier) client = TestClient(app_instance.build()) response = client.get(AGENT_CARD_WELL_KNOWN_PATH) @@ -901,113 +793,18 @@ def modifier(card: AgentCard) -> AgentCard: ) # Ensure other fields are intact -def test_dynamic_extended_agent_card_modifier( - agent_card: AgentCard, - extended_agent_card_fixture: AgentCard, - handler: mock.AsyncMock, -): - """Test that the extended_card_modifier dynamically alters the extended agent card.""" - agent_card.supports_authenticated_extended_card = True - - async def modifier( - card: AgentCard, context: ServerCallContext - ) -> AgentCard: - modified_card = card.model_copy(deep=True) - modified_card.description = 'Dynamically Modified Extended Description' - return modified_card - - # Test with a base extended card - app_instance = A2AStarletteApplication( - agent_card, - handler, - extended_agent_card=extended_agent_card_fixture, - extended_card_modifier=modifier, - ) - client = TestClient(app_instance.build()) - - response = client.get(EXTENDED_AGENT_CARD_PATH) - assert response.status_code == 200 - data = response.json() - assert data['name'] == extended_agent_card_fixture.name - assert data['description'] == 'Dynamically Modified Extended Description' - - # Test without a base extended card (modifier should receive public card) - app_instance_no_base = A2AStarletteApplication( - agent_card, - handler, - extended_agent_card=None, - extended_card_modifier=modifier, - ) - client_no_base = TestClient(app_instance_no_base.build()) - response_no_base = client_no_base.get(EXTENDED_AGENT_CARD_PATH) - assert response_no_base.status_code == 200 - data_no_base = response_no_base.json() - assert data_no_base['name'] == agent_card.name - assert ( - data_no_base['description'] - == 'Dynamically Modified Extended Description' - ) - - -def test_dynamic_extended_agent_card_modifier_sync( - agent_card: AgentCard, - extended_agent_card_fixture: AgentCard, - handler: mock.AsyncMock, -): - """Test that a synchronous extended_card_modifier dynamically alters the extended agent card.""" - agent_card.supports_authenticated_extended_card = True - - def modifier(card: AgentCard, context: ServerCallContext) -> AgentCard: - modified_card = card.model_copy(deep=True) - modified_card.description = 'Dynamically Modified Extended Description' - return modified_card - - # Test with a base extended card - app_instance = A2AStarletteApplication( - agent_card, - handler, - extended_agent_card=extended_agent_card_fixture, - extended_card_modifier=modifier, - ) - client = TestClient(app_instance.build()) - - response = client.get(EXTENDED_AGENT_CARD_PATH) - assert response.status_code == 200 - data = response.json() - assert data['name'] == extended_agent_card_fixture.name - assert data['description'] == 'Dynamically Modified Extended Description' - - # Test without a base extended card (modifier should receive public card) - app_instance_no_base = A2AStarletteApplication( - agent_card, - handler, - extended_agent_card=None, - extended_card_modifier=modifier, - ) - client_no_base = TestClient(app_instance_no_base.build()) - response_no_base = client_no_base.get(EXTENDED_AGENT_CARD_PATH) - assert response_no_base.status_code == 200 - data_no_base = response_no_base.json() - assert data_no_base['name'] == agent_card.name - assert ( - data_no_base['description'] - == 'Dynamically Modified Extended Description' - ) - - def test_fastapi_dynamic_agent_card_modifier( agent_card: AgentCard, handler: mock.AsyncMock ): """Test that the card_modifier dynamically alters the public agent card for FastAPI.""" async def modifier(card: AgentCard) -> AgentCard: - modified_card = card.model_copy(deep=True) + modified_card = AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Dynamically Modified Agent' return modified_card - app_instance = A2AFastAPIApplication( - agent_card, handler, card_modifier=modifier - ) + app_instance = AppBuilder(agent_card, handler, card_modifier=modifier) client = TestClient(app_instance.build()) response = client.get(AGENT_CARD_WELL_KNOWN_PATH) @@ -1021,14 +818,13 @@ def test_fastapi_dynamic_agent_card_modifier_sync( ): """Test that a synchronous card_modifier dynamically alters the public agent card for FastAPI.""" - def modifier(card: AgentCard) -> AgentCard: - modified_card = card.model_copy(deep=True) + async def modifier(card: AgentCard) -> AgentCard: + modified_card = AgentCard() + modified_card.CopyFrom(card) modified_card.name = 'Dynamically Modified Agent' return modified_card - app_instance = A2AFastAPIApplication( - agent_card, handler, card_modifier=modifier - ) + app_instance = AppBuilder(agent_card, handler, card_modifier=modifier) client = TestClient(app_instance.build()) response = client.get(AGENT_CARD_WELL_KNOWN_PATH) @@ -1037,23 +833,25 @@ def modifier(card: AgentCard) -> AgentCard: assert data['name'] == 'Dynamically Modified Agent' -def test_method_not_implemented(client: TestClient, handler: mock.AsyncMock): - """Test handling MethodNotImplementedError.""" - handler.on_get_task.side_effect = MethodNotImplementedError() +def test_unsupported_operation_error( + client: TestClient, handler: mock.AsyncMock +): + """Test handling UnsupportedOperationError.""" + handler.on_get_task.side_effect = UnsupportedOperationError() response = client.post( '/', json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/get', + 'method': 'GetTask', 'params': {'id': 'task1'}, }, ) assert response.status_code == 200 data = response.json() assert 'error' in data - assert data['error']['code'] == UnsupportedOperationError().code + assert data['error']['code'] == -32004 # UnsupportedOperationError def test_unknown_method(client: TestClient): @@ -1082,7 +880,7 @@ def test_validation_error(client: TestClient): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'message/send', + 'method': 'SendMessage', 'params': { 'message': { # Missing required fields @@ -1106,7 +904,7 @@ def test_unhandled_exception(client: TestClient, handler: mock.AsyncMock): json={ 'jsonrpc': '2.0', 'id': '123', - 'method': 'tasks/get', + 'method': 'GetTask', 'params': {'id': 'task1'}, }, ) @@ -1131,3 +929,29 @@ def test_non_dict_json(client: TestClient): data = response.json() assert 'error' in data assert data['error']['code'] == InvalidRequestError().code + + +def test_agent_card_backward_compatibility_supports_extended_card( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that supportsAuthenticatedExtendedCard is injected when extended_agent_card is True.""" + agent_card.capabilities.extended_agent_card = True + app_instance = AppBuilder(agent_card, handler) + client = TestClient(app_instance.build()) + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert data.get('supportsAuthenticatedExtendedCard') is True + + +def test_agent_card_backward_compatibility_no_extended_card( + agent_card: AgentCard, handler: mock.AsyncMock +): + """Test that supportsAuthenticatedExtendedCard is absent when extended_agent_card is False.""" + agent_card.capabilities.extended_agent_card = False + app_instance = AppBuilder(agent_card, handler) + client = TestClient(app_instance.build()) + response = client.get(AGENT_CARD_WELL_KNOWN_PATH) + assert response.status_code == 200 + data = response.json() + assert 'supportsAuthenticatedExtendedCard' not in data diff --git a/tests/server/test_models.py b/tests/server/test_models.py index 64fed1008..bfaaed9d7 100644 --- a/tests/server/test_models.py +++ b/tests/server/test_models.py @@ -5,78 +5,9 @@ from sqlalchemy.orm import DeclarativeBase from a2a.server.models import ( - PydanticListType, - PydanticType, create_push_notification_config_model, create_task_model, ) -from a2a.types import Artifact, TaskState, TaskStatus, TextPart - - -class TestPydanticType: - """Tests for PydanticType SQLAlchemy type decorator.""" - - def test_process_bind_param_with_pydantic_model(self): - pydantic_type = PydanticType(TaskStatus) - status = TaskStatus(state=TaskState.working) - dialect = MagicMock() - - result = pydantic_type.process_bind_param(status, dialect) - assert result['state'] == 'working' - assert result['message'] is None - # TaskStatus may have other optional fields - - def test_process_bind_param_with_none(self): - pydantic_type = PydanticType(TaskStatus) - dialect = MagicMock() - - result = pydantic_type.process_bind_param(None, dialect) - assert result is None - - def test_process_result_value(self): - pydantic_type = PydanticType(TaskStatus) - dialect = MagicMock() - - result = pydantic_type.process_result_value( - {'state': 'completed', 'message': None}, dialect - ) - assert isinstance(result, TaskStatus) - assert result.state == 'completed' - - -class TestPydanticListType: - """Tests for PydanticListType SQLAlchemy type decorator.""" - - def test_process_bind_param_with_list(self): - pydantic_list_type = PydanticListType(Artifact) - artifacts = [ - Artifact( - artifact_id='1', parts=[TextPart(type='text', text='Hello')] - ), - Artifact( - artifact_id='2', parts=[TextPart(type='text', text='World')] - ), - ] - dialect = MagicMock() - - result = pydantic_list_type.process_bind_param(artifacts, dialect) - assert len(result) == 2 - assert result[0]['artifactId'] == '1' # JSON mode uses camelCase - assert result[1]['artifactId'] == '2' - - def test_process_result_value_with_list(self): - pydantic_list_type = PydanticListType(Artifact) - dialect = MagicMock() - data = [ - {'artifact_id': '1', 'parts': [{'type': 'text', 'text': 'Hello'}]}, - {'artifact_id': '2', 'parts': [{'type': 'text', 'text': 'World'}]}, - ] - - result = pydantic_list_type.process_result_value(data, dialect) - assert len(result) == 2 - assert all(isinstance(art, Artifact) for art in result) - assert result[0].artifact_id == '1' - assert result[1].artifact_id == '2' def test_create_task_model(): diff --git a/tests/server/test_owner_resolver.py b/tests/server/test_owner_resolver.py new file mode 100644 index 000000000..dffee863e --- /dev/null +++ b/tests/server/test_owner_resolver.py @@ -0,0 +1,31 @@ +from a2a.auth.user import User + +from a2a.server.context import ServerCallContext +from a2a.server.owner_resolver import resolve_user_scope + + +class SampleUser(User): + """A test implementation of the User interface.""" + + def __init__(self, user_name: str): + self._user_name = user_name + + @property + def is_authenticated(self) -> bool: + return True + + @property + def user_name(self) -> str: + return self._user_name + + +def test_resolve_user_scope_with_authenticated_user(): + """Test resolve_user_scope with an authenticated user in the context.""" + user = SampleUser(user_name='SampleUser') + context = ServerCallContext(user=user) + assert resolve_user_scope(context) == 'SampleUser' + + +def test_resolve_user_default_context(): + """Test resolve_user_scope with default context.""" + assert resolve_user_scope(ServerCallContext()) == '' diff --git a/tests/test_types.py b/tests/test_types.py index 73e6af7bb..7f900498a 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -1,97 +1,48 @@ +"""Tests for protobuf-based A2A types. + +This module tests the proto-generated types from a2a_pb2, using protobuf +patterns like ParseDict, proto constructors, and MessageToDict. +""" + from typing import Any import pytest +from google.protobuf.json_format import MessageToDict, ParseDict +from google.protobuf.struct_pb2 import Struct, Value -from pydantic import ValidationError - -from a2a.types import ( - A2AError, - A2ARequest, - APIKeySecurityScheme, +from a2a.types.a2a_pb2 import ( AgentCapabilities, + AgentInterface, AgentCard, AgentProvider, AgentSkill, + APIKeySecurityScheme, Artifact, CancelTaskRequest, - CancelTaskResponse, - CancelTaskSuccessResponse, - ContentTypeNotSupportedError, - DataPart, - FileBase, - FilePart, - FileWithBytes, - FileWithUri, - GetAuthenticatedExtendedCardRequest, - GetAuthenticatedExtendedCardResponse, - GetAuthenticatedExtendedCardSuccessResponse, - GetTaskPushNotificationConfigParams, GetTaskPushNotificationConfigRequest, - GetTaskPushNotificationConfigResponse, - GetTaskPushNotificationConfigSuccessResponse, GetTaskRequest, - GetTaskResponse, - GetTaskSuccessResponse, - In, - InternalError, - InvalidParamsError, - InvalidRequestError, - JSONParseError, - JSONRPCError, - JSONRPCErrorResponse, - JSONRPCMessage, - JSONRPCRequest, - JSONRPCResponse, Message, - MessageSendParams, - MethodNotFoundError, - OAuth2SecurityScheme, Part, - PartBase, - PushNotificationAuthenticationInfo, - PushNotificationConfig, - PushNotificationNotSupportedError, Role, SecurityScheme, SendMessageRequest, - SendMessageResponse, - SendMessageSuccessResponse, - SendStreamingMessageRequest, - SendStreamingMessageResponse, - SendStreamingMessageSuccessResponse, - SetTaskPushNotificationConfigRequest, - SetTaskPushNotificationConfigResponse, - SetTaskPushNotificationConfigSuccessResponse, + SubscribeToTaskRequest, Task, - TaskArtifactUpdateEvent, - TaskIdParams, - TaskNotCancelableError, - TaskNotFoundError, TaskPushNotificationConfig, - TaskQueryParams, - TaskResubscriptionRequest, TaskState, TaskStatus, - TaskStatusUpdateEvent, - TextPart, - UnsupportedOperationError, ) # --- Helper Data --- -MINIMAL_AGENT_SECURITY_SCHEME: dict[str, Any] = { - 'type': 'apiKey', - 'in': 'header', - 'name': 'X-API-KEY', -} - MINIMAL_AGENT_SKILL: dict[str, Any] = { 'id': 'skill-123', 'name': 'Recipe Finder', 'description': 'Finds recipes', 'tags': ['cooking'], } + FULL_AGENT_SKILL: dict[str, Any] = { 'id': 'skill-123', 'name': 'Recipe Finder', @@ -103,1564 +54,541 @@ } MINIMAL_AGENT_CARD: dict[str, Any] = { - 'capabilities': {}, # AgentCapabilities is required but can be empty + 'capabilities': {}, 'defaultInputModes': ['text/plain'], 'defaultOutputModes': ['application/json'], 'description': 'Test Agent', 'name': 'TestAgent', 'skills': [MINIMAL_AGENT_SKILL], - 'url': 'http://example.com/agent', - 'version': '1.0', -} - -TEXT_PART_DATA: dict[str, Any] = {'kind': 'text', 'text': 'Hello'} -FILE_URI_PART_DATA: dict[str, Any] = { - 'kind': 'file', - 'file': {'uri': 'file:///path/to/file.txt', 'mimeType': 'text/plain'}, -} -FILE_BYTES_PART_DATA: dict[str, Any] = { - 'kind': 'file', - 'file': {'bytes': 'aGVsbG8=', 'name': 'hello.txt'}, # base64 for "hello" -} -DATA_PART_DATA: dict[str, Any] = {'kind': 'data', 'data': {'key': 'value'}} - -MINIMAL_MESSAGE_USER: dict[str, Any] = { - 'role': 'user', - 'parts': [TEXT_PART_DATA], - 'message_id': 'msg-123', - 'kind': 'message', -} - -AGENT_MESSAGE_WITH_FILE: dict[str, Any] = { - 'role': 'agent', - 'parts': [TEXT_PART_DATA, FILE_URI_PART_DATA], - 'metadata': {'timestamp': 'now'}, - 'message_id': 'msg-456', -} - -MINIMAL_TASK_STATUS: dict[str, Any] = {'state': 'submitted'} -FULL_TASK_STATUS: dict[str, Any] = { - 'state': 'working', - 'message': MINIMAL_MESSAGE_USER, - 'timestamp': '2023-10-27T10:00:00Z', -} - -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'context_id': 'session-xyz', - 'status': MINIMAL_TASK_STATUS, - 'kind': 'task', -} -FULL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'context_id': 'session-xyz', - 'status': FULL_TASK_STATUS, - 'history': [MINIMAL_MESSAGE_USER, AGENT_MESSAGE_WITH_FILE], - 'artifacts': [ - { - 'artifactId': 'artifact-123', - 'parts': [DATA_PART_DATA], - 'name': 'result_data', - } + 'supportedInterfaces': [ + {'url': 'http://example.com/agent', 'protocolBinding': 'HTTP+JSON'} ], - 'metadata': {'priority': 'high'}, - 'kind': 'task', -} - -MINIMAL_TASK_ID_PARAMS: dict[str, Any] = {'id': 'task-123'} -FULL_TASK_ID_PARAMS: dict[str, Any] = { - 'id': 'task-456', - 'metadata': {'source': 'test'}, -} - -JSONRPC_ERROR_DATA: dict[str, Any] = { - 'code': -32600, - 'message': 'Invalid Request', + 'version': '1.0', } -JSONRPC_SUCCESS_RESULT: dict[str, Any] = {'status': 'ok', 'data': [1, 2, 3]} - -# --- Test Functions --- - - -def test_security_scheme_valid(): - scheme = SecurityScheme.model_validate(MINIMAL_AGENT_SECURITY_SCHEME) - assert isinstance(scheme.root, APIKeySecurityScheme) - assert scheme.root.type == 'apiKey' - assert scheme.root.in_ == In.header - assert scheme.root.name == 'X-API-KEY' -def test_security_scheme_invalid(): - with pytest.raises(ValidationError): - APIKeySecurityScheme( - name='my_api_key', - ) # Missing "in" # type: ignore - - with pytest.raises(ValidationError): - OAuth2SecurityScheme( - description='OAuth2 scheme missing flows', - ) # Missing "flows" # type: ignore +# --- Test Agent Types --- def test_agent_capabilities(): - caps = AgentCapabilities( - streaming=None, state_transition_history=None, push_notifications=None - ) # All optional - assert caps.push_notifications is None - assert caps.state_transition_history is None - assert caps.streaming is None + """Test AgentCapabilities proto construction.""" + # Empty capabilities + caps = AgentCapabilities() + assert caps.streaming is False # Proto default + assert caps.push_notifications is False + # Full capabilities caps_full = AgentCapabilities( - push_notifications=True, state_transition_history=False, streaming=True + push_notifications=True, + streaming=True, ) assert caps_full.push_notifications is True - assert caps_full.state_transition_history is False assert caps_full.streaming is True def test_agent_provider(): - provider = AgentProvider(organization='Test Org', url='http://test.org') + """Test AgentProvider proto construction.""" + provider = AgentProvider( + organization='Test Org', + url='http://test.org', + ) assert provider.organization == 'Test Org' assert provider.url == 'http://test.org' - with pytest.raises(ValidationError): - AgentProvider(organization='Test Org') # Missing url # type: ignore - -def test_agent_skill_valid(): - skill = AgentSkill(**MINIMAL_AGENT_SKILL) +def test_agent_skill(): + """Test AgentSkill proto construction and ParseDict.""" + # Direct construction + skill = AgentSkill( + id='skill-123', + name='Recipe Finder', + description='Finds recipes', + tags=['cooking'], + ) assert skill.id == 'skill-123' assert skill.name == 'Recipe Finder' assert skill.description == 'Finds recipes' - assert skill.tags == ['cooking'] - assert skill.examples is None - - skill_full = AgentSkill(**FULL_AGENT_SKILL) - assert skill_full.examples == ['Find me a pasta recipe'] - assert skill_full.input_modes == ['text/plain'] + assert list(skill.tags) == ['cooking'] + # ParseDict from dictionary + skill_full = ParseDict(FULL_AGENT_SKILL, AgentSkill()) + assert skill_full.id == 'skill-123' + assert list(skill_full.examples) == ['Find me a pasta recipe'] + assert list(skill_full.input_modes) == ['text/plain'] -def test_agent_skill_invalid(): - with pytest.raises(ValidationError): - AgentSkill( - id='abc', name='n', description='d' - ) # Missing tags # type: ignore - AgentSkill( - **MINIMAL_AGENT_SKILL, - invalid_extra='foo', # type: ignore - ) # Extra field - - -def test_agent_card_valid(): - card = AgentCard(**MINIMAL_AGENT_CARD) +def test_agent_card(): + """Test AgentCard proto construction and ParseDict.""" + card = ParseDict(MINIMAL_AGENT_CARD, AgentCard()) assert card.name == 'TestAgent' assert card.version == '1.0' assert len(card.skills) == 1 assert card.skills[0].id == 'skill-123' - assert card.provider is None # Optional + assert not card.HasField('provider') # Optional, not set -def test_agent_card_invalid(): - bad_card_data = MINIMAL_AGENT_CARD.copy() - del bad_card_data['name'] - with pytest.raises(ValidationError): - AgentCard(**bad_card_data) # Missing name +def test_security_scheme(): + """Test SecurityScheme oneof handling.""" + # API Key scheme + api_key = APIKeySecurityScheme( + name='X-API-KEY', + location='header', # location is a string in proto + ) + scheme = SecurityScheme(api_key_security_scheme=api_key) + assert scheme.HasField('api_key_security_scheme') + assert scheme.api_key_security_scheme.name == 'X-API-KEY' + assert scheme.api_key_security_scheme.location == 'header' -# --- Test Parts --- +# --- Test Part Types --- def test_text_part(): - part = TextPart(**TEXT_PART_DATA) - assert part.kind == 'text' + """Test Part with text field (Part has text as a direct string field).""" + # Part with text + part = Part(text='Hello') assert part.text == 'Hello' - assert part.metadata is None + # Check oneof + assert part.WhichOneof('content') == 'text' - with pytest.raises(ValidationError): - TextPart(type='text') # Missing text # type: ignore - with pytest.raises(ValidationError): - TextPart( - kind='file', # type: ignore - text='hello', - ) # Wrong type literal - -def test_file_part_variants(): - # URI variant - file_uri = FileWithUri( - uri='file:///path/to/file.txt', mime_type='text/plain' +def test_part_with_url(): + """Test Part with url.""" + part = Part( + url='file:///path/to/file.txt', + media_type='text/plain', ) - part_uri = FilePart(kind='file', file=file_uri) - assert isinstance(part_uri.file, FileWithUri) - assert part_uri.file.uri == 'file:///path/to/file.txt' - assert part_uri.file.mime_type == 'text/plain' - assert not hasattr(part_uri.file, 'bytes') - - # Bytes variant - file_bytes = FileWithBytes(bytes='aGVsbG8=', name='hello.txt') - part_bytes = FilePart(kind='file', file=file_bytes) - assert isinstance(part_bytes.file, FileWithBytes) - assert part_bytes.file.bytes == 'aGVsbG8=' - assert part_bytes.file.name == 'hello.txt' - assert not hasattr(part_bytes.file, 'uri') - - # Test deserialization directly - part_uri_deserialized = FilePart.model_validate(FILE_URI_PART_DATA) - assert isinstance(part_uri_deserialized.file, FileWithUri) - assert part_uri_deserialized.file.uri == 'file:///path/to/file.txt' - - part_bytes_deserialized = FilePart.model_validate(FILE_BYTES_PART_DATA) - assert isinstance(part_bytes_deserialized.file, FileWithBytes) - assert part_bytes_deserialized.file.bytes == 'aGVsbG8=' - - # Invalid - wrong type literal - with pytest.raises(ValidationError): - FilePart(kind='text', file=file_uri) # type: ignore - - FilePart(**FILE_URI_PART_DATA, extra='extra') # type: ignore + assert part.url == 'file:///path/to/file.txt' + assert part.media_type == 'text/plain' -def test_data_part(): - part = DataPart(**DATA_PART_DATA) - assert part.kind == 'data' - assert part.data == {'key': 'value'} - - with pytest.raises(ValidationError): - DataPart(type='data') # Missing data # type: ignore - - -def test_part_root_model(): - # Test deserialization of the Union RootModel - part_text = Part.model_validate(TEXT_PART_DATA) - assert isinstance(part_text.root, TextPart) - assert part_text.root.text == 'Hello' - - part_file = Part.model_validate(FILE_URI_PART_DATA) - assert isinstance(part_file.root, FilePart) - assert isinstance(part_file.root.file, FileWithUri) +def test_part_with_raw(): + """Test Part with raw bytes.""" + part = Part( + raw=b'hello', + filename='hello.txt', + ) + assert part.raw == b'hello' + assert part.filename == 'hello.txt' - part_data = Part.model_validate(DATA_PART_DATA) - assert isinstance(part_data.root, DataPart) - assert part_data.root.data == {'key': 'value'} - # Test serialization - assert part_text.model_dump(exclude_none=True) == TEXT_PART_DATA - assert part_file.model_dump(exclude_none=True) == FILE_URI_PART_DATA - assert part_data.model_dump(exclude_none=True) == DATA_PART_DATA +def test_part_with_data(): + """Test Part with data.""" + s = Struct() + s.update({'key': 'value'}) + part = Part(data=Value(struct_value=s)) + assert part.HasField('data') # --- Test Message and Task --- def test_message(): - msg = Message(**MINIMAL_MESSAGE_USER) - assert msg.role == Role.user - assert len(msg.parts) == 1 - assert isinstance( - msg.parts[0].root, TextPart - ) # Access root for RootModel Part - assert msg.metadata is None - - msg_agent = Message(**AGENT_MESSAGE_WITH_FILE) - assert msg_agent.role == Role.agent - assert len(msg_agent.parts) == 2 - assert isinstance(msg_agent.parts[1].root, FilePart) - assert msg_agent.metadata == {'timestamp': 'now'} - - with pytest.raises(ValidationError): - Message( - role='invalid_role', # type: ignore - parts=[TEXT_PART_DATA], # type: ignore - ) # Invalid enum - with pytest.raises(ValidationError): - Message(role=Role.user) # Missing parts # type: ignore + """Test Message proto construction.""" + part = Part(text='Hello') - -def test_task_status(): - status = TaskStatus(**MINIMAL_TASK_STATUS) - assert status.state == TaskState.submitted - assert status.message is None - assert status.timestamp is None - - status_full = TaskStatus(**FULL_TASK_STATUS) - assert status_full.state == TaskState.working - assert isinstance(status_full.message, Message) - assert status_full.timestamp == '2023-10-27T10:00:00Z' - - with pytest.raises(ValidationError): - TaskStatus(state='invalid_state') # Invalid enum # type: ignore - - -def test_task(): - task = Task(**MINIMAL_TASK) - assert task.id == 'task-abc' - assert task.context_id == 'session-xyz' - assert task.status.state == TaskState.submitted - assert task.history is None - assert task.artifacts is None - assert task.metadata is None - - task_full = Task(**FULL_TASK) - assert task_full.id == 'task-abc' - assert task_full.status.state == TaskState.working - assert task_full.history is not None and len(task_full.history) == 2 - assert isinstance(task_full.history[0], Message) - assert task_full.artifacts is not None and len(task_full.artifacts) == 1 - assert isinstance(task_full.artifacts[0], Artifact) - assert task_full.artifacts[0].name == 'result_data' - assert task_full.metadata == {'priority': 'high'} - - with pytest.raises(ValidationError): - Task(id='abc', sessionId='xyz') # Missing status # type: ignore - - -# --- Test JSON-RPC Structures --- - - -def test_jsonrpc_error(): - err = JSONRPCError(code=-32600, message='Invalid Request') - assert err.code == -32600 - assert err.message == 'Invalid Request' - assert err.data is None - - err_data = JSONRPCError( - code=-32001, message='Task not found', data={'taskId': '123'} + msg = Message( + role=Role.ROLE_USER, + message_id='msg-123', ) - assert err_data.code == -32001 - assert err_data.data == {'taskId': '123'} + msg.parts.append(part) + assert msg.role == Role.ROLE_USER + assert msg.message_id == 'msg-123' + assert len(msg.parts) == 1 + assert msg.parts[0].text == 'Hello' -def test_jsonrpc_request(): - req = JSONRPCRequest(jsonrpc='2.0', method='test_method', id=1) - assert req.jsonrpc == '2.0' - assert req.method == 'test_method' - assert req.id == 1 - assert req.params is None - req_params = JSONRPCRequest( - jsonrpc='2.0', method='add', params={'a': 1, 'b': 2}, id='req-1' +def test_message_with_metadata(): + """Test Message with metadata.""" + msg = Message( + role=Role.ROLE_AGENT, + message_id='msg-456', ) - assert req_params.params == {'a': 1, 'b': 2} - assert req_params.id == 'req-1' - - with pytest.raises(ValidationError): - JSONRPCRequest( - jsonrpc='1.0', # type: ignore - method='m', - id=1, - ) # Wrong version - with pytest.raises(ValidationError): - JSONRPCRequest(jsonrpc='2.0', id=1) # Missing method # type: ignore - - -def test_jsonrpc_error_response(): - err_obj = JSONRPCError(**JSONRPC_ERROR_DATA) - resp = JSONRPCErrorResponse(jsonrpc='2.0', error=err_obj, id='err-1') - assert resp.jsonrpc == '2.0' - assert resp.id == 'err-1' - assert resp.error.code == -32600 - assert resp.error.message == 'Invalid Request' - - with pytest.raises(ValidationError): - JSONRPCErrorResponse( - jsonrpc='2.0', id='err-1' - ) # Missing error # type: ignore - - -def test_jsonrpc_response_root_model() -> None: - # Success case - success_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, - 'id': 1, - } - resp_success = JSONRPCResponse.model_validate(success_data) - assert isinstance(resp_success.root, SendMessageSuccessResponse) - assert resp_success.root.result == Task(**MINIMAL_TASK) - - # Error case - error_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPC_ERROR_DATA, - 'id': 'err-1', - } - resp_error = JSONRPCResponse.model_validate(error_data) - assert isinstance(resp_error.root, JSONRPCErrorResponse) - assert resp_error.root.error.code == -32600 - # Note: .model_dump() might serialize the nested error model - assert resp_error.model_dump(exclude_none=True) == error_data + msg.metadata.update({'timestamp': 'now'}) - # Invalid case (neither success nor error structure) - with pytest.raises(ValidationError): - JSONRPCResponse.model_validate({'jsonrpc': '2.0', 'id': 1}) + assert msg.role == Role.ROLE_AGENT + assert dict(msg.metadata) == {'timestamp': 'now'} -# --- Test Request/Response Wrappers --- - - -def test_send_message_request() -> None: - params = MessageSendParams(message=Message(**MINIMAL_MESSAGE_USER)) - req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/send', - 'params': params.model_dump(), - 'id': 5, - } - req = SendMessageRequest.model_validate(req_data) - assert req.method == 'message/send' - assert isinstance(req.params, MessageSendParams) - assert req.params.message.role == Role.user - - with pytest.raises(ValidationError): # Wrong method literal - SendMessageRequest.model_validate( - {**req_data, 'method': 'wrong/method'} - ) - - -def test_send_subscribe_request() -> None: - params = MessageSendParams(message=Message(**MINIMAL_MESSAGE_USER)) - req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/stream', - 'params': params.model_dump(), - 'id': 5, - } - req = SendStreamingMessageRequest.model_validate(req_data) - assert req.method == 'message/stream' - assert isinstance(req.params, MessageSendParams) - assert req.params.message.role == Role.user - - with pytest.raises(ValidationError): # Wrong method literal - SendStreamingMessageRequest.model_validate( - {**req_data, 'method': 'wrong/method'} - ) - - -def test_get_task_request() -> None: - params = TaskQueryParams(id='task-1', history_length=2) - req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/get', - 'params': params.model_dump(), - 'id': 5, - } - req = GetTaskRequest.model_validate(req_data) - assert req.method == 'tasks/get' - assert isinstance(req.params, TaskQueryParams) - assert req.params.id == 'task-1' - assert req.params.history_length == 2 - - with pytest.raises(ValidationError): # Wrong method literal - GetTaskRequest.model_validate({**req_data, 'method': 'wrong/method'}) - - -def test_cancel_task_request() -> None: - params = TaskIdParams(id='task-1') - req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/cancel', - 'params': params.model_dump(), - 'id': 5, - } - req = CancelTaskRequest.model_validate(req_data) - assert req.method == 'tasks/cancel' - assert isinstance(req.params, TaskIdParams) - assert req.params.id == 'task-1' - - with pytest.raises(ValidationError): # Wrong method literal - CancelTaskRequest.model_validate({**req_data, 'method': 'wrong/method'}) - +def test_task_status(): + """Test TaskStatus proto construction.""" + status = TaskStatus(state=TaskState.TASK_STATE_SUBMITTED) + assert status.state == TaskState.TASK_STATE_SUBMITTED + assert not status.HasField('message') + # timestamp is a Timestamp proto, default has seconds=0 + assert status.timestamp.seconds == 0 -def test_get_task_response() -> None: - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, - 'id': 'resp-1', - } - resp = GetTaskResponse.model_validate(resp_data) - assert resp.root.id == 'resp-1' - assert isinstance(resp.root, GetTaskSuccessResponse) - assert isinstance(resp.root.result, Task) - assert resp.root.result.id == 'task-abc' - - with pytest.raises(ValidationError): # Result is not a Task - GetTaskResponse.model_validate( - {'jsonrpc': '2.0', 'result': {'wrong': 'data'}, 'id': 1} - ) - - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = GetTaskResponse.model_validate(resp_data_err) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) - - -def test_send_message_response() -> None: - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, - 'id': 'resp-1', - } - resp = SendMessageResponse.model_validate(resp_data) - assert resp.root.id == 'resp-1' - assert isinstance(resp.root, SendMessageSuccessResponse) - assert isinstance(resp.root.result, Task) - assert resp.root.result.id == 'task-abc' - - with pytest.raises(ValidationError): # Result is not a Task - SendMessageResponse.model_validate( - {'jsonrpc': '2.0', 'result': {'wrong': 'data'}, 'id': 1} - ) - - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = SendMessageResponse.model_validate(resp_data_err) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) - - -def test_cancel_task_response() -> None: - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': MINIMAL_TASK, - 'id': 1, - } - resp = CancelTaskResponse.model_validate(resp_data) - assert resp.root.id == 1 - assert isinstance(resp.root, CancelTaskSuccessResponse) - assert isinstance(resp.root.result, Task) - assert resp.root.result.id == 'task-abc' - - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = CancelTaskResponse.model_validate(resp_data_err) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) - - -def test_send_message_streaming_status_update_response() -> None: - task_status_update_event_data: dict[str, Any] = { - 'status': MINIMAL_TASK_STATUS, - 'taskId': '1', - 'context_id': '2', - 'final': False, - 'kind': 'status-update', - } + # TaskStatus with timestamp + from google.protobuf.timestamp_pb2 import Timestamp - event_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'id': 1, - 'result': task_status_update_event_data, - } - response = SendStreamingMessageResponse.model_validate(event_data) - assert response.root.id == 1 - assert isinstance(response.root, SendStreamingMessageSuccessResponse) - assert isinstance(response.root.result, TaskStatusUpdateEvent) - assert response.root.result.status.state == TaskState.submitted - assert response.root.result.task_id == '1' - assert not response.root.result.final - - with pytest.raises( - ValidationError - ): # Result is not a TaskStatusUpdateEvent - SendStreamingMessageResponse.model_validate( - {'jsonrpc': '2.0', 'result': {'wrong': 'data'}, 'id': 1} - ) - - event_data = { - 'jsonrpc': '2.0', - 'id': 1, - 'result': {**task_status_update_event_data, 'final': True}, - } - response = SendStreamingMessageResponse.model_validate(event_data) - assert response.root.id == 1 - assert isinstance(response.root, SendStreamingMessageSuccessResponse) - assert isinstance(response.root.result, TaskStatusUpdateEvent) - assert response.root.result.final - - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = SendStreamingMessageResponse.model_validate(resp_data_err) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) - - -def test_send_message_streaming_artifact_update_response() -> None: - text_part = TextPart(**TEXT_PART_DATA) - data_part = DataPart(**DATA_PART_DATA) - artifact = Artifact( - artifact_id='artifact-123', - name='result_data', - parts=[Part(root=text_part), Part(root=data_part)], - ) - task_artifact_update_event_data: dict[str, Any] = { - 'artifact': artifact, - 'taskId': 'task_id', - 'context_id': '2', - 'append': False, - 'lastChunk': True, - 'kind': 'artifact-update', - } - event_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'id': 1, - 'result': task_artifact_update_event_data, - } - response = SendStreamingMessageResponse.model_validate(event_data) - assert response.root.id == 1 - assert isinstance(response.root, SendStreamingMessageSuccessResponse) - assert isinstance(response.root.result, TaskArtifactUpdateEvent) - assert response.root.result.artifact.artifact_id == 'artifact-123' - assert response.root.result.artifact.name == 'result_data' - assert response.root.result.task_id == 'task_id' - assert not response.root.result.append - assert response.root.result.last_chunk - assert len(response.root.result.artifact.parts) == 2 - assert isinstance(response.root.result.artifact.parts[0].root, TextPart) - assert isinstance(response.root.result.artifact.parts[1].root, DataPart) - - -def test_set_task_push_notification_response() -> None: - task_push_config = TaskPushNotificationConfig( - task_id='t2', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='token' - ), - ) - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': task_push_config.model_dump(), - 'id': 1, - } - resp = SetTaskPushNotificationConfigResponse.model_validate(resp_data) - assert resp.root.id == 1 - assert isinstance(resp.root, SetTaskPushNotificationConfigSuccessResponse) - assert isinstance(resp.root.result, TaskPushNotificationConfig) - assert resp.root.result.task_id == 't2' - assert ( - resp.root.result.push_notification_config.url == 'https://example.com' + ts = Timestamp() + ts.FromJsonString('2023-10-27T10:00:00Z') + status_working = TaskStatus( + state=TaskState.TASK_STATE_WORKING, + timestamp=ts, ) - assert resp.root.result.push_notification_config.token == 'token' - assert resp.root.result.push_notification_config.authentication is None + assert status_working.state == TaskState.TASK_STATE_WORKING + assert status_working.timestamp.seconds == ts.seconds - auth_info_dict: dict[str, Any] = { - 'schemes': ['Bearer', 'Basic'], - 'credentials': 'user:pass', - } - task_push_config.push_notification_config.authentication = ( - PushNotificationAuthenticationInfo(**auth_info_dict) - ) - resp_data = { - 'jsonrpc': '2.0', - 'result': task_push_config.model_dump(), - 'id': 1, - } - resp = SetTaskPushNotificationConfigResponse.model_validate(resp_data) - assert isinstance(resp.root, SetTaskPushNotificationConfigSuccessResponse) - assert resp.root.result.push_notification_config.authentication is not None - assert resp.root.result.push_notification_config.authentication.schemes == [ - 'Bearer', - 'Basic', - ] - assert ( - resp.root.result.push_notification_config.authentication.credentials - == 'user:pass' - ) - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = SetTaskPushNotificationConfigResponse.model_validate( - resp_data_err - ) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) - - -def test_get_task_push_notification_response() -> None: - task_push_config = TaskPushNotificationConfig( - task_id='t2', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='token' - ), - ) - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': task_push_config.model_dump(), - 'id': 1, - } - resp = GetTaskPushNotificationConfigResponse.model_validate(resp_data) - assert resp.root.id == 1 - assert isinstance(resp.root, GetTaskPushNotificationConfigSuccessResponse) - assert isinstance(resp.root.result, TaskPushNotificationConfig) - assert resp.root.result.task_id == 't2' - assert ( - resp.root.result.push_notification_config.url == 'https://example.com' +def test_task(): + """Test Task proto construction.""" + status = TaskStatus(state=TaskState.TASK_STATE_SUBMITTED) + task = Task( + id='task-abc', + context_id='session-xyz', + status=status, ) - assert resp.root.result.push_notification_config.token == 'token' - assert resp.root.result.push_notification_config.authentication is None - auth_info_dict: dict[str, Any] = { - 'schemes': ['Bearer', 'Basic'], - 'credentials': 'user:pass', - } - task_push_config.push_notification_config.authentication = ( - PushNotificationAuthenticationInfo(**auth_info_dict) - ) - resp_data = { - 'jsonrpc': '2.0', - 'result': task_push_config.model_dump(), - 'id': 1, - } - resp = GetTaskPushNotificationConfigResponse.model_validate(resp_data) - assert isinstance(resp.root, GetTaskPushNotificationConfigSuccessResponse) - assert resp.root.result.push_notification_config.authentication is not None - assert resp.root.result.push_notification_config.authentication.schemes == [ - 'Bearer', - 'Basic', - ] - assert ( - resp.root.result.push_notification_config.authentication.credentials - == 'user:pass' - ) + assert task.id == 'task-abc' + assert task.context_id == 'session-xyz' + assert task.status.state == TaskState.TASK_STATE_SUBMITTED + assert len(task.history) == 0 + assert len(task.artifacts) == 0 - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = GetTaskPushNotificationConfigResponse.model_validate( - resp_data_err + +def test_task_with_history(): + """Test Task with history.""" + status = TaskStatus(state=TaskState.TASK_STATE_WORKING) + task = Task( + id='task-abc', + context_id='session-xyz', + status=status, ) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) + # Add message to history + msg = Message(role=Role.ROLE_USER, message_id='msg-1') + msg.parts.append(Part(text='Hello')) + task.history.append(msg) -# --- Test A2ARequest Root Model --- + assert len(task.history) == 1 + assert task.history[0].role == Role.ROLE_USER -def test_a2a_request_root_model() -> None: - # SendMessageRequest case - send_params = MessageSendParams(message=Message(**MINIMAL_MESSAGE_USER)) - send_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/send', - 'params': send_params.model_dump(), - 'id': 1, - } - a2a_req_send = A2ARequest.model_validate(send_req_data) - assert isinstance(a2a_req_send.root, SendMessageRequest) - assert a2a_req_send.root.method == 'message/send' - - # SendStreamingMessageRequest case - send_subs_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/stream', - 'params': send_params.model_dump(), - 'id': 1, - } - a2a_req_send_subs = A2ARequest.model_validate(send_subs_req_data) - assert isinstance(a2a_req_send_subs.root, SendStreamingMessageRequest) - assert a2a_req_send_subs.root.method == 'message/stream' - - # GetTaskRequest case - get_params = TaskQueryParams(id='t2') - get_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/get', - 'params': get_params.model_dump(), - 'id': 2, - } - a2a_req_get = A2ARequest.model_validate(get_req_data) - assert isinstance(a2a_req_get.root, GetTaskRequest) - assert a2a_req_get.root.method == 'tasks/get' - - # CancelTaskRequest case - id_params = TaskIdParams(id='t2') - cancel_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/cancel', - 'params': id_params.model_dump(), - 'id': 2, - } - a2a_req_cancel = A2ARequest.model_validate(cancel_req_data) - assert isinstance(a2a_req_cancel.root, CancelTaskRequest) - assert a2a_req_cancel.root.method == 'tasks/cancel' - - # SetTaskPushNotificationConfigRequest - task_push_config = TaskPushNotificationConfig( - task_id='t2', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='token' - ), - ) - set_push_notif_req_data: dict[str, Any] = { - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'tasks/pushNotificationConfig/set', - 'params': task_push_config.model_dump(), - } - a2a_req_set_push_req = A2ARequest.model_validate(set_push_notif_req_data) - assert isinstance( - a2a_req_set_push_req.root, SetTaskPushNotificationConfigRequest - ) - assert isinstance( - a2a_req_set_push_req.root.params, TaskPushNotificationConfig - ) - assert ( - a2a_req_set_push_req.root.method == 'tasks/pushNotificationConfig/set' +def test_task_with_artifacts(): + """Test Task with artifacts.""" + status = TaskStatus(state=TaskState.TASK_STATE_COMPLETED) + task = Task( + id='task-abc', + context_id='session-xyz', + status=status, ) - # GetTaskPushNotificationConfigRequest - id_params = TaskIdParams(id='t2') - get_push_notif_req_data: dict[str, Any] = { - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'tasks/pushNotificationConfig/get', - 'params': id_params.model_dump(), - } - a2a_req_get_push_req = A2ARequest.model_validate(get_push_notif_req_data) - assert isinstance( - a2a_req_get_push_req.root, GetTaskPushNotificationConfigRequest - ) - assert isinstance(a2a_req_get_push_req.root.params, TaskIdParams) - assert ( - a2a_req_get_push_req.root.method == 'tasks/pushNotificationConfig/get' - ) + # Add artifact + artifact = Artifact(artifact_id='artifact-123', name='result') + s = Struct() + s.update({'result': 42}) + v = Value(struct_value=s) + artifact.parts.append(Part(data=v)) + task.artifacts.append(artifact) - # TaskResubscriptionRequest - task_resubscribe_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/resubscribe', - 'params': id_params.model_dump(), - 'id': 2, - } - a2a_req_task_resubscribe_req = A2ARequest.model_validate( - task_resubscribe_req_data - ) - assert isinstance( - a2a_req_task_resubscribe_req.root, TaskResubscriptionRequest - ) - assert isinstance(a2a_req_task_resubscribe_req.root.params, TaskIdParams) - assert a2a_req_task_resubscribe_req.root.method == 'tasks/resubscribe' - - # GetAuthenticatedExtendedCardRequest - get_auth_card_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'agent/getAuthenticatedExtendedCard', - 'id': 2, - } - a2a_req_get_auth_card = A2ARequest.model_validate(get_auth_card_req_data) - assert isinstance( - a2a_req_get_auth_card.root, GetAuthenticatedExtendedCardRequest - ) - assert ( - a2a_req_get_auth_card.root.method - == 'agent/getAuthenticatedExtendedCard' - ) + assert len(task.artifacts) == 1 + assert task.artifacts[0].artifact_id == 'artifact-123' + assert task.artifacts[0].name == 'result' - # Invalid method case - invalid_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'invalid/method', - 'params': {}, - 'id': 3, - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(invalid_req_data) +# --- Test Request Types --- -def test_a2a_request_root_model_id_validation() -> None: - # SendMessageRequest case - send_params = MessageSendParams(message=Message(**MINIMAL_MESSAGE_USER)) - send_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/send', - 'params': send_params.model_dump(), - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(send_req_data) # missing id - - # SendStreamingMessageRequest case - send_subs_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'message/stream', - 'params': send_params.model_dump(), - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(send_subs_req_data) # missing id - - # GetTaskRequest case - get_params = TaskQueryParams(id='t2') - get_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/get', - 'params': get_params.model_dump(), - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(get_req_data) # missing id - - # CancelTaskRequest case - id_params = TaskIdParams(id='t2') - cancel_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/cancel', - 'params': id_params.model_dump(), - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(cancel_req_data) # missing id - - # SetTaskPushNotificationConfigRequest - task_push_config = TaskPushNotificationConfig( - task_id='t2', - push_notification_config=PushNotificationConfig( - url='https://example.com', token='token' - ), - ) - set_push_notif_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/pushNotificationConfig/set', - 'params': task_push_config.model_dump(), - 'task_id': 2, - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(set_push_notif_req_data) # missing id - - # GetTaskPushNotificationConfigRequest - id_params = TaskIdParams(id='t2') - get_push_notif_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/pushNotificationConfig/get', - 'params': id_params.model_dump(), - 'task_id': 2, - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(get_push_notif_req_data) - - # TaskResubscriptionRequest - task_resubscribe_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'tasks/resubscribe', - 'params': id_params.model_dump(), - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(task_resubscribe_req_data) - # GetAuthenticatedExtendedCardRequest - get_auth_card_req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'agent/getAuthenticatedExtendedCard', - } - with pytest.raises(ValidationError): - A2ARequest.model_validate(get_auth_card_req_data) # missing id +def test_send_message_request(): + """Test SendMessageRequest proto construction.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-123') + msg.parts.append(Part(text='Hello')) + request = SendMessageRequest(message=msg) + assert request.message.role == Role.ROLE_USER + assert request.message.parts[0].text == 'Hello' -def test_content_type_not_supported_error(): - # Test ContentTypeNotSupportedError - err = ContentTypeNotSupportedError( - code=-32005, message='Incompatible content types' - ) - assert err.code == -32005 - assert err.message == 'Incompatible content types' - assert err.data is None - - with pytest.raises(ValidationError): # Wrong code - ContentTypeNotSupportedError( - code=-32000, # type: ignore - message='Incompatible content types', - ) - - ContentTypeNotSupportedError( - code=-32005, - message='Incompatible content types', - extra='extra', # type: ignore - ) +def test_get_task_request(): + """Test GetTaskRequest proto construction.""" + request = GetTaskRequest(id='task-123') + assert request.id == 'task-123' -def test_task_not_found_error(): - # Test TaskNotFoundError - err2 = TaskNotFoundError( - code=-32001, message='Task not found', data={'taskId': 'abc'} - ) - assert err2.code == -32001 - assert err2.message == 'Task not found' - assert err2.data == {'taskId': 'abc'} - - with pytest.raises(ValidationError): # Wrong code - TaskNotFoundError(code=-32000, message='Task not found') # type: ignore - - TaskNotFoundError(code=-32001, message='Task not found', extra='extra') # type: ignore - - -def test_push_notification_not_supported_error(): - # Test PushNotificationNotSupportedError - err3 = PushNotificationNotSupportedError(data={'taskId': 'abc'}) - assert err3.code == -32003 - assert err3.message == 'Push Notification is not supported' - assert err3.data == {'taskId': 'abc'} - - with pytest.raises(ValidationError): # Wrong code - PushNotificationNotSupportedError( - code=-32000, # type: ignore - message='Push Notification is not available', - ) - with pytest.raises(ValidationError): # Extra field - PushNotificationNotSupportedError( - code=-32001, - message='Push Notification is not available', - extra='extra', # type: ignore - ) - - -def test_internal_error(): - # Test InternalError - err_internal = InternalError() - assert err_internal.code == -32603 - assert err_internal.message == 'Internal error' - assert err_internal.data is None - - err_internal_data = InternalError( - code=-32603, message='Internal error', data={'details': 'stack trace'} - ) - assert err_internal_data.data == {'details': 'stack trace'} - with pytest.raises(ValidationError): # Wrong code - InternalError(code=-32000, message='Internal error') # type: ignore +def test_cancel_task_request(): + """Test CancelTaskRequest proto construction.""" + request = CancelTaskRequest(id='task-123') + assert request.id == 'task-123' - InternalError(code=-32603, message='Internal error', extra='extra') # type: ignore +def test_subscribe_to_task_request(): + """Test SubscribeToTaskRequest proto construction.""" + request = SubscribeToTaskRequest(id='task-123') + assert request.id == 'task-123' -def test_invalid_params_error(): - # Test InvalidParamsError - err_params = InvalidParamsError() - assert err_params.code == -32602 - assert err_params.message == 'Invalid parameters' - assert err_params.data is None - err_params_data = InvalidParamsError( - code=-32602, message='Invalid parameters', data=['param1', 'param2'] +def test_set_task_push_notification_config_request(): + """Test CreateTaskPushNotificationConfigRequest proto construction.""" + request = TaskPushNotificationConfig( + task_id='task-123', + url='https://example.com/webhook', ) - assert err_params_data.data == ['param1', 'param2'] + assert request.task_id == 'task-123' + assert request.url == 'https://example.com/webhook' - with pytest.raises(ValidationError): # Wrong code - InvalidParamsError(code=-32000, message='Invalid parameters') # type: ignore - InvalidParamsError( - code=-32602, - message='Invalid parameters', - extra='extra', # type: ignore +def test_get_task_push_notification_config_request(): + """Test GetTaskPushNotificationConfigRequest proto construction.""" + request = GetTaskPushNotificationConfigRequest( + task_id='task-123', id='config-1' ) + assert request.task_id == 'task-123' -def test_invalid_request_error(): - # Test InvalidRequestError - err_request = InvalidRequestError() - assert err_request.code == -32600 - assert err_request.message == 'Request payload validation error' - assert err_request.data is None - - err_request_data = InvalidRequestError(data={'field': 'missing'}) - assert err_request_data.data == {'field': 'missing'} +# --- Test Enum Values --- - with pytest.raises(ValidationError): # Wrong code - InvalidRequestError( - code=-32000, # type: ignore - message='Request payload validation error', - ) - InvalidRequestError( - code=-32600, - message='Request payload validation error', - extra='extra', # type: ignore - ) # type: ignore +def test_role_enum(): + """Test Role enum values.""" + assert Role.ROLE_UNSPECIFIED == 0 + assert Role.ROLE_USER == 1 + assert Role.ROLE_AGENT == 2 -def test_json_parse_error(): - # Test JSONParseError - err_parse = JSONParseError(code=-32700, message='Invalid JSON payload') - assert err_parse.code == -32700 - assert err_parse.message == 'Invalid JSON payload' - assert err_parse.data is None +def test_task_state_enum(): + """Test TaskState enum values.""" + assert TaskState.TASK_STATE_UNSPECIFIED == 0 + assert TaskState.TASK_STATE_SUBMITTED == 1 + assert TaskState.TASK_STATE_WORKING == 2 + assert TaskState.TASK_STATE_COMPLETED == 3 + assert TaskState.TASK_STATE_FAILED == 4 + assert TaskState.TASK_STATE_CANCELED == 5 + assert TaskState.TASK_STATE_INPUT_REQUIRED == 6 + assert TaskState.TASK_STATE_REJECTED == 7 + assert TaskState.TASK_STATE_AUTH_REQUIRED == 8 - err_parse_data = JSONParseError(data={'foo': 'bar'}) # Explicit None data - assert err_parse_data.data == {'foo': 'bar'} - with pytest.raises(ValidationError): # Wrong code - JSONParseError(code=-32000, message='Invalid JSON payload') # type: ignore +# --- Test ParseDict and MessageToDict --- - JSONParseError(code=-32700, message='Invalid JSON payload', extra='extra') # type: ignore +def test_parse_dict_agent_card(): + """Test ParseDict for AgentCard.""" + card = ParseDict(MINIMAL_AGENT_CARD, AgentCard()) + assert card.name == 'TestAgent' + assert card.supported_interfaces[0].url == 'http://example.com/agent' -def test_method_not_found_error(): - # Test MethodNotFoundError - err_parse = MethodNotFoundError() - assert err_parse.code == -32601 - assert err_parse.message == 'Method not found' - assert err_parse.data is None - - err_parse_data = JSONParseError(data={'foo': 'bar'}) - assert err_parse_data.data == {'foo': 'bar'} - - with pytest.raises(ValidationError): # Wrong code - JSONParseError(code=-32000, message='Invalid JSON payload') # type: ignore - - JSONParseError(code=-32700, message='Invalid JSON payload', extra='extra') # type: ignore - - -def test_task_not_cancelable_error(): - # Test TaskNotCancelableError - err_parse = TaskNotCancelableError() - assert err_parse.code == -32002 - assert err_parse.message == 'Task cannot be canceled' - assert err_parse.data is None - - err_parse_data = JSONParseError( - data={'foo': 'bar'}, message='not cancelled' - ) - assert err_parse_data.data == {'foo': 'bar'} - assert err_parse_data.message == 'not cancelled' - - with pytest.raises(ValidationError): # Wrong code - JSONParseError(code=-32000, message='Task cannot be canceled') # type: ignore - - JSONParseError( - code=-32700, - message='Task cannot be canceled', - extra='extra', # type: ignore - ) - - -def test_unsupported_operation_error(): - # Test UnsupportedOperationError - err_parse = UnsupportedOperationError() - assert err_parse.code == -32004 - assert err_parse.message == 'This operation is not supported' - assert err_parse.data is None - - err_parse_data = JSONParseError( - data={'foo': 'bar'}, message='not supported' + # Round-trip through MessageToDict + card_dict = MessageToDict(card) + assert card_dict['name'] == 'TestAgent' + assert ( + card_dict['supportedInterfaces'][0]['url'] == 'http://example.com/agent' ) - assert err_parse_data.data == {'foo': 'bar'} - assert err_parse_data.message == 'not supported' - - with pytest.raises(ValidationError): # Wrong code - JSONParseError(code=-32000, message='Unsupported') # type: ignore - JSONParseError(code=-32700, message='Unsupported', extra='extra') # type: ignore +def test_parse_dict_task(): + """Test ParseDict for Task with nested structures.""" + task_data = { + 'id': 'task-123', + 'contextId': 'ctx-456', + 'status': { + 'state': 'TASK_STATE_WORKING', + }, + 'history': [ + { + 'role': 'ROLE_USER', + 'messageId': 'msg-1', + 'parts': [{'text': 'Hello'}], + } + ], + } + task = ParseDict(task_data, Task()) + assert task.id == 'task-123' + assert task.context_id == 'ctx-456' + assert task.status.state == TaskState.TASK_STATE_WORKING + assert len(task.history) == 1 + assert task.history[0].role == Role.ROLE_USER -# --- Test TaskIdParams --- +def test_message_to_dict_preserves_structure(): + """Test that MessageToDict produces correct structure.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-123') + msg.parts.append(Part(text='Hello')) -def test_task_id_params_valid(): - """Tests successful validation of TaskIdParams.""" - # Minimal valid data - params_min = TaskIdParams(**MINIMAL_TASK_ID_PARAMS) - assert params_min.id == 'task-123' - assert params_min.metadata is None + msg_dict = MessageToDict(msg) + assert msg_dict['role'] == 'ROLE_USER' + assert msg_dict['messageId'] == 'msg-123' + # Part.text is a direct string field in proto + assert msg_dict['parts'][0]['text'] == 'Hello' - # Full valid data - params_full = TaskIdParams(**FULL_TASK_ID_PARAMS) - assert params_full.id == 'task-456' - assert params_full.metadata == {'source': 'test'} +# --- Test Proto Copy and Equality --- -def test_task_id_params_invalid(): - """Tests validation errors for TaskIdParams.""" - # Missing required 'id' field - with pytest.raises(ValidationError) as excinfo_missing: - TaskIdParams() # type: ignore - assert 'id' in str( - excinfo_missing.value - ) # Check that 'id' is mentioned in the error - invalid_data = MINIMAL_TASK_ID_PARAMS.copy() - invalid_data['extra_field'] = 'allowed' - TaskIdParams(**invalid_data) # type: ignore +def test_proto_copy(): + """Test copying proto messages.""" + original = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) - # Incorrect type for metadata (should be dict) - invalid_metadata_type = {'id': 'task-789', 'metadata': 'not_a_dict'} - with pytest.raises(ValidationError) as excinfo_type: - TaskIdParams(**invalid_metadata_type) # type: ignore - assert 'metadata' in str( - excinfo_type.value - ) # Check that 'metadata' is mentioned + # Copy using CopyFrom + copy = Task() + copy.CopyFrom(original) + assert copy.id == 'task-123' + assert copy.context_id == 'ctx-456' + assert copy.status.state == TaskState.TASK_STATE_SUBMITTED -def test_task_push_notification_config() -> None: - """Tests successful validation of TaskPushNotificationConfig.""" - auth_info_dict: dict[str, Any] = { - 'schemes': ['Bearer', 'Basic'], - 'credentials': 'user:pass', - } - auth_info = PushNotificationAuthenticationInfo(**auth_info_dict) + # Modifying copy doesn't affect original + copy.id = 'task-999' + assert original.id == 'task-123' - push_notification_config = PushNotificationConfig( - url='https://example.com', token='token', authentication=auth_info - ) - assert push_notification_config.url == 'https://example.com' - assert push_notification_config.token == 'token' - assert push_notification_config.authentication == auth_info - task_push_notification_config = TaskPushNotificationConfig( - task_id='task-123', push_notification_config=push_notification_config +def test_proto_equality(): + """Test proto message equality.""" + task1 = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - assert task_push_notification_config.task_id == 'task-123' - assert ( - task_push_notification_config.push_notification_config - == push_notification_config + task2 = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), ) - assert task_push_notification_config.model_dump(exclude_none=True) == { - 'taskId': 'task-123', - 'pushNotificationConfig': { - 'url': 'https://example.com', - 'token': 'token', - 'authentication': { - 'schemes': ['Bearer', 'Basic'], - 'credentials': 'user:pass', - }, - }, - } + assert task1 == task2 -def test_jsonrpc_message_valid(): - """Tests successful validation of JSONRPCMessage.""" - # With string ID - msg_str_id = JSONRPCMessage(jsonrpc='2.0', id='req-1') - assert msg_str_id.jsonrpc == '2.0' - assert msg_str_id.id == 'req-1' + task2.id = 'task-999' + assert task1 != task2 - # With integer ID (will be coerced to float by Pydantic for JSON number compatibility) - msg_int_id = JSONRPCMessage(jsonrpc='2.0', id=1) - assert msg_int_id.jsonrpc == '2.0' - assert ( - msg_int_id.id == 1 - ) # Pydantic v2 keeps int if possible, but float is in type hint - rpc_message = JSONRPCMessage(id=1) - assert rpc_message.jsonrpc == '2.0' - assert rpc_message.id == 1 +# --- Test HasField for Optional Fields --- -def test_jsonrpc_message_invalid(): - """Tests validation errors for JSONRPCMessage.""" - # Incorrect jsonrpc version - with pytest.raises(ValidationError): - JSONRPCMessage(jsonrpc='1.0', id=1) # type: ignore +def test_has_field_optional(): + """Test HasField for checking optional field presence.""" + status = TaskStatus(state=TaskState.TASK_STATE_SUBMITTED) + assert not status.HasField('message') - JSONRPCMessage(jsonrpc='2.0', id=1, extra_field='extra') # type: ignore + # Add message + msg = Message(role=Role.ROLE_USER, message_id='msg-1') + status.message.CopyFrom(msg) + assert status.HasField('message') - # Invalid ID type (e.g., list) - Pydantic should catch this based on type hints - with pytest.raises(ValidationError): - JSONRPCMessage(jsonrpc='2.0', id=[1, 2]) # type: ignore +def test_has_field_oneof(): + """Test HasField for oneof fields.""" + part = Part(text='Hello') + assert part.HasField('text') + assert not part.HasField('url') + assert not part.HasField('data') -def test_file_base_valid(): - """Tests successful validation of FileBase.""" - # No optional fields - base1 = FileBase() - assert base1.mime_type is None - assert base1.name is None + # WhichOneof for checking which oneof is set + assert part.WhichOneof('content') == 'text' - # With mime_type only - base2 = FileBase(mime_type='image/png') - assert base2.mime_type == 'image/png' - assert base2.name is None - # With name only - base3 = FileBase(name='document.pdf') - assert base3.mime_type is None - assert base3.name == 'document.pdf' +# --- Test Repeated Fields --- - # With both fields - base4 = FileBase(mime_type='application/json', name='data.json') - assert base4.mime_type == 'application/json' - assert base4.name == 'data.json' - - -def test_file_base_invalid(): - """Tests validation errors for FileBase.""" - FileBase(extra_field='allowed') # type: ignore - - # Incorrect type for mime_type - with pytest.raises(ValidationError) as excinfo_type_mime: - FileBase(mime_type=123) # type: ignore - assert 'mime_type' in str(excinfo_type_mime.value) - - # Incorrect type for name - with pytest.raises(ValidationError) as excinfo_type_name: - FileBase(name=['list', 'is', 'wrong']) # type: ignore - assert 'name' in str(excinfo_type_name.value) - - -def test_part_base_valid() -> None: - """Tests successful validation of PartBase.""" - # No optional fields (metadata is None) - base1 = PartBase() - assert base1.metadata is None - - # With metadata - meta_data: dict[str, Any] = {'source': 'test', 'timestamp': 12345} - base2 = PartBase(metadata=meta_data) - assert base2.metadata == meta_data +def test_repeated_field_operations(): + """Test operations on repeated fields.""" + task = Task( + id='task-123', + context_id='ctx-456', + status=TaskStatus(state=TaskState.TASK_STATE_SUBMITTED), + ) -def test_part_base_invalid(): - """Tests validation errors for PartBase.""" - PartBase(extra_field='allowed') # type: ignore + # append + msg1 = Message(role=Role.ROLE_USER, message_id='msg-1') + task.history.append(msg1) + assert len(task.history) == 1 - # Incorrect type for metadata (should be dict) - with pytest.raises(ValidationError) as excinfo_type: - PartBase(metadata='not_a_dict') # type: ignore - assert 'metadata' in str(excinfo_type.value) + # extend + msg2 = Message(role=Role.ROLE_AGENT, message_id='msg-2') + msg3 = Message(role=Role.ROLE_USER, message_id='msg-3') + task.history.extend([msg2, msg3]) + assert len(task.history) == 3 + # iteration + roles = [m.role for m in task.history] + assert roles == [Role.ROLE_USER, Role.ROLE_AGENT, Role.ROLE_USER] -def test_a2a_error_validation_and_serialization() -> None: - """Tests validation and serialization of the A2AError RootModel.""" - # 1. Test JSONParseError - json_parse_instance = JSONParseError() - json_parse_data = json_parse_instance.model_dump(exclude_none=True) - a2a_err_parse = A2AError.model_validate(json_parse_data) - assert isinstance(a2a_err_parse.root, JSONParseError) +def test_map_field_operations(): + """Test operations on map fields.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-1') - # 2. Test InvalidRequestError - invalid_req_instance = InvalidRequestError() - invalid_req_data = invalid_req_instance.model_dump(exclude_none=True) - a2a_err_invalid_req = A2AError.model_validate(invalid_req_data) - assert isinstance(a2a_err_invalid_req.root, InvalidRequestError) + # Update map + msg.metadata.update({'key1': 'value1', 'key2': 'value2'}) + assert dict(msg.metadata) == {'key1': 'value1', 'key2': 'value2'} - # 3. Test MethodNotFoundError - method_not_found_instance = MethodNotFoundError() - method_not_found_data = method_not_found_instance.model_dump( - exclude_none=True - ) - a2a_err_method = A2AError.model_validate(method_not_found_data) - assert isinstance(a2a_err_method.root, MethodNotFoundError) - - # 4. Test InvalidParamsError - invalid_params_instance = InvalidParamsError() - invalid_params_data = invalid_params_instance.model_dump(exclude_none=True) - a2a_err_params = A2AError.model_validate(invalid_params_data) - assert isinstance(a2a_err_params.root, InvalidParamsError) - - # 5. Test InternalError - internal_err_instance = InternalError() - internal_err_data = internal_err_instance.model_dump(exclude_none=True) - a2a_err_internal = A2AError.model_validate(internal_err_data) - assert isinstance(a2a_err_internal.root, InternalError) - - # 6. Test TaskNotFoundError - task_not_found_instance = TaskNotFoundError(data={'taskId': 't1'}) - task_not_found_data = task_not_found_instance.model_dump(exclude_none=True) - a2a_err_task_nf = A2AError.model_validate(task_not_found_data) - assert isinstance(a2a_err_task_nf.root, TaskNotFoundError) - - # 7. Test TaskNotCancelableError - task_not_cancelable_instance = TaskNotCancelableError() - task_not_cancelable_data = task_not_cancelable_instance.model_dump( - exclude_none=True - ) - a2a_err_task_nc = A2AError.model_validate(task_not_cancelable_data) - assert isinstance(a2a_err_task_nc.root, TaskNotCancelableError) + # Access individual keys + assert msg.metadata['key1'] == 'value1' - # 8. Test PushNotificationNotSupportedError - push_not_supported_instance = PushNotificationNotSupportedError() - push_not_supported_data = push_not_supported_instance.model_dump( - exclude_none=True - ) - a2a_err_push_ns = A2AError.model_validate(push_not_supported_data) - assert isinstance(a2a_err_push_ns.root, PushNotificationNotSupportedError) - - # 9. Test UnsupportedOperationError - unsupported_op_instance = UnsupportedOperationError() - unsupported_op_data = unsupported_op_instance.model_dump(exclude_none=True) - a2a_err_unsupported = A2AError.model_validate(unsupported_op_data) - assert isinstance(a2a_err_unsupported.root, UnsupportedOperationError) - - # 10. Test ContentTypeNotSupportedError - content_type_err_instance = ContentTypeNotSupportedError() - content_type_err_data = content_type_err_instance.model_dump( - exclude_none=True - ) - a2a_err_content = A2AError.model_validate(content_type_err_data) - assert isinstance(a2a_err_content.root, ContentTypeNotSupportedError) + # Check containment + assert 'key1' in msg.metadata + assert 'key3' not in msg.metadata - # 11. Test invalid data (doesn't match any known error code/structure) - invalid_data: dict[str, Any] = {'code': -99999, 'message': 'Unknown error'} - with pytest.raises(ValidationError): - A2AError.model_validate(invalid_data) +# --- Test Serialization --- -def test_subclass_enums() -> None: - """validate subtype enum types""" - assert In.cookie == 'cookie' - assert Role.user == 'user' +def test_serialize_to_bytes(): + """Test serializing proto to bytes.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-123') + msg.parts.append(Part(text='Hello')) - assert TaskState.working == 'working' + # Serialize + data = msg.SerializeToString() + assert isinstance(data, bytes) + assert len(data) > 0 + # Deserialize + msg2 = Message() + msg2.ParseFromString(data) + assert msg2.role == Role.ROLE_USER + assert msg2.message_id == 'msg-123' + assert msg2.parts[0].text == 'Hello' -def test_get_task_push_config_params() -> None: - """Tests successful validation of GetTaskPushNotificationConfigParams.""" - # Minimal valid data - params = {'id': 'task-1234'} - TaskIdParams.model_validate(params) - GetTaskPushNotificationConfigParams.model_validate(params) +def test_serialize_to_json(): + """Test serializing proto to JSON via MessageToDict.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-123') + msg.parts.append(Part(text='Hello')) -def test_use_get_task_push_notification_params_for_request() -> None: - # GetTaskPushNotificationConfigRequest - get_push_notif_req_data: dict[str, Any] = { - 'id': 1, - 'jsonrpc': '2.0', - 'method': 'tasks/pushNotificationConfig/get', - 'params': {'id': 'task-1234', 'pushNotificationConfigId': 'c1'}, - } - a2a_req_get_push_req = A2ARequest.model_validate(get_push_notif_req_data) - assert isinstance( - a2a_req_get_push_req.root, GetTaskPushNotificationConfigRequest - ) - assert isinstance( - a2a_req_get_push_req.root.params, GetTaskPushNotificationConfigParams - ) - assert ( - a2a_req_get_push_req.root.method == 'tasks/pushNotificationConfig/get' - ) + # MessageToDict for JSON-serializable dict + msg_dict = MessageToDict(msg) + import json -def test_camelCase_access_raises_attribute_error() -> None: - """ - Tests that accessing or setting fields via their camelCase alias - raises an AttributeError. - """ - skill = AgentSkill( - id='hello_world', - name='Returns hello world', - description='just returns hello world', - tags=['hello world'], - examples=['hi', 'hello world'], - ) + json_str = json.dumps(msg_dict) + assert 'ROLE_USER' in json_str + assert 'msg-123' in json_str - # Initialization with camelCase still works due to Pydantic's populate_by_name config - agent_card = AgentCard( - name='Hello World Agent', - description='Just a hello world agent', - url='http://localhost:9999/', - version='1.0.0', - defaultInputModes=['text'], # type: ignore - defaultOutputModes=['text'], # type: ignore - capabilities=AgentCapabilities(streaming=True), - skills=[skill], - supportsAuthenticatedExtendedCard=True, # type: ignore - ) - # --- Test that using camelCase aliases raises errors --- +# --- Test Default Values --- - # Test setting an attribute via camelCase alias raises AttributeError - with pytest.raises( - ValueError, - match='"AgentCard" object has no field "supportsAuthenticatedExtendedCard"', - ): - agent_card.supportsAuthenticatedExtendedCard = False - # Test getting an attribute via camelCase alias raises AttributeError - with pytest.raises( - AttributeError, - match="'AgentCard' object has no attribute 'defaultInputModes'", - ): - _ = agent_card.defaultInputModes +def test_default_values(): + """Test proto default values.""" + # Empty message has defaults + msg = Message() + assert msg.role == Role.ROLE_UNSPECIFIED # Enum default is 0 + assert msg.message_id == '' # String default is empty + assert len(msg.parts) == 0 # Repeated field default is empty - # --- Test that using snake_case names works correctly --- + # Task status defaults + status = TaskStatus() + assert status.state == TaskState.TASK_STATE_UNSPECIFIED + assert status.timestamp.seconds == 0 # Timestamp proto default - # The value should be unchanged because the camelCase setattr failed - assert agent_card.supports_authenticated_extended_card is True - # Now, set it correctly using the snake_case name - agent_card.supports_authenticated_extended_card = False - assert agent_card.supports_authenticated_extended_card is False +def test_clear_field(): + """Test clearing fields.""" + msg = Message(role=Role.ROLE_USER, message_id='msg-123') + assert msg.message_id == 'msg-123' - # Get the attribute correctly using the snake_case name - default_input_modes = agent_card.default_input_modes - assert default_input_modes == ['text'] - assert agent_card.default_input_modes == ['text'] + msg.ClearField('message_id') + assert msg.message_id == '' # Back to default + # Clear nested message + status = TaskStatus(state=TaskState.TASK_STATE_WORKING) + status.message.CopyFrom(Message(role=Role.ROLE_USER)) + assert status.HasField('message') -def test_get_authenticated_extended_card_request() -> None: - req_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'method': 'agent/getAuthenticatedExtendedCard', - 'id': 5, - } - req = GetAuthenticatedExtendedCardRequest.model_validate(req_data) - assert req.method == 'agent/getAuthenticatedExtendedCard' - assert req.id == 5 - # This request has no params, so we don't check for that. - - with pytest.raises(ValidationError): # Wrong method literal - GetAuthenticatedExtendedCardRequest.model_validate( - {**req_data, 'method': 'wrong/method'} - ) - - with pytest.raises(ValidationError): # Missing id - GetAuthenticatedExtendedCardRequest.model_validate( - {'jsonrpc': '2.0', 'method': 'agent/getAuthenticatedExtendedCard'} - ) - - -def test_get_authenticated_extended_card_response() -> None: - resp_data: dict[str, Any] = { - 'jsonrpc': '2.0', - 'result': MINIMAL_AGENT_CARD, - 'id': 'resp-1', - } - resp = GetAuthenticatedExtendedCardResponse.model_validate(resp_data) - assert resp.root.id == 'resp-1' - assert isinstance(resp.root, GetAuthenticatedExtendedCardSuccessResponse) - assert isinstance(resp.root.result, AgentCard) - assert resp.root.result.name == 'TestAgent' - - with pytest.raises(ValidationError): # Result is not an AgentCard - GetAuthenticatedExtendedCardResponse.model_validate( - {'jsonrpc': '2.0', 'result': {'wrong': 'data'}, 'id': 1} - ) - - resp_data_err: dict[str, Any] = { - 'jsonrpc': '2.0', - 'error': JSONRPCError(**TaskNotFoundError().model_dump()), - 'id': 'resp-1', - } - resp_err = GetAuthenticatedExtendedCardResponse.model_validate( - resp_data_err - ) - assert resp_err.root.id == 'resp-1' - assert isinstance(resp_err.root, JSONRPCErrorResponse) - assert resp_err.root.error is not None - assert isinstance(resp_err.root.error, JSONRPCError) + status.ClearField('message') + assert not status.HasField('message') diff --git a/tests/utils/test_artifact.py b/tests/utils/test_artifact.py deleted file mode 100644 index 489c047c4..000000000 --- a/tests/utils/test_artifact.py +++ /dev/null @@ -1,159 +0,0 @@ -import unittest -import uuid - -from unittest.mock import patch - -from a2a.types import ( - Artifact, - DataPart, - Part, - TextPart, -) -from a2a.utils.artifact import ( - get_artifact_text, - new_artifact, - new_data_artifact, - new_text_artifact, -) - - -class TestArtifact(unittest.TestCase): - @patch('uuid.uuid4') - def test_new_artifact_generates_id(self, mock_uuid4): - mock_uuid = uuid.UUID('abcdef12-1234-5678-1234-567812345678') - mock_uuid4.return_value = mock_uuid - artifact = new_artifact(parts=[], name='test_artifact') - self.assertEqual(artifact.artifact_id, str(mock_uuid)) - - def test_new_artifact_assigns_parts_name_description(self): - parts = [Part(root=TextPart(text='Sample text'))] - name = 'My Artifact' - description = 'This is a test artifact.' - artifact = new_artifact(parts=parts, name=name, description=description) - self.assertEqual(artifact.parts, parts) - self.assertEqual(artifact.name, name) - self.assertEqual(artifact.description, description) - - def test_new_artifact_empty_description_if_not_provided(self): - parts = [Part(root=TextPart(text='Another sample'))] - name = 'Artifact_No_Desc' - artifact = new_artifact(parts=parts, name=name) - self.assertEqual(artifact.description, None) - - def test_new_text_artifact_creates_single_text_part(self): - text = 'This is a text artifact.' - name = 'Text_Artifact' - artifact = new_text_artifact(text=text, name=name) - self.assertEqual(len(artifact.parts), 1) - self.assertIsInstance(artifact.parts[0].root, TextPart) - - def test_new_text_artifact_part_contains_provided_text(self): - text = 'Hello, world!' - name = 'Greeting_Artifact' - artifact = new_text_artifact(text=text, name=name) - self.assertEqual(artifact.parts[0].root.text, text) - - def test_new_text_artifact_assigns_name_description(self): - text = 'Some content.' - name = 'Named_Text_Artifact' - description = 'Description for text artifact.' - artifact = new_text_artifact( - text=text, name=name, description=description - ) - self.assertEqual(artifact.name, name) - self.assertEqual(artifact.description, description) - - def test_new_data_artifact_creates_single_data_part(self): - sample_data = {'key': 'value', 'number': 123} - name = 'Data_Artifact' - artifact = new_data_artifact(data=sample_data, name=name) - self.assertEqual(len(artifact.parts), 1) - self.assertIsInstance(artifact.parts[0].root, DataPart) - - def test_new_data_artifact_part_contains_provided_data(self): - sample_data = {'content': 'test_data', 'is_valid': True} - name = 'Structured_Data_Artifact' - artifact = new_data_artifact(data=sample_data, name=name) - self.assertIsInstance(artifact.parts[0].root, DataPart) - # Ensure the 'data' attribute of DataPart is accessed for comparison - self.assertEqual(artifact.parts[0].root.data, sample_data) - - def test_new_data_artifact_assigns_name_description(self): - sample_data = {'info': 'some details'} - name = 'Named_Data_Artifact' - description = 'Description for data artifact.' - artifact = new_data_artifact( - data=sample_data, name=name, description=description - ) - self.assertEqual(artifact.name, name) - self.assertEqual(artifact.description, description) - - -class TestGetArtifactText(unittest.TestCase): - def test_get_artifact_text_single_part(self): - # Setup - artifact = Artifact( - name='test-artifact', - parts=[Part(root=TextPart(text='Hello world'))], - artifact_id='test-artifact-id', - ) - - # Exercise - result = get_artifact_text(artifact) - - # Verify - assert result == 'Hello world' - - def test_get_artifact_text_multiple_parts(self): - # Setup - artifact = Artifact( - name='test-artifact', - parts=[ - Part(root=TextPart(text='First line')), - Part(root=TextPart(text='Second line')), - Part(root=TextPart(text='Third line')), - ], - artifact_id='test-artifact-id', - ) - - # Exercise - result = get_artifact_text(artifact) - - # Verify - default delimiter is newline - assert result == 'First line\nSecond line\nThird line' - - def test_get_artifact_text_custom_delimiter(self): - # Setup - artifact = Artifact( - name='test-artifact', - parts=[ - Part(root=TextPart(text='First part')), - Part(root=TextPart(text='Second part')), - Part(root=TextPart(text='Third part')), - ], - artifact_id='test-artifact-id', - ) - - # Exercise - result = get_artifact_text(artifact, delimiter=' | ') - - # Verify - assert result == 'First part | Second part | Third part' - - def test_get_artifact_text_empty_parts(self): - # Setup - artifact = Artifact( - name='test-artifact', - parts=[], - artifact_id='test-artifact-id', - ) - - # Exercise - result = get_artifact_text(artifact) - - # Verify - assert result == '' - - -if __name__ == '__main__': - unittest.main() diff --git a/tests/utils/test_constants.py b/tests/utils/test_constants.py index 59e9b8366..1c427b3fb 100644 --- a/tests/utils/test_constants.py +++ b/tests/utils/test_constants.py @@ -8,14 +8,19 @@ def test_agent_card_constants(): assert ( constants.AGENT_CARD_WELL_KNOWN_PATH == '/.well-known/agent-card.json' ) - assert ( - constants.PREV_AGENT_CARD_WELL_KNOWN_PATH == '/.well-known/agent.json' - ) - assert ( - constants.EXTENDED_AGENT_CARD_PATH == '/agent/authenticatedExtendedCard' - ) def test_default_rpc_url(): """Test default RPC URL constant.""" assert constants.DEFAULT_RPC_URL == '/' + + +def test_version_header(): + """Test version header constant.""" + assert constants.VERSION_HEADER == 'A2A-Version' + + +def test_protocol_versions(): + """Test protocol version constants.""" + assert constants.PROTOCOL_VERSION_1_0 == '1.0' + assert constants.PROTOCOL_VERSION_CURRENT == '1.0' diff --git a/tests/utils/test_error_handlers.py b/tests/utils/test_error_handlers.py index ec41dc1f5..93ad6a7c0 100644 --- a/tests/utils/test_error_handlers.py +++ b/tests/utils/test_error_handlers.py @@ -1,44 +1,65 @@ """Tests for a2a.utils.error_handlers module.""" +import logging + from unittest.mock import patch import pytest from a2a.types import ( InternalError, - InvalidRequestError, - MethodNotFoundError, - TaskNotFoundError, ) from a2a.utils.error_handlers import ( - A2AErrorToHttpStatus, rest_error_handler, rest_stream_error_handler, ) -from a2a.utils.errors import ServerError +from a2a.utils.errors import ( + InvalidRequestError, +) class MockJSONResponse: - def __init__(self, content, status_code): + def __init__(self, content, status_code, media_type=None): self.content = content self.status_code = status_code + self.media_type = media_type + + +class MockEventSourceResponse: + def __init__(self, body_iterator): + self.body_iterator = body_iterator @pytest.mark.asyncio async def test_rest_error_handler_server_error(): - """Test rest_error_handler with ServerError.""" + """Test rest_error_handler with A2AError.""" error = InvalidRequestError(message='Bad request') @rest_error_handler async def failing_func(): - raise ServerError(error=error) + raise error with patch('a2a.utils.error_handlers.JSONResponse', MockJSONResponse): result = await failing_func() assert isinstance(result, MockJSONResponse) assert result.status_code == 400 - assert result.content == {'message': 'Bad request'} + assert result.media_type == 'application/json' + assert result.content == { + 'error': { + 'code': 400, + 'status': 'INVALID_ARGUMENT', + 'message': 'Bad request', + 'details': [ + { + '@type': 'type.googleapis.com/google.rpc.ErrorInfo', + 'reason': 'INVALID_REQUEST', + 'domain': 'a2a-protocol.org', + 'metadata': {}, + } + ], + } + } @pytest.mark.asyncio @@ -54,39 +75,116 @@ async def failing_func(): assert isinstance(result, MockJSONResponse) assert result.status_code == 500 - assert result.content == {'message': 'unknown exception'} + assert result.media_type == 'application/json' + assert result.content == { + 'error': { + 'code': 500, + 'status': 'INTERNAL', + 'message': 'unknown exception', + } + } @pytest.mark.asyncio async def test_rest_stream_error_handler_server_error(): - """Test rest_stream_error_handler with ServerError.""" + """Test rest_stream_error_handler with A2AError.""" error = InternalError(message='Internal server error') @rest_stream_error_handler async def failing_stream(): - raise ServerError(error=error) + raise error - with pytest.raises(ServerError) as exc_info: - await failing_stream() + response = await failing_stream() - assert exc_info.value.error == error + assert response.status_code == 500 @pytest.mark.asyncio async def test_rest_stream_error_handler_reraises_exception(): - """Test rest_stream_error_handler reraises other exceptions.""" + """Test rest_stream_error_handler catches other exceptions and returns JSONResponse.""" @rest_stream_error_handler async def failing_stream(): raise RuntimeError('Stream failed') - with pytest.raises(RuntimeError, match='Stream failed'): - await failing_stream() + response = await failing_stream() + assert response.status_code == 500 + + +@pytest.mark.asyncio +async def test_rest_error_handler_success(): + """Test rest_error_handler on success.""" + @rest_error_handler + async def successful_func(): + return 'success' + + result = await successful_func() + assert result == 'success' + + +@pytest.mark.asyncio +async def test_rest_stream_error_handler_generator_error(caplog): + """Test rest_stream_error_handler catches error during async generation after first success.""" + error = InternalError(message='Stream error during generation') + + async def failing_generator(): + yield 'success chunk 1' + raise error + + @rest_stream_error_handler + async def successful_prep_failing_stream(): + return MockEventSourceResponse(failing_generator()) + + response = await successful_prep_failing_stream() + + # Assert it returns successfully + assert isinstance(response, MockEventSourceResponse) + + # Now consume the stream + chunks = [] + with ( + caplog.at_level(logging.ERROR), + pytest.raises(InternalError) as exc_info, + ): + async for chunk in response.body_iterator: + chunks.append(chunk) # noqa: PERF401 + assert chunks == ['success chunk 1'] + assert exc_info.value == error + + +@pytest.mark.asyncio +async def test_rest_stream_error_handler_generator_unknown_error(caplog): + """Test rest_stream_error_handler catches unknown error during async generation.""" + + async def failing_generator(): + yield 'success chunk 1' + raise RuntimeError('Unknown stream failure') + + @rest_stream_error_handler + async def successful_prep_failing_stream(): + return MockEventSourceResponse(failing_generator()) + + response = await successful_prep_failing_stream() + + chunks = [] + with ( + caplog.at_level(logging.ERROR), + pytest.raises(RuntimeError, match='Unknown stream failure'), + ): + async for chunk in response.body_iterator: + chunks.append(chunk) # noqa: PERF401 + assert chunks == ['success chunk 1'] + assert 'Unknown streaming error occurred' in caplog.text + + +@pytest.mark.asyncio +async def test_rest_stream_error_handler_success(): + """Test rest_stream_error_handler on success.""" + + @rest_stream_error_handler + async def successful_stream(): + return 'success_stream' -def test_a2a_error_to_http_status_mapping(): - """Test A2AErrorToHttpStatus mapping.""" - assert A2AErrorToHttpStatus[InvalidRequestError] == 400 - assert A2AErrorToHttpStatus[MethodNotFoundError] == 404 - assert A2AErrorToHttpStatus[TaskNotFoundError] == 404 - assert A2AErrorToHttpStatus[InternalError] == 500 + result = await successful_stream() + assert result == 'success_stream' diff --git a/tests/utils/test_helpers.py b/tests/utils/test_helpers.py deleted file mode 100644 index dc5e0c000..000000000 --- a/tests/utils/test_helpers.py +++ /dev/null @@ -1,478 +0,0 @@ -import uuid - -from typing import Any -from unittest.mock import patch - -import pytest - -from a2a.types import ( - AgentCapabilities, - AgentCard, - AgentCardSignature, - AgentSkill, - Artifact, - Message, - MessageSendParams, - Part, - Role, - Task, - TaskArtifactUpdateEvent, - TaskState, - TextPart, -) -from a2a.utils.errors import ServerError -from a2a.utils.helpers import ( - _clean_empty, - append_artifact_to_task, - are_modalities_compatible, - build_text_artifact, - canonicalize_agent_card, - create_task_obj, - validate, -) - - -# --- Helper Data --- -TEXT_PART_DATA: dict[str, Any] = {'type': 'text', 'text': 'Hello'} - -MINIMAL_MESSAGE_USER: dict[str, Any] = { - 'role': 'user', - 'parts': [TEXT_PART_DATA], - 'message_id': 'msg-123', - 'type': 'message', -} - -MINIMAL_TASK_STATUS: dict[str, Any] = {'state': 'submitted'} - -MINIMAL_TASK: dict[str, Any] = { - 'id': 'task-abc', - 'context_id': 'session-xyz', - 'status': MINIMAL_TASK_STATUS, - 'type': 'task', -} - -SAMPLE_AGENT_CARD: dict[str, Any] = { - 'name': 'Test Agent', - 'description': 'A test agent', - 'url': 'http://localhost', - 'version': '1.0.0', - 'capabilities': AgentCapabilities( - streaming=None, - push_notifications=True, - ), - 'default_input_modes': ['text/plain'], - 'default_output_modes': ['text/plain'], - 'documentation_url': None, - 'icon_url': '', - 'skills': [ - AgentSkill( - id='skill1', - name='Test Skill', - description='A test skill', - tags=['test'], - ) - ], - 'signatures': [ - AgentCardSignature( - protected='protected_header', signature='test_signature' - ) - ], -} - - -# Test create_task_obj -def test_create_task_obj(): - message = Message(**MINIMAL_MESSAGE_USER) - send_params = MessageSendParams(message=message) - - task = create_task_obj(send_params) - assert task.id is not None - assert task.context_id == message.context_id - assert task.status.state == TaskState.submitted - assert len(task.history) == 1 - assert task.history[0] == message - - -def test_create_task_obj_generates_context_id(): - """Test that create_task_obj generates context_id if not present and uses it for the task.""" - # Message without context_id - message_no_context_id = Message( - role=Role.user, - parts=[Part(root=TextPart(text='test'))], - message_id='msg-no-ctx', - task_id='task-from-msg', # Provide a task_id to differentiate from generated task.id - ) - send_params = MessageSendParams(message=message_no_context_id) - - # Ensure message.context_id is None initially - assert send_params.message.context_id is None - - known_task_uuid = uuid.UUID('11111111-1111-1111-1111-111111111111') - known_context_uuid = uuid.UUID('22222222-2222-2222-2222-222222222222') - - # Patch uuid.uuid4 to return specific UUIDs in sequence - # The first call will be for message.context_id (if None), the second for task.id. - with patch( - 'a2a.utils.helpers.uuid4', - side_effect=[known_context_uuid, known_task_uuid], - ) as mock_uuid4: - task = create_task_obj(send_params) - - # Assert that uuid4 was called twice (once for context_id, once for task.id) - assert mock_uuid4.call_count == 2 - - # Assert that message.context_id was set to the first generated UUID - assert send_params.message.context_id == str(known_context_uuid) - - # Assert that task.context_id is the same generated UUID - assert task.context_id == str(known_context_uuid) - - # Assert that task.id is the second generated UUID - assert task.id == str(known_task_uuid) - - # Ensure the original message in history also has the updated context_id - assert len(task.history) == 1 - assert task.history[0].context_id == str(known_context_uuid) - - -# Test append_artifact_to_task -def test_append_artifact_to_task(): - # Prepare base task - task = Task(**MINIMAL_TASK) - assert task.id == 'task-abc' - assert task.context_id == 'session-xyz' - assert task.status.state == TaskState.submitted - assert task.history is None - assert task.artifacts is None - assert task.metadata is None - - # Prepare appending artifact and event - artifact_1 = Artifact( - artifact_id='artifact-123', parts=[Part(root=TextPart(text='Hello'))] - ) - append_event_1 = TaskArtifactUpdateEvent( - artifact=artifact_1, append=False, task_id='123', context_id='123' - ) - - # Test adding a new artifact (not appending) - append_artifact_to_task(task, append_event_1) - assert len(task.artifacts) == 1 - assert task.artifacts[0].artifact_id == 'artifact-123' - assert task.artifacts[0].name is None - assert len(task.artifacts[0].parts) == 1 - assert task.artifacts[0].parts[0].root.text == 'Hello' - - # Test replacing the artifact - artifact_2 = Artifact( - artifact_id='artifact-123', - name='updated name', - parts=[Part(root=TextPart(text='Updated'))], - ) - append_event_2 = TaskArtifactUpdateEvent( - artifact=artifact_2, append=False, task_id='123', context_id='123' - ) - append_artifact_to_task(task, append_event_2) - assert len(task.artifacts) == 1 # Should still have one artifact - assert task.artifacts[0].artifact_id == 'artifact-123' - assert task.artifacts[0].name == 'updated name' - assert len(task.artifacts[0].parts) == 1 - assert task.artifacts[0].parts[0].root.text == 'Updated' - - # Test appending parts to an existing artifact - artifact_with_parts = Artifact( - artifact_id='artifact-123', parts=[Part(root=TextPart(text='Part 2'))] - ) - append_event_3 = TaskArtifactUpdateEvent( - artifact=artifact_with_parts, - append=True, - task_id='123', - context_id='123', - ) - append_artifact_to_task(task, append_event_3) - assert len(task.artifacts[0].parts) == 2 - assert task.artifacts[0].parts[0].root.text == 'Updated' - assert task.artifacts[0].parts[1].root.text == 'Part 2' - - # Test adding another new artifact - another_artifact_with_parts = Artifact( - artifact_id='new_artifact', - parts=[Part(root=TextPart(text='new artifact Part 1'))], - ) - append_event_4 = TaskArtifactUpdateEvent( - artifact=another_artifact_with_parts, - append=False, - task_id='123', - context_id='123', - ) - append_artifact_to_task(task, append_event_4) - assert len(task.artifacts) == 2 - assert task.artifacts[0].artifact_id == 'artifact-123' - assert task.artifacts[1].artifact_id == 'new_artifact' - assert len(task.artifacts[0].parts) == 2 - assert len(task.artifacts[1].parts) == 1 - - # Test appending part to a task that does not have a matching artifact - non_existing_artifact_with_parts = Artifact( - artifact_id='artifact-456', parts=[Part(root=TextPart(text='Part 1'))] - ) - append_event_5 = TaskArtifactUpdateEvent( - artifact=non_existing_artifact_with_parts, - append=True, - task_id='123', - context_id='123', - ) - append_artifact_to_task(task, append_event_5) - assert len(task.artifacts) == 2 - assert len(task.artifacts[0].parts) == 2 - assert len(task.artifacts[1].parts) == 1 - - -# Test build_text_artifact -def test_build_text_artifact(): - artifact_id = 'text_artifact' - text = 'This is a sample text' - artifact = build_text_artifact(text, artifact_id) - - assert artifact.artifact_id == artifact_id - assert len(artifact.parts) == 1 - assert artifact.parts[0].root.text == text - - -# Test validate decorator -def test_validate_decorator(): - class TestClass: - condition = True - - @validate(lambda self: self.condition, 'Condition not met') - def test_method(self) -> str: - return 'Success' - - obj = TestClass() - - # Test passing condition - assert obj.test_method() == 'Success' - - # Test failing condition - obj.condition = False - with pytest.raises(ServerError) as exc_info: - obj.test_method() - assert 'Condition not met' in str(exc_info.value) - - -# Tests for are_modalities_compatible -def test_are_modalities_compatible_client_none(): - assert ( - are_modalities_compatible( - client_output_modes=None, server_output_modes=['text/plain'] - ) - is True - ) - - -def test_are_modalities_compatible_client_empty(): - assert ( - are_modalities_compatible( - client_output_modes=[], server_output_modes=['text/plain'] - ) - is True - ) - - -def test_are_modalities_compatible_server_none(): - assert ( - are_modalities_compatible( - server_output_modes=None, client_output_modes=['text/plain'] - ) - is True - ) - - -def test_are_modalities_compatible_server_empty(): - assert ( - are_modalities_compatible( - server_output_modes=[], client_output_modes=['text/plain'] - ) - is True - ) - - -def test_are_modalities_compatible_common_mode(): - assert ( - are_modalities_compatible( - server_output_modes=['text/plain', 'application/json'], - client_output_modes=['application/json', 'image/png'], - ) - is True - ) - - -def test_are_modalities_compatible_no_common_modes(): - assert ( - are_modalities_compatible( - server_output_modes=['text/plain'], - client_output_modes=['application/json'], - ) - is False - ) - - -def test_are_modalities_compatible_exact_match(): - assert ( - are_modalities_compatible( - server_output_modes=['text/plain'], - client_output_modes=['text/plain'], - ) - is True - ) - - -def test_are_modalities_compatible_server_more_but_common(): - assert ( - are_modalities_compatible( - server_output_modes=['text/plain', 'image/jpeg'], - client_output_modes=['text/plain'], - ) - is True - ) - - -def test_are_modalities_compatible_client_more_but_common(): - assert ( - are_modalities_compatible( - server_output_modes=['text/plain'], - client_output_modes=['text/plain', 'image/jpeg'], - ) - is True - ) - - -def test_are_modalities_compatible_both_none(): - assert ( - are_modalities_compatible( - server_output_modes=None, client_output_modes=None - ) - is True - ) - - -def test_are_modalities_compatible_both_empty(): - assert ( - are_modalities_compatible( - server_output_modes=[], client_output_modes=[] - ) - is True - ) - - -def test_canonicalize_agent_card(): - """Test canonicalize_agent_card with defaults, optionals, and exceptions. - - - extensions is omitted as it's not set and optional. - - protocolVersion is included because it's always added by canonicalize_agent_card. - - signatures should be omitted. - """ - agent_card = AgentCard(**SAMPLE_AGENT_CARD) - expected_jcs = ( - '{"capabilities":{"pushNotifications":true},' - '"defaultInputModes":["text/plain"],"defaultOutputModes":["text/plain"],' - '"description":"A test agent","name":"Test Agent",' - '"skills":[{"description":"A test skill","id":"skill1","name":"Test Skill","tags":["test"]}],' - '"url":"http://localhost","version":"1.0.0"}' - ) - result = canonicalize_agent_card(agent_card) - assert result == expected_jcs - - -def test_canonicalize_agent_card_preserves_false_capability(): - """Regression #692: streaming=False must not be stripped from canonical JSON.""" - card = AgentCard( - **{ - **SAMPLE_AGENT_CARD, - 'capabilities': AgentCapabilities( - streaming=False, - push_notifications=True, - ), - } - ) - result = canonicalize_agent_card(card) - assert '"streaming":false' in result - - -@pytest.mark.parametrize( - 'input_val', - [ - pytest.param({'a': ''}, id='empty-string'), - pytest.param({'a': []}, id='empty-list'), - pytest.param({'a': {}}, id='empty-dict'), - pytest.param({'a': {'b': []}}, id='nested-empty'), - pytest.param({'a': '', 'b': [], 'c': {}}, id='all-empties'), - pytest.param({'a': {'b': {'c': ''}}}, id='deeply-nested'), - ], -) -def test_clean_empty_removes_empties(input_val): - """_clean_empty removes empty strings, lists, and dicts recursively.""" - assert _clean_empty(input_val) is None - - -def test_clean_empty_top_level_list_becomes_none(): - """Top-level list that becomes empty after cleaning should return None.""" - assert _clean_empty(['', {}, []]) is None - - -@pytest.mark.parametrize( - 'input_val,expected', - [ - pytest.param({'retries': 0}, {'retries': 0}, id='int-zero'), - pytest.param({'enabled': False}, {'enabled': False}, id='bool-false'), - pytest.param({'score': 0.0}, {'score': 0.0}, id='float-zero'), - pytest.param([0, 1, 2], [0, 1, 2], id='zero-in-list'), - pytest.param([False, True], [False, True], id='false-in-list'), - pytest.param( - {'config': {'max_retries': 0, 'name': 'agent'}}, - {'config': {'max_retries': 0, 'name': 'agent'}}, - id='nested-zero', - ), - ], -) -def test_clean_empty_preserves_falsy_values(input_val, expected): - """_clean_empty preserves legitimate falsy values (0, False, 0.0).""" - assert _clean_empty(input_val) == expected - - -@pytest.mark.parametrize( - 'input_val,expected', - [ - pytest.param( - {'count': 0, 'label': '', 'items': []}, - {'count': 0}, - id='falsy-with-empties', - ), - pytest.param( - {'a': 0, 'b': 'hello', 'c': False, 'd': ''}, - {'a': 0, 'b': 'hello', 'c': False}, - id='mixed-types', - ), - pytest.param( - {'name': 'agent', 'retries': 0, 'tags': [], 'desc': ''}, - {'name': 'agent', 'retries': 0}, - id='realistic-mixed', - ), - ], -) -def test_clean_empty_mixed(input_val, expected): - """_clean_empty handles mixed empty and falsy values correctly.""" - assert _clean_empty(input_val) == expected - - -def test_clean_empty_does_not_mutate_input(): - """_clean_empty should not mutate the original input object.""" - original = {'a': '', 'b': 1, 'c': {'d': ''}} - original_copy = { - 'a': '', - 'b': 1, - 'c': {'d': ''}, - } - - _clean_empty(original) - - assert original == original_copy diff --git a/tests/utils/test_message.py b/tests/utils/test_message.py deleted file mode 100644 index 11523cbdf..000000000 --- a/tests/utils/test_message.py +++ /dev/null @@ -1,207 +0,0 @@ -import uuid - -from unittest.mock import patch - -from a2a.types import ( - DataPart, - Message, - Part, - Role, - TextPart, -) -from a2a.utils.message import ( - get_message_text, - new_agent_parts_message, - new_agent_text_message, -) - - -class TestNewAgentTextMessage: - def test_new_agent_text_message_basic(self): - # Setup - text = "Hello, I'm an agent" - - # Exercise - with a fixed uuid for testing - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = new_agent_text_message(text) - - # Verify - assert message.role == Role.agent - assert len(message.parts) == 1 - assert message.parts[0].root.text == text - assert message.message_id == '12345678-1234-5678-1234-567812345678' - assert message.task_id is None - assert message.context_id is None - - def test_new_agent_text_message_with_context_id(self): - # Setup - text = 'Message with context' - context_id = 'test-context-id' - - # Exercise - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = new_agent_text_message(text, context_id=context_id) - - # Verify - assert message.role == Role.agent - assert message.parts[0].root.text == text - assert message.message_id == '12345678-1234-5678-1234-567812345678' - assert message.context_id == context_id - assert message.task_id is None - - def test_new_agent_text_message_with_task_id(self): - # Setup - text = 'Message with task id' - task_id = 'test-task-id' - - # Exercise - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = new_agent_text_message(text, task_id=task_id) - - # Verify - assert message.role == Role.agent - assert message.parts[0].root.text == text - assert message.message_id == '12345678-1234-5678-1234-567812345678' - assert message.task_id == task_id - assert message.context_id is None - - def test_new_agent_text_message_with_both_ids(self): - # Setup - text = 'Message with both ids' - context_id = 'test-context-id' - task_id = 'test-task-id' - - # Exercise - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = new_agent_text_message( - text, context_id=context_id, task_id=task_id - ) - - # Verify - assert message.role == Role.agent - assert message.parts[0].root.text == text - assert message.message_id == '12345678-1234-5678-1234-567812345678' - assert message.context_id == context_id - assert message.task_id == task_id - - def test_new_agent_text_message_empty_text(self): - # Setup - text = '' - - # Exercise - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('12345678-1234-5678-1234-567812345678'), - ): - message = new_agent_text_message(text) - - # Verify - assert message.role == Role.agent - assert message.parts[0].root.text == '' - assert message.message_id == '12345678-1234-5678-1234-567812345678' - - -class TestNewAgentPartsMessage: - def test_new_agent_parts_message(self): - """Test creating an agent message with multiple, mixed parts.""" - # Setup - parts = [ - Part(root=TextPart(text='Here is some text.')), - Part(root=DataPart(data={'product_id': 123, 'quantity': 2})), - ] - context_id = 'ctx-multi-part' - task_id = 'task-multi-part' - - # Exercise - with patch( - 'uuid.uuid4', - return_value=uuid.UUID('abcdefab-cdef-abcd-efab-cdefabcdefab'), - ): - message = new_agent_parts_message( - parts, context_id=context_id, task_id=task_id - ) - - # Verify - assert message.role == Role.agent - assert message.parts == parts - assert message.context_id == context_id - assert message.task_id == task_id - assert message.message_id == 'abcdefab-cdef-abcd-efab-cdefabcdefab' - - -class TestGetMessageText: - def test_get_message_text_single_part(self): - # Setup - message = Message( - role=Role.agent, - parts=[Part(root=TextPart(text='Hello world'))], - message_id='test-message-id', - ) - - # Exercise - result = get_message_text(message) - - # Verify - assert result == 'Hello world' - - def test_get_message_text_multiple_parts(self): - # Setup - message = Message( - role=Role.agent, - parts=[ - Part(root=TextPart(text='First line')), - Part(root=TextPart(text='Second line')), - Part(root=TextPart(text='Third line')), - ], - message_id='test-message-id', - ) - - # Exercise - result = get_message_text(message) - - # Verify - default delimiter is newline - assert result == 'First line\nSecond line\nThird line' - - def test_get_message_text_custom_delimiter(self): - # Setup - message = Message( - role=Role.agent, - parts=[ - Part(root=TextPart(text='First part')), - Part(root=TextPart(text='Second part')), - Part(root=TextPart(text='Third part')), - ], - message_id='test-message-id', - ) - - # Exercise - result = get_message_text(message, delimiter=' | ') - - # Verify - assert result == 'First part | Second part | Third part' - - def test_get_message_text_empty_parts(self): - # Setup - message = Message( - role=Role.agent, - parts=[], - message_id='test-message-id', - ) - - # Exercise - result = get_message_text(message) - - # Verify - assert result == '' diff --git a/tests/utils/test_parts.py b/tests/utils/test_parts.py deleted file mode 100644 index dcb027c2b..000000000 --- a/tests/utils/test_parts.py +++ /dev/null @@ -1,184 +0,0 @@ -from a2a.types import ( - DataPart, - FilePart, - FileWithBytes, - FileWithUri, - Part, - TextPart, -) -from a2a.utils.parts import ( - get_data_parts, - get_file_parts, - get_text_parts, -) - - -class TestGetTextParts: - def test_get_text_parts_single_text_part(self): - # Setup - parts = [Part(root=TextPart(text='Hello world'))] - - # Exercise - result = get_text_parts(parts) - - # Verify - assert result == ['Hello world'] - - def test_get_text_parts_multiple_text_parts(self): - # Setup - parts = [ - Part(root=TextPart(text='First part')), - Part(root=TextPart(text='Second part')), - Part(root=TextPart(text='Third part')), - ] - - # Exercise - result = get_text_parts(parts) - - # Verify - assert result == ['First part', 'Second part', 'Third part'] - - def test_get_text_parts_empty_list(self): - # Setup - parts = [] - - # Exercise - result = get_text_parts(parts) - - # Verify - assert result == [] - - -class TestGetDataParts: - def test_get_data_parts_single_data_part(self): - # Setup - parts = [Part(root=DataPart(data={'key': 'value'}))] - - # Exercise - result = get_data_parts(parts) - - # Verify - assert result == [{'key': 'value'}] - - def test_get_data_parts_multiple_data_parts(self): - # Setup - parts = [ - Part(root=DataPart(data={'key1': 'value1'})), - Part(root=DataPart(data={'key2': 'value2'})), - ] - - # Exercise - result = get_data_parts(parts) - - # Verify - assert result == [{'key1': 'value1'}, {'key2': 'value2'}] - - def test_get_data_parts_mixed_parts(self): - # Setup - parts = [ - Part(root=TextPart(text='some text')), - Part(root=DataPart(data={'key1': 'value1'})), - Part(root=DataPart(data={'key2': 'value2'})), - ] - - # Exercise - result = get_data_parts(parts) - - # Verify - assert result == [{'key1': 'value1'}, {'key2': 'value2'}] - - def test_get_data_parts_no_data_parts(self): - # Setup - parts = [ - Part(root=TextPart(text='some text')), - ] - - # Exercise - result = get_data_parts(parts) - - # Verify - assert result == [] - - def test_get_data_parts_empty_list(self): - # Setup - parts = [] - - # Exercise - result = get_data_parts(parts) - - # Verify - assert result == [] - - -class TestGetFileParts: - def test_get_file_parts_single_file_part(self): - # Setup - file_with_uri = FileWithUri( - uri='file://path/to/file', mimeType='text/plain' - ) - parts = [Part(root=FilePart(file=file_with_uri))] - - # Exercise - result = get_file_parts(parts) - - # Verify - assert result == [file_with_uri] - - def test_get_file_parts_multiple_file_parts(self): - # Setup - file_with_uri1 = FileWithUri( - uri='file://path/to/file1', mime_type='text/plain' - ) - file_with_bytes = FileWithBytes( - bytes='ZmlsZSBjb250ZW50', - mime_type='application/octet-stream', # 'file content' - ) - parts = [ - Part(root=FilePart(file=file_with_uri1)), - Part(root=FilePart(file=file_with_bytes)), - ] - - # Exercise - result = get_file_parts(parts) - - # Verify - assert result == [file_with_uri1, file_with_bytes] - - def test_get_file_parts_mixed_parts(self): - # Setup - file_with_uri = FileWithUri( - uri='file://path/to/file', mime_type='text/plain' - ) - parts = [ - Part(root=TextPart(text='some text')), - Part(root=FilePart(file=file_with_uri)), - ] - - # Exercise - result = get_file_parts(parts) - - # Verify - assert result == [file_with_uri] - - def test_get_file_parts_no_file_parts(self): - # Setup - parts = [ - Part(root=TextPart(text='some text')), - Part(root=DataPart(data={'key': 'value'})), - ] - - # Exercise - result = get_file_parts(parts) - - # Verify - assert result == [] - - def test_get_file_parts_empty_list(self): - # Setup - parts = [] - - # Exercise - result = get_file_parts(parts) - - # Verify - assert result == [] diff --git a/tests/utils/test_proto_utils.py b/tests/utils/test_proto_utils.py index 7fc82aad7..6d251660b 100644 --- a/tests/utils/test_proto_utils.py +++ b/tests/utils/test_proto_utils.py @@ -1,376 +1,90 @@ -from unittest import mock +"""Tests for a2a.utils.proto_utils module. +This module tests the proto utilities including to_stream_response and dictionary normalization. +""" + +import httpx import pytest -from a2a import types -from a2a.grpc import a2a_pb2 +from google.protobuf.json_format import MessageToDict, Parse +from google.protobuf.message import Message as ProtobufMessage +from google.protobuf.timestamp_pb2 import Timestamp +from starlette.datastructures import QueryParams + +from a2a.types.a2a_pb2 import ( + AgentSkill, + ListTasksRequest, + Message, + Part, + Role, + StreamResponse, + Task, + TaskArtifactUpdateEvent, + TaskState, + TaskStatus, + TaskStatusUpdateEvent, +) from a2a.utils import proto_utils -from a2a.utils.errors import ServerError - - -# --- Test Data --- - - -@pytest.fixture -def sample_message() -> types.Message: - return types.Message( - message_id='msg-1', - context_id='ctx-1', - task_id='task-1', - role=types.Role.user, - parts=[ - types.Part(root=types.TextPart(text='Hello')), - types.Part( - root=types.FilePart( - file=types.FileWithUri( - uri='file:///test.txt', - name='test.txt', - mime_type='text/plain', - ), - ) - ), - types.Part(root=types.DataPart(data={'key': 'value'})), - ], - metadata={'source': 'test'}, - ) - - -@pytest.fixture -def sample_task(sample_message: types.Message) -> types.Task: - return types.Task( - id='task-1', - context_id='ctx-1', - status=types.TaskStatus( - state=types.TaskState.working, message=sample_message - ), - history=[sample_message], - artifacts=[ - types.Artifact( - artifact_id='art-1', - parts=[ - types.Part(root=types.TextPart(text='Artifact content')) - ], - ) - ], - metadata={'source': 'test'}, - ) - - -@pytest.fixture -def sample_agent_card() -> types.AgentCard: - return types.AgentCard( - name='Test Agent', - description='A test agent', - url='http://localhost', - version='1.0.0', - capabilities=types.AgentCapabilities( - streaming=True, push_notifications=True - ), - default_input_modes=['text/plain'], - default_output_modes=['text/plain'], - skills=[ - types.AgentSkill( - id='skill1', - name='Test Skill', - description='A test skill', - tags=['test'], - ) - ], - provider=types.AgentProvider( - organization='Test Org', url='http://test.org' - ), - security=[{'oauth_scheme': ['read', 'write']}], - security_schemes={ - 'oauth_scheme': types.SecurityScheme( - root=types.OAuth2SecurityScheme( - flows=types.OAuthFlows( - client_credentials=types.ClientCredentialsOAuthFlow( - token_url='http://token.url', - scopes={ - 'read': 'Read access', - 'write': 'Write access', - }, - ) - ) - ) - ), - 'apiKey': types.SecurityScheme( - root=types.APIKeySecurityScheme( - name='X-API-KEY', in_=types.In.header - ) - ), - 'httpAuth': types.SecurityScheme( - root=types.HTTPAuthSecurityScheme(scheme='bearer') - ), - 'oidc': types.SecurityScheme( - root=types.OpenIdConnectSecurityScheme( - open_id_connect_url='http://oidc.url' - ) - ), - }, - signatures=[ - types.AgentCardSignature( - protected='protected_test', - signature='signature_test', - header={'alg': 'ES256'}, - ), - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, - ), - ], - ) - - -# --- Test Cases --- - - -class TestToProto: - def test_part_unsupported_type(self): - """Test that ToProto.part raises ValueError for an unsupported Part type.""" - - class FakePartType: - kind = 'fake' +from a2a.utils.errors import InvalidParamsError - # Create a mock Part object that has a .root attribute pointing to the fake type - mock_part = mock.MagicMock(spec=types.Part) - mock_part.root = FakePartType() - with pytest.raises(ValueError, match='Unsupported part type'): - proto_utils.ToProto.part(mock_part) +class TestToStreamResponse: + """Tests for to_stream_response function.""" - -class TestFromProto: - def test_part_unsupported_type(self): - """Test that FromProto.part raises ValueError for an unsupported part type in proto.""" - unsupported_proto_part = ( - a2a_pb2.Part() - ) # An empty part with no oneof field set - with pytest.raises(ValueError, match='Unsupported part type'): - proto_utils.FromProto.part(unsupported_proto_part) - - def test_task_query_params_invalid_name(self): - request = a2a_pb2.GetTaskRequest(name='invalid-name-format') - with pytest.raises(ServerError) as exc_info: - proto_utils.FromProto.task_query_params(request) - assert isinstance(exc_info.value.error, types.InvalidParamsError) - - -class TestProtoUtils: - def test_roundtrip_message(self, sample_message: types.Message): - """Test conversion of Message to proto and back.""" - proto_msg = proto_utils.ToProto.message(sample_message) - assert isinstance(proto_msg, a2a_pb2.Message) - - # Test file part handling - assert proto_msg.content[1].file.file_with_uri == 'file:///test.txt' - assert proto_msg.content[1].file.mime_type == 'text/plain' - assert proto_msg.content[1].file.name == 'test.txt' - - roundtrip_msg = proto_utils.FromProto.message(proto_msg) - assert roundtrip_msg == sample_message - - def test_enum_conversions(self): - """Test conversions for all enum types.""" - assert ( - proto_utils.ToProto.role(types.Role.agent) - == a2a_pb2.Role.ROLE_AGENT + def test_stream_response_with_task(self): + """Test to_stream_response with a Task event.""" + task = Task( + id='task-1', + context_id='ctx-1', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) - assert ( - proto_utils.FromProto.role(a2a_pb2.Role.ROLE_USER) - == types.Role.user + result = proto_utils.to_stream_response(task) + + assert isinstance(result, StreamResponse) + assert result.HasField('task') + assert result.task.id == 'task-1' + + def test_stream_response_with_message(self): + """Test to_stream_response with a Message event.""" + message = Message( + message_id='msg-1', + role=Role.ROLE_AGENT, + parts=[Part(text='Hello')], ) - - for state in types.TaskState: - proto_state = proto_utils.ToProto.task_state(state) - assert proto_utils.FromProto.task_state(proto_state) == state - - # Test unknown state case - assert ( - proto_utils.FromProto.task_state( - a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED - ) - == types.TaskState.unknown + result = proto_utils.to_stream_response(message) + + assert isinstance(result, StreamResponse) + assert result.HasField('message') + assert result.message.message_id == 'msg-1' + + def test_stream_response_with_status_update(self): + """Test to_stream_response with a TaskStatusUpdateEvent.""" + status_update = TaskStatusUpdateEvent( + task_id='task-1', + context_id='ctx-1', + status=TaskStatus(state=TaskState.TASK_STATE_WORKING), ) - assert ( - proto_utils.ToProto.task_state(types.TaskState.unknown) - == a2a_pb2.TaskState.TASK_STATE_UNSPECIFIED - ) - - def test_oauth_flows_conversion(self): - """Test conversion of different OAuth2 flows.""" - # Test password flow - password_flow = types.OAuthFlows( - password=types.PasswordOAuthFlow( - token_url='http://token.url', scopes={'read': 'Read'} - ) - ) - proto_password_flow = proto_utils.ToProto.oauth2_flows(password_flow) - assert proto_password_flow.HasField('password') - - # Test implicit flow - implicit_flow = types.OAuthFlows( - implicit=types.ImplicitOAuthFlow( - authorization_url='http://auth.url', scopes={'read': 'Read'} - ) - ) - proto_implicit_flow = proto_utils.ToProto.oauth2_flows(implicit_flow) - assert proto_implicit_flow.HasField('implicit') - - # Test authorization code flow - auth_code_flow = types.OAuthFlows( - authorization_code=types.AuthorizationCodeOAuthFlow( - authorization_url='http://auth.url', - token_url='http://token.url', - scopes={'read': 'read'}, - ) - ) - proto_auth_code_flow = proto_utils.ToProto.oauth2_flows(auth_code_flow) - assert proto_auth_code_flow.HasField('authorization_code') - - # Test invalid flow - with pytest.raises(ValueError): - proto_utils.ToProto.oauth2_flows(types.OAuthFlows()) + result = proto_utils.to_stream_response(status_update) - # Test FromProto - roundtrip_password = proto_utils.FromProto.oauth2_flows( - proto_password_flow - ) - assert roundtrip_password.password is not None + assert isinstance(result, StreamResponse) + assert result.HasField('status_update') + assert result.status_update.task_id == 'task-1' - roundtrip_implicit = proto_utils.FromProto.oauth2_flows( - proto_implicit_flow + def test_stream_response_with_artifact_update(self): + """Test to_stream_response with a TaskArtifactUpdateEvent.""" + artifact_update = TaskArtifactUpdateEvent( + task_id='task-1', + context_id='ctx-1', ) - assert roundtrip_implicit.implicit is not None - - def test_task_id_params_from_proto_invalid_name(self): - request = a2a_pb2.CancelTaskRequest(name='invalid-name-format') - with pytest.raises(ServerError) as exc_info: - proto_utils.FromProto.task_id_params(request) - assert isinstance(exc_info.value.error, types.InvalidParamsError) - - def test_task_push_config_from_proto_invalid_parent(self): - request = a2a_pb2.TaskPushNotificationConfig(name='invalid-name-format') - with pytest.raises(ServerError) as exc_info: - proto_utils.FromProto.task_push_notification_config(request) - assert isinstance(exc_info.value.error, types.InvalidParamsError) - - def test_none_handling(self): - """Test that None inputs are handled gracefully.""" - assert proto_utils.ToProto.message(None) is None - assert proto_utils.ToProto.metadata(None) is None - assert proto_utils.ToProto.provider(None) is None - assert proto_utils.ToProto.security(None) is None - assert proto_utils.ToProto.security_schemes(None) is None - - def test_metadata_conversion(self): - """Test metadata conversion with various data types.""" - metadata = { - 'null_value': None, - 'bool_value': True, - 'int_value': 42, - 'float_value': 3.14, - 'string_value': 'hello', - 'dict_value': {'nested': 'dict', 'count': 10}, - 'list_value': [1, 'two', 3.0, True, None], - 'tuple_value': (1, 2, 3), - 'complex_list': [ - {'name': 'item1', 'values': [1, 2, 3]}, - {'name': 'item2', 'values': [4, 5, 6]}, - ], - } + result = proto_utils.to_stream_response(artifact_update) - # Convert to proto - proto_metadata = proto_utils.ToProto.metadata(metadata) - assert proto_metadata is not None - - # Convert back to Python - roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) - - # Verify all values are preserved correctly - assert roundtrip_metadata['null_value'] is None - assert roundtrip_metadata['bool_value'] is True - assert roundtrip_metadata['int_value'] == 42 - assert roundtrip_metadata['float_value'] == 3.14 - assert roundtrip_metadata['string_value'] == 'hello' - assert roundtrip_metadata['dict_value']['nested'] == 'dict' - assert roundtrip_metadata['dict_value']['count'] == 10 - assert roundtrip_metadata['list_value'] == [1, 'two', 3.0, True, None] - assert roundtrip_metadata['tuple_value'] == [ - 1, - 2, - 3, - ] # tuples become lists - assert len(roundtrip_metadata['complex_list']) == 2 - assert roundtrip_metadata['complex_list'][0]['name'] == 'item1' - - def test_metadata_with_custom_objects(self): - """Test metadata conversion with custom objects using preprocessing utility.""" + assert isinstance(result, StreamResponse) + assert result.HasField('artifact_update') + assert result.artifact_update.task_id == 'task-1' - class CustomObject: - def __str__(self): - return 'custom_object_str' - def __repr__(self): - return 'CustomObject()' - - metadata = { - 'custom_obj': CustomObject(), - 'list_with_custom': [1, CustomObject(), 'text'], - 'nested_custom': {'obj': CustomObject(), 'normal': 'value'}, - } - - # Use preprocessing utility to make it serializable - serializable_metadata = proto_utils.make_dict_serializable(metadata) - - # Convert to proto - proto_metadata = proto_utils.ToProto.metadata(serializable_metadata) - assert proto_metadata is not None - - # Convert back to Python - roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) - - # Custom objects should be converted to strings - assert roundtrip_metadata['custom_obj'] == 'custom_object_str' - assert roundtrip_metadata['list_with_custom'] == [ - 1, - 'custom_object_str', - 'text', - ] - assert roundtrip_metadata['nested_custom']['obj'] == 'custom_object_str' - assert roundtrip_metadata['nested_custom']['normal'] == 'value' - - def test_metadata_edge_cases(self): - """Test metadata conversion with edge cases.""" - metadata = { - 'empty_dict': {}, - 'empty_list': [], - 'zero': 0, - 'false': False, - 'empty_string': '', - 'unicode_string': 'string test', - 'safe_number': 9007199254740991, # JavaScript MAX_SAFE_INTEGER - 'negative_number': -42, - 'float_precision': 0.123456789, - 'numeric_string': '12345', - } - - # Convert to proto and back - proto_metadata = proto_utils.ToProto.metadata(metadata) - roundtrip_metadata = proto_utils.FromProto.metadata(proto_metadata) - - # Verify edge cases are handled correctly - assert roundtrip_metadata['empty_dict'] == {} - assert roundtrip_metadata['empty_list'] == [] - assert roundtrip_metadata['zero'] == 0 - assert roundtrip_metadata['false'] is False - assert roundtrip_metadata['empty_string'] == '' - assert roundtrip_metadata['unicode_string'] == 'string test' - assert roundtrip_metadata['safe_number'] == 9007199254740991 - assert roundtrip_metadata['negative_number'] == -42 - assert abs(roundtrip_metadata['float_precision'] - 0.123456789) < 1e-10 - assert roundtrip_metadata['numeric_string'] == '12345' +class TestDictSerialization: + """Tests for serialization utility functions.""" def test_make_dict_serializable(self): """Test the make_dict_serializable utility function.""" @@ -393,17 +107,15 @@ def __str__(self): result = proto_utils.make_dict_serializable(test_data) - # Basic types should be unchanged assert result['string'] == 'hello' assert result['int'] == 42 assert result['float'] == 3.14 assert result['bool'] is True assert result['none'] is None - # Custom objects should be converted to strings assert result['custom'] == 'custom_str' assert result['list'] == [1, 'two', 'custom_str'] - assert result['tuple'] == [1, 2, 'custom_str'] # tuples become lists + assert result['tuple'] == [1, 2, 'custom_str'] assert result['nested']['inner_custom'] == 'custom_str' assert result['nested']['inner_normal'] == 'value' @@ -412,7 +124,7 @@ def test_normalize_large_integers_to_strings(self): test_data = { 'small_int': 42, - 'large_int': 9999999999999999999, # > 15 digits + 'large_int': 9999999999999999999, 'negative_large': -9999999999999999999, 'float': 3.14, 'string': 'hello', @@ -422,24 +134,17 @@ def test_normalize_large_integers_to_strings(self): result = proto_utils.normalize_large_integers_to_strings(test_data) - # Small integers should remain as integers assert result['small_int'] == 42 assert isinstance(result['small_int'], int) - # Large integers should be converted to strings assert result['large_int'] == '9999999999999999999' assert isinstance(result['large_int'], str) assert result['negative_large'] == '-9999999999999999999' assert isinstance(result['negative_large'], str) - # Other types should be unchanged assert result['float'] == 3.14 assert result['string'] == 'hello' - - # Lists should be processed recursively assert result['list'] == [123, '9999999999999999999', 'text'] - - # Nested dicts should be processed recursively assert result['nested']['inner_large'] == '9999999999999999999' assert result['nested']['inner_small'] == 100 @@ -448,11 +153,11 @@ def test_parse_string_integers_in_dict(self): test_data = { 'regular_string': 'hello', - 'numeric_string_small': '123', # small, should stay as string - 'numeric_string_large': '9999999999999999999', # > 15 digits, should become int + 'numeric_string_small': '123', + 'numeric_string_large': '9999999999999999999', 'negative_large_string': '-9999999999999999999', - 'float_string': '3.14', # not all digits, should stay as string - 'mixed_string': '123abc', # not all digits, should stay as string + 'float_string': '3.14', + 'mixed_string': '123abc', 'int': 42, 'list': ['hello', '9999999999999999999', '123'], 'nested': { @@ -463,226 +168,112 @@ def test_parse_string_integers_in_dict(self): result = proto_utils.parse_string_integers_in_dict(test_data) - # Regular strings should remain unchanged assert result['regular_string'] == 'hello' - assert ( - result['numeric_string_small'] == '123' - ) # too small, stays string - assert result['float_string'] == '3.14' # not all digits - assert result['mixed_string'] == '123abc' # not all digits + assert result['numeric_string_small'] == '123' + assert result['float_string'] == '3.14' + assert result['mixed_string'] == '123abc' - # Large numeric strings should be converted to integers assert result['numeric_string_large'] == 9999999999999999999 assert isinstance(result['numeric_string_large'], int) assert result['negative_large_string'] == -9999999999999999999 assert isinstance(result['negative_large_string'], int) - # Other types should be unchanged assert result['int'] == 42 - - # Lists should be processed recursively assert result['list'] == ['hello', 9999999999999999999, '123'] - - # Nested dicts should be processed recursively assert result['nested']['inner_large_string'] == 9999999999999999999 - assert result['nested']['inner_regular'] == 'value' - def test_large_integer_roundtrip_with_utilities(self): - """Test large integer handling with preprocessing and post-processing utilities.""" - original_data = { - 'large_int': 9999999999999999999, - 'small_int': 42, - 'nested': {'another_large': 12345678901234567890, 'normal': 'text'}, - } +class TestRestParams: + """Unit tests for REST parameter conversion.""" - # Step 1: Preprocess to convert large integers to strings - preprocessed = proto_utils.normalize_large_integers_to_strings( - original_data + def test_rest_params_roundtrip(self): + """Test the comprehensive roundtrip conversion for REST parameters.""" + + original = ListTasksRequest( + tenant='tenant-1', + context_id='ctx-1', + status=TaskState.TASK_STATE_WORKING, + page_size=10, + include_artifacts=True, + status_timestamp_after=Parse('"2024-03-09T16:00:00Z"', Timestamp()), + history_length=5, ) - # Step 2: Convert to proto - proto_metadata = proto_utils.ToProto.metadata(preprocessed) - assert proto_metadata is not None + query_params = self._message_to_rest_params(original) - # Step 3: Convert back from proto - dict_from_proto = proto_utils.FromProto.metadata(proto_metadata) + assert dict(query_params) == { + 'tenant': 'tenant-1', + 'contextId': 'ctx-1', + 'status': 'TASK_STATE_WORKING', + 'pageSize': '10', + 'includeArtifacts': 'true', + 'statusTimestampAfter': '2024-03-09T16:00:00Z', + 'historyLength': '5', + } - # Step 4: Post-process to convert large integer strings back to integers - final_result = proto_utils.parse_string_integers_in_dict( - dict_from_proto - ) + converted = ListTasksRequest() + proto_utils.parse_params(QueryParams(query_params), converted) - # Verify roundtrip preserved the original data - assert final_result['large_int'] == 9999999999999999999 - assert isinstance(final_result['large_int'], int) - assert final_result['small_int'] == 42 - assert final_result['nested']['another_large'] == 12345678901234567890 - assert isinstance(final_result['nested']['another_large'], int) - assert final_result['nested']['normal'] == 'text' - - def test_task_conversion_roundtrip( - self, sample_task: types.Task, sample_message: types.Message - ): - """Test conversion of Task to proto and back.""" - proto_task = proto_utils.ToProto.task(sample_task) - assert isinstance(proto_task, a2a_pb2.Task) - - roundtrip_task = proto_utils.FromProto.task(proto_task) - assert roundtrip_task.id == 'task-1' - assert roundtrip_task.context_id == 'ctx-1' - assert roundtrip_task.status == types.TaskStatus( - state=types.TaskState.working, message=sample_message - ) - assert roundtrip_task.history == sample_task.history - assert roundtrip_task.artifacts == [ - types.Artifact( - artifact_id='art-1', - description='', - metadata={}, - name='', - parts=[ - types.Part(root=types.TextPart(text='Artifact content')) - ], - ) - ] - assert roundtrip_task.metadata == {'source': 'test'} - - def test_agent_card_conversion_roundtrip( - self, sample_agent_card: types.AgentCard - ): - """Test conversion of AgentCard to proto and back.""" - proto_card = proto_utils.ToProto.agent_card(sample_agent_card) - assert isinstance(proto_card, a2a_pb2.AgentCard) - - roundtrip_card = proto_utils.FromProto.agent_card(proto_card) - assert roundtrip_card.name == 'Test Agent' - assert roundtrip_card.description == 'A test agent' - assert roundtrip_card.url == 'http://localhost' - assert roundtrip_card.version == '1.0.0' - assert roundtrip_card.capabilities == types.AgentCapabilities( - extensions=[], streaming=True, push_notifications=True - ) - assert roundtrip_card.default_input_modes == ['text/plain'] - assert roundtrip_card.default_output_modes == ['text/plain'] - assert roundtrip_card.skills == [ - types.AgentSkill( - id='skill1', - name='Test Skill', - description='A test skill', - tags=['test'], - examples=[], - input_modes=[], - output_modes=[], - ) - ] - assert roundtrip_card.provider == types.AgentProvider( - organization='Test Org', url='http://test.org' - ) - assert roundtrip_card.security == [{'oauth_scheme': ['read', 'write']}] - - # Normalized version of security_schemes. None fields are filled with defaults. - expected_security_schemes = { - 'oauth_scheme': types.SecurityScheme( - root=types.OAuth2SecurityScheme( - description='', - flows=types.OAuthFlows( - client_credentials=types.ClientCredentialsOAuthFlow( - refresh_url='', - scopes={ - 'write': 'Write access', - 'read': 'Read access', - }, - token_url='http://token.url', - ), - ), - ) - ), - 'apiKey': types.SecurityScheme( - root=types.APIKeySecurityScheme( - description='', - in_=types.In.header, - name='X-API-KEY', - ) - ), - 'httpAuth': types.SecurityScheme( - root=types.HTTPAuthSecurityScheme( - bearer_format='', - description='', - scheme='bearer', - ) - ), - 'oidc': types.SecurityScheme( - root=types.OpenIdConnectSecurityScheme( - description='', - open_id_connect_url='http://oidc.url', - ) - ), - } - assert roundtrip_card.security_schemes == expected_security_schemes - assert roundtrip_card.signatures == [ - types.AgentCardSignature( - protected='protected_test', - signature='signature_test', - header={'alg': 'ES256'}, - ), - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header={'alg': 'ES256', 'kid': 'unique-key-identifier-123'}, - ), - ] + assert converted == original @pytest.mark.parametrize( - 'signature_data, expected_data', + 'query_string', [ - ( - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header={'alg': 'ES256'}, - ), - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header={'alg': 'ES256'}, - ), - ), - ( - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header=None, - ), - types.AgentCardSignature( - protected='protected_val', - signature='signature_val', - header={}, - ), - ), - ( - types.AgentCardSignature( - protected='', - signature='', - header={}, - ), - types.AgentCardSignature( - protected='', - signature='', - header={}, - ), - ), + 'id=skill-1&tags=tag1&tags=tag2&tags=tag3', + 'id=skill-1&tags=tag1,tag2,tag3', ], ) - def test_agent_card_signature_conversion_roundtrip( - self, signature_data, expected_data - ): - """Test conversion of AgentCardSignature to proto and back.""" - proto_signature = proto_utils.ToProto.agent_card_signature( - signature_data + def test_repeated_fields_parsing(self, query_string: str): + """Test parsing of repeated fields using different query string formats.""" + query_params = QueryParams(query_string) + + converted = AgentSkill() + proto_utils.parse_params(query_params, converted) + + assert converted == AgentSkill( + id='skill-1', tags=['tag1', 'tag2', 'tag3'] ) - assert isinstance(proto_signature, a2a_pb2.AgentCardSignature) - roundtrip_signature = proto_utils.FromProto.agent_card_signature( - proto_signature + + def _message_to_rest_params(self, message: ProtobufMessage) -> QueryParams: + """Converts a message to REST query parameters.""" + rest_dict = MessageToDict(message) + return httpx.Request( + 'GET', 'http://api.example.com', params=rest_dict + ).url.params + + +class TestValidateProtoRequiredFields: + """Tests for validate_proto_required_fields function.""" + + def test_valid_required_fields(self): + """Test with all required fields present.""" + msg = Message( + message_id='msg-1', + role=Role.ROLE_USER, + parts=[Part(text='hello')], ) - assert roundtrip_signature == expected_data + proto_utils.validate_proto_required_fields(msg) + + def test_missing_required_fields(self): + """Test with empty message raising InvalidParamsError containing all errors.""" + msg = Message() + with pytest.raises(InvalidParamsError) as exc_info: + proto_utils.validate_proto_required_fields(msg) + + err = exc_info.value + errors = err.data.get('errors', []) if err.data else [] + + assert {e['field'] for e in errors} == {'message_id', 'role', 'parts'} + + def test_nested_required_fields(self): + """Test nested required fields inside TaskStatus.""" + # Task Status requires 'state' + task = Task(id='task-1', status=TaskStatus()) + with pytest.raises(InvalidParamsError) as exc_info: + proto_utils.validate_proto_required_fields(task) + + err = exc_info.value + errors = err.data.get('errors', []) if err.data else [] + + fields = [e['field'] for e in errors] + assert 'status.state' in fields diff --git a/tests/utils/test_signing.py b/tests/utils/test_signing.py index 9a843d340..2a09943fe 100644 --- a/tests/utils/test_signing.py +++ b/tests/utils/test_signing.py @@ -1,23 +1,19 @@ -from a2a.types import ( - AgentCard, - AgentCapabilities, - AgentSkill, -) -from a2a.types import ( +import pytest +from cryptography.hazmat.primitives.asymmetric import ec +from jwt.utils import base64url_encode +from typing import Any + +from a2a.types.a2a_pb2 import ( AgentCard, AgentCapabilities, AgentSkill, AgentCardSignature, + AgentInterface, ) from a2a.utils import signing -from typing import Any -from jwt.utils import base64url_encode - -import pytest -from cryptography.hazmat.primitives import asymmetric -def create_key_provider(verification_key: str | bytes | dict[str, Any]): +def create_key_provider(verification_key: Any): """Creates a key provider function for testing.""" def key_provider(kid: str | None, jku: str | None): @@ -26,13 +22,17 @@ def key_provider(kid: str | None, jku: str | None): return key_provider -# Fixture for a complete sample AgentCard @pytest.fixture def sample_agent_card() -> AgentCard: return AgentCard( name='Test Agent', description='A test agent', - url='http://localhost', + supported_interfaces=[ + AgentInterface( + url='http://localhost', + protocol_binding='HTTP+JSON', + ) + ], version='1.0.0', capabilities=AgentCapabilities( streaming=None, @@ -55,7 +55,7 @@ def sample_agent_card() -> AgentCard: def test_signer_and_verifier_symmetric(sample_agent_card: AgentCard): """Test the agent card signing and verification process with symmetric key encryption.""" - key = 'key12345' # Using a simple symmetric key for HS256 + key = 'key12345' wrong_key = 'wrongkey' agent_card_signer = signing.create_agent_card_signer( @@ -75,7 +75,6 @@ def test_signer_and_verifier_symmetric(sample_agent_card: AgentCard): assert signature.protected is not None assert signature.signature is not None - # Verify the signature verifier = signing.create_signature_verifier( create_key_provider(key), ['HS256', 'HS384', 'ES256', 'RS256'] ) @@ -84,7 +83,6 @@ def test_signer_and_verifier_symmetric(sample_agent_card: AgentCard): except signing.InvalidSignaturesError: pytest.fail('Signature verification failed with correct key') - # Verify with wrong key verifier_wrong_key = signing.create_signature_verifier( create_key_provider(wrong_key), ['HS256', 'HS384', 'ES256', 'RS256'] ) @@ -96,14 +94,18 @@ def test_signer_and_verifier_symmetric_multiple_signatures( sample_agent_card: AgentCard, ): """Test the agent card signing and verification process with symmetric key encryption. - This test adds a signatures to the AgentCard before signing.""" + This test adds a signature to the AgentCard before signing.""" encoded_header = base64url_encode( b'{"alg": "HS256", "kid": "old_key"}' ).decode('utf-8') - sample_agent_card.signatures = [ - AgentCardSignature(protected=encoded_header, signature='old_signature') - ] - key = 'key12345' # Using a simple symmetric key for HS256 + sample_agent_card.signatures.extend( + [ + AgentCardSignature( + protected=encoded_header, signature='old_signature' + ) + ] + ) + key = 'key12345' wrong_key = 'wrongkey' agent_card_signer = signing.create_agent_card_signer( @@ -123,7 +125,6 @@ def test_signer_and_verifier_symmetric_multiple_signatures( assert signature.protected is not None assert signature.signature is not None - # Verify the signature verifier = signing.create_signature_verifier( create_key_provider(key), ['HS256', 'HS384', 'ES256', 'RS256'] ) @@ -132,7 +133,6 @@ def test_signer_and_verifier_symmetric_multiple_signatures( except signing.InvalidSignaturesError: pytest.fail('Signature verification failed with correct key') - # Verify with wrong key verifier_wrong_key = signing.create_signature_verifier( create_key_provider(wrong_key), ['HS256', 'HS384', 'ES256', 'RS256'] ) @@ -142,13 +142,9 @@ def test_signer_and_verifier_symmetric_multiple_signatures( def test_signer_and_verifier_asymmetric(sample_agent_card: AgentCard): """Test the agent card signing and verification process with an asymmetric key encryption.""" - # Generate a dummy EC private key for ES256 - private_key = asymmetric.ec.generate_private_key(asymmetric.ec.SECP256R1()) + private_key = ec.generate_private_key(ec.SECP256R1()) public_key = private_key.public_key() - # Generate another key pair for negative test - private_key_error = asymmetric.ec.generate_private_key( - asymmetric.ec.SECP256R1() - ) + private_key_error = ec.generate_private_key(ec.SECP256R1()) public_key_error = private_key_error.public_key() agent_card_signer = signing.create_agent_card_signer( @@ -176,10 +172,117 @@ def test_signer_and_verifier_asymmetric(sample_agent_card: AgentCard): except signing.InvalidSignaturesError: pytest.fail('Signature verification failed with correct key') - # Verify with wrong key verifier_wrong_key = signing.create_signature_verifier( create_key_provider(public_key_error), ['HS256', 'HS384', 'ES256', 'RS256'], ) with pytest.raises(signing.InvalidSignaturesError): verifier_wrong_key(signed_card) + + +def test_canonicalize_agent_card(sample_agent_card: AgentCard): + """Test canonicalize_agent_card with defaults, optionals, and exceptions. + + - extensions is omitted as it's not set and optional. + - protocolVersion is included because it's always added by canonicalize_agent_card. + - signatures should be omitted. + """ + expected_jcs = ( + '{"capabilities":{"pushNotifications":true},' + '"defaultInputModes":["text/plain"],"defaultOutputModes":["text/plain"],' + '"description":"A test agent","name":"Test Agent",' + '"skills":[{"description":"A test skill","id":"skill1","name":"Test Skill","tags":["test"]}],' + '"supportedInterfaces":[{"protocolBinding":"HTTP+JSON","url":"http://localhost"}],' + '"version":"1.0.0"}' + ) + result = signing._canonicalize_agent_card(sample_agent_card) + assert result == expected_jcs + + +def test_canonicalize_agent_card_preserves_false_capability( + sample_agent_card: AgentCard, +): + """Regression #692: streaming=False must not be stripped from canonical JSON.""" + sample_agent_card.capabilities.streaming = False + result = signing._canonicalize_agent_card(sample_agent_card) + assert '"streaming":false' in result + + +@pytest.mark.parametrize( + 'input_val', + [ + pytest.param({'a': ''}, id='empty-string'), + pytest.param({'a': []}, id='empty-list'), + pytest.param({'a': {}}, id='empty-dict'), + pytest.param({'a': {'b': []}}, id='nested-empty'), + pytest.param({'a': '', 'b': [], 'c': {}}, id='all-empties'), + pytest.param({'a': {'b': {'c': ''}}}, id='deeply-nested'), + ], +) +def test_clean_empty_removes_empties(input_val): + """_clean_empty removes empty strings, lists, and dicts recursively.""" + assert signing._clean_empty(input_val) is None + + +def test_clean_empty_top_level_list_becomes_none(): + """Top-level list that becomes empty after cleaning should return None.""" + assert signing._clean_empty(['', {}, []]) is None + + +@pytest.mark.parametrize( + 'input_val,expected', + [ + pytest.param({'retries': 0}, {'retries': 0}, id='int-zero'), + pytest.param({'enabled': False}, {'enabled': False}, id='bool-false'), + pytest.param({'score': 0.0}, {'score': 0.0}, id='float-zero'), + pytest.param([0, 1, 2], [0, 1, 2], id='zero-in-list'), + pytest.param([False, True], [False, True], id='false-in-list'), + pytest.param( + {'config': {'max_retries': 0, 'name': 'agent'}}, + {'config': {'max_retries': 0, 'name': 'agent'}}, + id='nested-zero', + ), + ], +) +def test_clean_empty_preserves_falsy_values(input_val, expected): + """_clean_empty preserves legitimate falsy values (0, False, 0.0).""" + assert signing._clean_empty(input_val) == expected + + +@pytest.mark.parametrize( + 'input_val,expected', + [ + pytest.param( + {'count': 0, 'label': '', 'items': []}, + {'count': 0}, + id='falsy-with-empties', + ), + pytest.param( + {'a': 0, 'b': 'hello', 'c': False, 'd': ''}, + {'a': 0, 'b': 'hello', 'c': False}, + id='mixed-types', + ), + pytest.param( + {'name': 'agent', 'retries': 0, 'tags': [], 'desc': ''}, + {'name': 'agent', 'retries': 0}, + id='realistic-mixed', + ), + ], +) +def test_clean_empty_mixed(input_val, expected): + """_clean_empty handles mixed empty and falsy values correctly.""" + assert signing._clean_empty(input_val) == expected + + +def test_clean_empty_does_not_mutate_input(): + """_clean_empty should not mutate the original input object.""" + original = {'a': '', 'b': 1, 'c': {'d': ''}} + original_copy = { + 'a': '', + 'b': 1, + 'c': {'d': ''}, + } + + signing._clean_empty(original) + + assert original == original_copy diff --git a/tests/utils/test_task.py b/tests/utils/test_task.py index cb3dc3868..55dc8ed4f 100644 --- a/tests/utils/test_task.py +++ b/tests/utils/test_task.py @@ -5,188 +5,91 @@ import pytest -from a2a.types import Artifact, Message, Part, Role, TextPart -from a2a.utils.task import completed_task, new_task +from a2a.types.a2a_pb2 import ( + Artifact, + Message, + Part, + Role, + TaskState, + GetTaskRequest, + SendMessageConfiguration, +) +from a2a.helpers.proto_helpers import new_task +from a2a.utils.task import ( + apply_history_length, + decode_page_token, + encode_page_token, +) +from a2a.utils.errors import InvalidParamsError class TestTask(unittest.TestCase): - def test_new_task_status(self): - message = Message( - role=Role.user, - parts=[Part(root=TextPart(text='test message'))], - message_id=str(uuid.uuid4()), - ) - task = new_task(message) - self.assertEqual(task.status.state.value, 'submitted') - - @patch('uuid.uuid4') - def test_new_task_generates_ids(self, mock_uuid4): - mock_uuid = uuid.UUID('12345678-1234-5678-1234-567812345678') - mock_uuid4.return_value = mock_uuid - message = Message( - role=Role.user, - parts=[Part(root=TextPart(text='test message'))], - message_id=str(uuid.uuid4()), - ) - task = new_task(message) - self.assertEqual(task.id, str(mock_uuid)) - self.assertEqual(task.context_id, str(mock_uuid)) - - def test_new_task_uses_provided_ids(self): - task_id = str(uuid.uuid4()) - context_id = str(uuid.uuid4()) - message = Message( - role=Role.user, - parts=[Part(root=TextPart(text='test message'))], - message_id=str(uuid.uuid4()), - task_id=task_id, - context_id=context_id, - ) - task = new_task(message) - self.assertEqual(task.id, task_id) - self.assertEqual(task.context_id, context_id) - - def test_new_task_initial_message_in_history(self): - message = Message( - role=Role.user, - parts=[Part(root=TextPart(text='test message'))], - message_id=str(uuid.uuid4()), - ) - task = new_task(message) - self.assertEqual(len(task.history), 1) - self.assertEqual(task.history[0], message) - - def test_completed_task_status(self): - task_id = str(uuid.uuid4()) - context_id = str(uuid.uuid4()) - artifacts = [ - Artifact( - artifact_id='artifact_1', - parts=[Part(root=TextPart(text='some content'))], - ) - ] - task = completed_task( - task_id=task_id, - context_id=context_id, - artifacts=artifacts, - history=[], - ) - self.assertEqual(task.status.state.value, 'completed') - - def test_completed_task_assigns_ids_and_artifacts(self): - task_id = str(uuid.uuid4()) - context_id = str(uuid.uuid4()) - artifacts = [ - Artifact( - artifact_id='artifact_1', - parts=[Part(root=TextPart(text='some content'))], - ) - ] - task = completed_task( - task_id=task_id, - context_id=context_id, - artifacts=artifacts, - history=[], - ) - self.assertEqual(task.id, task_id) - self.assertEqual(task.context_id, context_id) - self.assertEqual(task.artifacts, artifacts) - - def test_completed_task_empty_history_if_not_provided(self): - task_id = str(uuid.uuid4()) - context_id = str(uuid.uuid4()) - artifacts = [ - Artifact( - artifact_id='artifact_1', - parts=[Part(root=TextPart(text='some content'))], - ) - ] - task = completed_task( - task_id=task_id, context_id=context_id, artifacts=artifacts + page_token = 'd47a95ba-0f39-4459-965b-3923cdd2ff58' + encoded_page_token = 'ZDQ3YTk1YmEtMGYzOS00NDU5LTk2NWItMzkyM2NkZDJmZjU4' # base64 for 'd47a95ba-0f39-4459-965b-3923cdd2ff58' + + def test_encode_page_token(self): + assert encode_page_token(self.page_token) == self.encoded_page_token + + def test_decode_page_token_succeeds(self): + assert decode_page_token(self.encoded_page_token) == self.page_token + + def test_decode_page_token_fails(self): + with pytest.raises(InvalidParamsError) as excinfo: + decode_page_token('invalid') + + assert 'Token is not a valid base64-encoded cursor.' in str( + excinfo.value ) - self.assertEqual(task.history, []) - - def test_completed_task_uses_provided_history(self): - task_id = str(uuid.uuid4()) - context_id = str(uuid.uuid4()) - artifacts = [ - Artifact( - artifact_id='artifact_1', - parts=[Part(root=TextPart(text='some content'))], - ) - ] - history = [ - Message( - role=Role.user, - parts=[Part(root=TextPart(text='Hello'))], - message_id=str(uuid.uuid4()), - ), + + +class TestApplyHistoryLength(unittest.TestCase): + def setUp(self): + self.history = [ Message( - role=Role.agent, - parts=[Part(root=TextPart(text='Hi there'))], - message_id=str(uuid.uuid4()), - ), + message_id=str(i), + role=Role.ROLE_USER, + parts=[Part(text=f'msg {i}')], + ) + for i in range(5) ] - task = completed_task( - task_id=task_id, - context_id=context_id, + artifacts = [Artifact(artifact_id='a1', parts=[Part(text='a')])] + self.task = new_task( + task_id='t1', + context_id='c1', + state=TaskState.TASK_STATE_COMPLETED, artifacts=artifacts, - history=history, + history=self.history, ) - self.assertEqual(task.history, history) - - def test_new_task_invalid_message_empty_parts(self): - with self.assertRaises(ValueError): - new_task( - Message( - role=Role.user, - parts=[], - message_id=str(uuid.uuid4()), - ) - ) - def test_new_task_invalid_message_empty_content(self): - with self.assertRaises(ValueError): - new_task( - Message( - role=Role.user, - parts=[Part(root=TextPart(text=''))], - messageId=str(uuid.uuid4()), - ) - ) + def test_none_config_returns_full_history(self): + result = apply_history_length(self.task, None) + self.assertEqual(len(result.history), 5) + self.assertEqual(result.history, self.history) - def test_new_task_invalid_message_none_role(self): - with self.assertRaises(TypeError): - msg = Message.model_construct( - role=None, - parts=[Part(root=TextPart(text='test message'))], - message_id=str(uuid.uuid4()), - ) - new_task(msg) - - def test_completed_task_empty_artifacts(self): - with pytest.raises( - ValueError, - match='artifacts must be a non-empty list of Artifact objects', - ): - completed_task( - task_id='task-123', - context_id='ctx-456', - artifacts=[], - history=[], - ) + def test_unset_history_length_returns_full_history(self): + result = apply_history_length(self.task, GetTaskRequest()) + self.assertEqual(len(result.history), 5) + self.assertEqual(result.history, self.history) - def test_completed_task_invalid_artifact_type(self): - with pytest.raises( - ValueError, - match='artifacts must be a non-empty list of Artifact objects', - ): - completed_task( - task_id='task-123', - context_id='ctx-456', - artifacts=['not an artifact'], - history=[], - ) + def test_positive_history_length_truncates(self): + result = apply_history_length( + self.task, GetTaskRequest(history_length=2) + ) + self.assertEqual(len(result.history), 2) + self.assertEqual(result.history, self.history[-2:]) + + def test_large_history_length_returns_full_history(self): + result = apply_history_length( + self.task, GetTaskRequest(history_length=10) + ) + self.assertEqual(len(result.history), 5) + self.assertEqual(result.history, self.history) + + def test_zero_history_length_returns_empty_history(self): + result = apply_history_length( + self.task, SendMessageConfiguration(history_length=0) + ) + self.assertEqual(len(result.history), 0) if __name__ == '__main__': diff --git a/tests/utils/test_version_validation.py b/tests/utils/test_version_validation.py new file mode 100644 index 000000000..b2ae0594e --- /dev/null +++ b/tests/utils/test_version_validation.py @@ -0,0 +1,167 @@ +"""Tests for version validation decorators.""" + +import pytest +from unittest.mock import MagicMock + +from a2a.server.context import ServerCallContext +from a2a.utils import constants +from a2a.utils.errors import VersionNotSupportedError +from a2a.utils.version_validator import validate_version + + +class TestHandler: + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def async_method(self, request, context: ServerCallContext): + return 'success' + + @validate_version(constants.PROTOCOL_VERSION_1_0) + async def async_gen_method(self, request, context: ServerCallContext): + yield 'success' + + @validate_version(constants.PROTOCOL_VERSION_0_3) + async def compat_method(self, request, context: ServerCallContext): + return 'success' + + +@pytest.mark.asyncio +async def test_validate_version_success(): + handler = TestHandler() + context = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0'}} + ) + + result = await handler.async_method(None, context) + assert result == 'success' + + +@pytest.mark.asyncio +async def test_validate_version_case_insensitive(): + handler = TestHandler() + # Test lowercase header name + context = ServerCallContext( + state={'headers': {constants.VERSION_HEADER.lower(): '1.0'}} + ) + + result = await handler.async_method(None, context) + assert result == 'success' + + +@pytest.mark.asyncio +async def test_validate_version_mismatch(): + handler = TestHandler() + context = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '0.3'}} + ) + + with pytest.raises(VersionNotSupportedError) as excinfo: + await handler.async_method(None, context) + assert "A2A version '0.3' is not supported" in str(excinfo.value) + + +@pytest.mark.asyncio +async def test_validate_version_missing_defaults_to_0_3(): + handler = TestHandler() + context = ServerCallContext(state={'headers': {}}) + + # Missing header should be interpreted as 0.3. + # Since async_method expects 1.0, it should fail. + with pytest.raises(VersionNotSupportedError) as excinfo: + await handler.async_method(None, context) + assert "A2A version '0.3' is not supported" in str(excinfo.value) + + # But compat_method expects 0.3, so it should succeed. + result = await handler.compat_method(None, context) + assert result == 'success' + + +@pytest.mark.asyncio +async def test_validate_version_async_gen_success(): + handler = TestHandler() + context = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0'}} + ) + + results = [] + async for item in handler.async_gen_method(None, context): + results.append(item) + + assert results == ['success'] + + +@pytest.mark.asyncio +async def test_validate_version_async_gen_failure(): + handler = TestHandler() + context = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '0.3'}} + ) + + with pytest.raises(VersionNotSupportedError): + async for _ in handler.async_gen_method(None, context): + pass + + +@pytest.mark.asyncio +async def test_validate_version_no_context(): + handler = TestHandler() + + # If no context is found, it should default to allowing the call (for safety/backward compatibility with non-context methods) + # although in our actual handlers context will be there. + result = await handler.async_method(None, None) + assert result == 'success' + + +@pytest.mark.asyncio +async def test_validate_version_ignore_minor_patch(): + handler = TestHandler() + + # 1.0.1 should match 1.0 + context_patch = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0.1'}} + ) + result = await handler.async_method(None, context_patch) + assert result == 'success' + + # 1.0.0 should match 1.0 + context_zero_patch = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0.0'}} + ) + result = await handler.async_method(None, context_zero_patch) + assert result == 'success' + + # 1.1.0 should match 1.0 + context_diff_minor = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.1.0'}} + ) + result = await handler.async_method(None, context_diff_minor) + assert result == 'success' + + # 2.0.0 should NOT match 1.0 + context_diff_major = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '2.0.0'}} + ) + with pytest.raises(VersionNotSupportedError): + await handler.async_method(None, context_diff_major) + + +@pytest.mark.asyncio +async def test_validate_version_handler_expects_patch(): + class PatchHandler: + @validate_version('1.0.2') + async def method(self, request, context: ServerCallContext): + return 'success' + + handler = PatchHandler() + + # 1.0 should match 1.0.2 + context_no_patch = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0'}} + ) + result = await handler.method(None, context_no_patch) + assert result == 'success' + + # 1.0.5 should match 1.0.2 + context_diff_patch = ServerCallContext( + state={'headers': {constants.VERSION_HEADER: '1.0.5'}} + ) + result = await handler.method(None, context_diff_patch) + assert result == 'success' diff --git a/uv.lock b/uv.lock index 778082c04..dc87a7b6d 100644 --- a/uv.lock +++ b/uv.lock @@ -12,20 +12,25 @@ resolution-markers = [ name = "a2a-sdk" source = { editable = "." } dependencies = [ + { name = "culsans", marker = "python_full_version < '3.13'" }, { name = "google-api-core" }, + { name = "googleapis-common-protos" }, { name = "httpx" }, { name = "httpx-sse" }, + { name = "json-rpc" }, + { name = "packaging" }, { name = "protobuf" }, { name = "pydantic" }, ] [package.optional-dependencies] all = [ + { name = "alembic" }, { name = "cryptography" }, - { name = "fastapi" }, { name = "google-cloud-aiplatform" }, { name = "grpcio" }, { name = "grpcio-reflection" }, + { name = "grpcio-status" }, { name = "grpcio-tools" }, { name = "opentelemetry-api" }, { name = "opentelemetry-sdk" }, @@ -34,16 +39,19 @@ all = [ { name = "sse-starlette" }, { name = "starlette" }, ] +db-cli = [ + { name = "alembic" }, +] encryption = [ { name = "cryptography" }, ] grpc = [ { name = "grpcio" }, { name = "grpcio-reflection" }, + { name = "grpcio-status" }, { name = "grpcio-tools" }, ] http-server = [ - { name = "fastapi" }, { name = "sse-starlette" }, { name = "starlette" }, ] @@ -73,9 +81,11 @@ vertex = [ [package.dev-dependencies] dev = [ { name = "a2a-sdk", extra = ["all"] }, - { name = "datamodel-code-generator" }, + { name = "fastapi" }, { name = "mypy" }, { name = "pre-commit" }, + { name = "pyjwt" }, + { name = "pyright" }, { name = "pytest" }, { name = "pytest-asyncio" }, { name = "pytest-cov" }, @@ -93,25 +103,31 @@ dev = [ [package.metadata] requires-dist = [ + { name = "alembic", marker = "extra == 'all'", specifier = ">=1.14.0" }, + { name = "alembic", marker = "extra == 'db-cli'", specifier = ">=1.14.0" }, { name = "cryptography", marker = "extra == 'all'", specifier = ">=43.0.0" }, { name = "cryptography", marker = "extra == 'encryption'", specifier = ">=43.0.0" }, - { name = "fastapi", marker = "extra == 'all'", specifier = ">=0.115.2" }, - { name = "fastapi", marker = "extra == 'http-server'", specifier = ">=0.115.2" }, + { name = "culsans", marker = "python_full_version < '3.13'", specifier = ">=0.11.0" }, { name = "google-api-core", specifier = ">=1.26.0" }, { name = "google-cloud-aiplatform", marker = "extra == 'all'", specifier = ">=1.140.0" }, { name = "google-cloud-aiplatform", marker = "extra == 'vertex'", specifier = ">=1.140.0" }, + { name = "googleapis-common-protos", specifier = ">=1.70.0" }, { name = "grpcio", marker = "extra == 'all'", specifier = ">=1.60" }, { name = "grpcio", marker = "extra == 'grpc'", specifier = ">=1.60" }, { name = "grpcio-reflection", marker = "extra == 'all'", specifier = ">=1.7.0" }, { name = "grpcio-reflection", marker = "extra == 'grpc'", specifier = ">=1.7.0" }, + { name = "grpcio-status", marker = "extra == 'all'", specifier = ">=1.60" }, + { name = "grpcio-status", marker = "extra == 'grpc'", specifier = ">=1.60" }, { name = "grpcio-tools", marker = "extra == 'all'", specifier = ">=1.60" }, { name = "grpcio-tools", marker = "extra == 'grpc'", specifier = ">=1.60" }, { name = "httpx", specifier = ">=0.28.1" }, { name = "httpx-sse", specifier = ">=0.4.0" }, + { name = "json-rpc", specifier = ">=1.15.0" }, { name = "opentelemetry-api", marker = "extra == 'all'", specifier = ">=1.33.0" }, { name = "opentelemetry-api", marker = "extra == 'telemetry'", specifier = ">=1.33.0" }, { name = "opentelemetry-sdk", marker = "extra == 'all'", specifier = ">=1.33.0" }, { name = "opentelemetry-sdk", marker = "extra == 'telemetry'", specifier = ">=1.33.0" }, + { name = "packaging", specifier = ">=24.0" }, { name = "protobuf", specifier = ">=5.29.5" }, { name = "pydantic", specifier = ">=2.11.3" }, { name = "pyjwt", marker = "extra == 'all'", specifier = ">=2.0.0" }, @@ -130,14 +146,16 @@ requires-dist = [ { name = "starlette", marker = "extra == 'all'" }, { name = "starlette", marker = "extra == 'http-server'" }, ] -provides-extras = ["all", "encryption", "grpc", "http-server", "mysql", "postgresql", "signing", "sql", "sqlite", "telemetry", "vertex"] +provides-extras = ["all", "db-cli", "encryption", "grpc", "http-server", "mysql", "postgresql", "signing", "sql", "sqlite", "telemetry", "vertex"] [package.metadata.requires-dev] dev = [ { name = "a2a-sdk", extras = ["all"], editable = "." }, - { name = "datamodel-code-generator", specifier = ">=0.30.0" }, + { name = "fastapi", specifier = ">=0.115.2" }, { name = "mypy", specifier = ">=1.15.0" }, { name = "pre-commit" }, + { name = "pyjwt", specifier = ">=2.0.0" }, + { name = "pyright" }, { name = "pytest", specifier = ">=8.3.5" }, { name = "pytest-asyncio", specifier = ">=0.26.0" }, { name = "pytest-cov", specifier = ">=6.1.1" }, @@ -153,28 +171,54 @@ dev = [ { name = "uvicorn", specifier = ">=0.35.0" }, ] +[[package]] +name = "aiologic" +version = "0.16.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "sniffio", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, + { name = "wrapt", marker = "python_full_version < '3.13'" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/a8/13/50b91a3ea6b030d280d2654be97c48b6ed81753a50286ee43c646ba36d3c/aiologic-0.16.0.tar.gz", hash = "sha256:c267ccbd3ff417ec93e78d28d4d577ccca115d5797cdbd16785a551d9658858f", size = 225952, upload-time = "2025-11-27T23:48:41.195Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f6/27/206615942005471499f6fbc36621582e24d0686f33c74b2d018fcfd4fe67/aiologic-0.16.0-py3-none-any.whl", hash = "sha256:e00ce5f68c5607c864d26aec99c0a33a83bdf8237aa7312ffbb96805af67d8b6", size = 135193, upload-time = "2025-11-27T23:48:40.099Z" }, +] + [[package]] name = "aiomysql" -version = "0.2.0" +version = "0.3.2" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "pymysql" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/67/76/2c5b55e4406a1957ffdfd933a94c2517455291c97d2b81cec6813754791a/aiomysql-0.2.0.tar.gz", hash = "sha256:558b9c26d580d08b8c5fd1be23c5231ce3aeff2dadad989540fee740253deb67", size = 114706, upload-time = "2023-06-11T19:57:53.608Z" } +sdist = { url = "https://files.pythonhosted.org/packages/29/e0/302aeffe8d90853556f47f3106b89c16cc2ec2a4d269bdfd82e3f4ae12cc/aiomysql-0.3.2.tar.gz", hash = "sha256:72d15ef5cfc34c03468eb41e1b90adb9fd9347b0b589114bd23ead569a02ac1a", size = 108311, upload-time = "2025-10-22T00:15:21.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/42/87/c982ee8b333c85b8ae16306387d703a1fcdfc81a2f3f15a24820ab1a512d/aiomysql-0.2.0-py3-none-any.whl", hash = "sha256:b7c26da0daf23a5ec5e0b133c03d20657276e4eae9b73e040b72787f6f6ade0a", size = 44215, upload-time = "2023-06-11T19:57:51.09Z" }, + { url = "https://files.pythonhosted.org/packages/4c/af/aae0153c3e28712adaf462328f6c7a3c196a1c1c27b491de4377dd3e6b52/aiomysql-0.3.2-py3-none-any.whl", hash = "sha256:c82c5ba04137d7afd5c693a258bea8ead2aad77101668044143a991e04632eb2", size = 71834, upload-time = "2025-10-22T00:15:15.905Z" }, ] [[package]] name = "aiosqlite" -version = "0.21.0" +version = "0.22.1" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/8a/64761f4005f17809769d23e518d915db74e6310474e733e3593cfc854ef1/aiosqlite-0.22.1.tar.gz", hash = "sha256:043e0bd78d32888c0a9ca90fc788b38796843360c855a7262a532813133a0650", size = 14821, upload-time = "2025-12-23T19:25:43.997Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/00/b7/e3bf5133d697a08128598c8d0abc5e16377b51465a33756de24fa7dee953/aiosqlite-0.22.1-py3-none-any.whl", hash = "sha256:21c002eb13823fad740196c5a2e9d8e62f6243bd9e7e4a1f87fb5e44ecb4fceb", size = 17405, upload-time = "2025-12-23T19:25:42.139Z" }, +] + +[[package]] +name = "alembic" +version = "1.18.4" source = { registry = "https://pypi.org/simple" } dependencies = [ + { name = "mako" }, + { name = "sqlalchemy" }, + { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/13/7d/8bca2bf9a247c2c5dfeec1d7a5f40db6518f88d314b8bca9da29670d2671/aiosqlite-0.21.0.tar.gz", hash = "sha256:131bb8056daa3bc875608c631c678cda73922a2d4ba8aec373b19f18c17e7aa3", size = 13454, upload-time = "2025-02-03T07:30:16.235Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/13/8b084e0f2efb0275a1d534838844926f798bd766566b1375174e2448cd31/alembic-1.18.4.tar.gz", hash = "sha256:cb6e1fd84b6174ab8dbb2329f86d631ba9559dd78df550b57804d607672cedbc", size = 2056725, upload-time = "2026-02-10T16:00:47.195Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f5/10/6c25ed6de94c49f88a91fa5018cb4c0f3625f31d5be9f771ebe5cc7cd506/aiosqlite-0.21.0-py3-none-any.whl", hash = "sha256:2549cf4057f95f53dcba16f2b64e8e2791d7e1adedb13197dd8ed77bb226d7d0", size = 15792, upload-time = "2025-02-03T07:30:13.6Z" }, + { url = "https://files.pythonhosted.org/packages/d2/29/6533c317b74f707ea28f8d633734dbda2119bbadfc61b2f3640ba835d0f7/alembic-1.18.4-py3-none-any.whl", hash = "sha256:a5ed4adcf6d8a4cb575f3d759f071b03cd6e5c7618eb796cb52497be25bfe19a", size = 263893, upload-time = "2026-02-10T16:00:49.997Z" }, ] [[package]] @@ -197,26 +241,16 @@ wheels = [ [[package]] name = "anyio" -version = "4.9.0" +version = "4.12.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "exceptiongroup", marker = "python_full_version < '3.11'" }, { name = "idna" }, - { name = "sniffio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/95/7d/4c1bd541d4dffa1b52bd83fb8527089e097a106fc90b467a7313b105f840/anyio-4.9.0.tar.gz", hash = "sha256:673c0c244e15788651a4ff38710fea9675823028a6f08a5eda409e0c9840a028", size = 190949, upload-time = "2025-03-17T00:02:54.77Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/a1/ee/48ca1a7c89ffec8b6a0c5d02b89c305671d5ffd8d3c94acf8b8c408575bb/anyio-4.9.0-py3-none-any.whl", hash = "sha256:9f76d541cad6e36af7beb62e978876f3b41e3e04f2c1fbf0884604c0a9c4d93c", size = 100916, upload-time = "2025-03-17T00:02:52.713Z" }, -] - -[[package]] -name = "argcomplete" -version = "3.6.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/16/0f/861e168fc813c56a78b35f3c30d91c6757d1fd185af1110f1aec784b35d0/argcomplete-3.6.2.tar.gz", hash = "sha256:d0519b1bc867f5f4f4713c41ad0aba73a4a5f007449716b16f385f2166dc6adf", size = 73403, upload-time = "2025-04-03T04:57:03.52Z" } +sdist = { url = "https://files.pythonhosted.org/packages/96/f0/5eb65b2bb0d09ac6776f2eb54adee6abe8228ea05b20a5ad0e4945de8aac/anyio-4.12.1.tar.gz", hash = "sha256:41cfcc3a4c85d3f05c932da7c26d0201ac36f72abd4435ba90d0464a3ffed703", size = 228685, upload-time = "2026-01-06T11:45:21.246Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/31/da/e42d7a9d8dd33fa775f467e4028a47936da2f01e4b0e561f9ba0d74cb0ca/argcomplete-3.6.2-py3-none-any.whl", hash = "sha256:65b3133a29ad53fb42c48cf5114752c7ab66c1c38544fdf6460f450c09b42591", size = 43708, upload-time = "2025-04-03T04:57:01.591Z" }, + { url = "https://files.pythonhosted.org/packages/38/0e/27be9fdef66e72d64c0cdc3cc2823101b80585f8119b5c112c2e8f5f7dab/anyio-4.12.1-py3-none-any.whl", hash = "sha256:d405828884fc140aa80a3c667b8beed277f1dfedec42ba031bd6ac3db606ab6c", size = 113592, upload-time = "2026-01-06T11:45:19.497Z" }, ] [[package]] @@ -230,54 +264,70 @@ wheels = [ [[package]] name = "asyncpg" -version = "0.30.0" +version = "0.31.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "async-timeout", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/2f/4c/7c991e080e106d854809030d8584e15b2e996e26f16aee6d757e387bc17d/asyncpg-0.30.0.tar.gz", hash = "sha256:c551e9928ab6707602f44811817f82ba3c446e018bfe1d3abecc8ba5f3eac851", size = 957746, upload-time = "2024-10-20T00:30:41.127Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/bb/07/1650a8c30e3a5c625478fa8aafd89a8dd7d85999bf7169b16f54973ebf2c/asyncpg-0.30.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:bfb4dd5ae0699bad2b233672c8fc5ccbd9ad24b89afded02341786887e37927e", size = 673143, upload-time = "2024-10-20T00:29:08.846Z" }, - { url = "https://files.pythonhosted.org/packages/a0/9a/568ff9b590d0954553c56806766914c149609b828c426c5118d4869111d3/asyncpg-0.30.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:dc1f62c792752a49f88b7e6f774c26077091b44caceb1983509edc18a2222ec0", size = 645035, upload-time = "2024-10-20T00:29:12.02Z" }, - { url = "https://files.pythonhosted.org/packages/de/11/6f2fa6c902f341ca10403743701ea952bca896fc5b07cc1f4705d2bb0593/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:3152fef2e265c9c24eec4ee3d22b4f4d2703d30614b0b6753e9ed4115c8a146f", size = 2912384, upload-time = "2024-10-20T00:29:13.644Z" }, - { url = "https://files.pythonhosted.org/packages/83/83/44bd393919c504ffe4a82d0aed8ea0e55eb1571a1dea6a4922b723f0a03b/asyncpg-0.30.0-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c7255812ac85099a0e1ffb81b10dc477b9973345793776b128a23e60148dd1af", size = 2947526, upload-time = "2024-10-20T00:29:15.871Z" }, - { url = "https://files.pythonhosted.org/packages/08/85/e23dd3a2b55536eb0ded80c457b0693352262dc70426ef4d4a6fc994fa51/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:578445f09f45d1ad7abddbff2a3c7f7c291738fdae0abffbeb737d3fc3ab8b75", size = 2895390, upload-time = "2024-10-20T00:29:19.346Z" }, - { url = "https://files.pythonhosted.org/packages/9b/26/fa96c8f4877d47dc6c1864fef5500b446522365da3d3d0ee89a5cce71a3f/asyncpg-0.30.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c42f6bb65a277ce4d93f3fba46b91a265631c8df7250592dd4f11f8b0152150f", size = 3015630, upload-time = "2024-10-20T00:29:21.186Z" }, - { url = "https://files.pythonhosted.org/packages/34/00/814514eb9287614188a5179a8b6e588a3611ca47d41937af0f3a844b1b4b/asyncpg-0.30.0-cp310-cp310-win32.whl", hash = "sha256:aa403147d3e07a267ada2ae34dfc9324e67ccc4cdca35261c8c22792ba2b10cf", size = 568760, upload-time = "2024-10-20T00:29:22.769Z" }, - { url = "https://files.pythonhosted.org/packages/f0/28/869a7a279400f8b06dd237266fdd7220bc5f7c975348fea5d1e6909588e9/asyncpg-0.30.0-cp310-cp310-win_amd64.whl", hash = "sha256:fb622c94db4e13137c4c7f98834185049cc50ee01d8f657ef898b6407c7b9c50", size = 625764, upload-time = "2024-10-20T00:29:25.882Z" }, - { url = "https://files.pythonhosted.org/packages/4c/0e/f5d708add0d0b97446c402db7e8dd4c4183c13edaabe8a8500b411e7b495/asyncpg-0.30.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:5e0511ad3dec5f6b4f7a9e063591d407eee66b88c14e2ea636f187da1dcfff6a", size = 674506, upload-time = "2024-10-20T00:29:27.988Z" }, - { url = "https://files.pythonhosted.org/packages/6a/a0/67ec9a75cb24a1d99f97b8437c8d56da40e6f6bd23b04e2f4ea5d5ad82ac/asyncpg-0.30.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:915aeb9f79316b43c3207363af12d0e6fd10776641a7de8a01212afd95bdf0ed", size = 645922, upload-time = "2024-10-20T00:29:29.391Z" }, - { url = "https://files.pythonhosted.org/packages/5c/d9/a7584f24174bd86ff1053b14bb841f9e714380c672f61c906eb01d8ec433/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c198a00cce9506fcd0bf219a799f38ac7a237745e1d27f0e1f66d3707c84a5a", size = 3079565, upload-time = "2024-10-20T00:29:30.832Z" }, - { url = "https://files.pythonhosted.org/packages/a0/d7/a4c0f9660e333114bdb04d1a9ac70db690dd4ae003f34f691139a5cbdae3/asyncpg-0.30.0-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3326e6d7381799e9735ca2ec9fd7be4d5fef5dcbc3cb555d8a463d8460607956", size = 3109962, upload-time = "2024-10-20T00:29:33.114Z" }, - { url = "https://files.pythonhosted.org/packages/3c/21/199fd16b5a981b1575923cbb5d9cf916fdc936b377e0423099f209e7e73d/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:51da377487e249e35bd0859661f6ee2b81db11ad1f4fc036194bc9cb2ead5056", size = 3064791, upload-time = "2024-10-20T00:29:34.677Z" }, - { url = "https://files.pythonhosted.org/packages/77/52/0004809b3427534a0c9139c08c87b515f1c77a8376a50ae29f001e53962f/asyncpg-0.30.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:bc6d84136f9c4d24d358f3b02be4b6ba358abd09f80737d1ac7c444f36108454", size = 3188696, upload-time = "2024-10-20T00:29:36.389Z" }, - { url = "https://files.pythonhosted.org/packages/52/cb/fbad941cd466117be58b774a3f1cc9ecc659af625f028b163b1e646a55fe/asyncpg-0.30.0-cp311-cp311-win32.whl", hash = "sha256:574156480df14f64c2d76450a3f3aaaf26105869cad3865041156b38459e935d", size = 567358, upload-time = "2024-10-20T00:29:37.915Z" }, - { url = "https://files.pythonhosted.org/packages/3c/0a/0a32307cf166d50e1ad120d9b81a33a948a1a5463ebfa5a96cc5606c0863/asyncpg-0.30.0-cp311-cp311-win_amd64.whl", hash = "sha256:3356637f0bd830407b5597317b3cb3571387ae52ddc3bca6233682be88bbbc1f", size = 629375, upload-time = "2024-10-20T00:29:39.987Z" }, - { url = "https://files.pythonhosted.org/packages/4b/64/9d3e887bb7b01535fdbc45fbd5f0a8447539833b97ee69ecdbb7a79d0cb4/asyncpg-0.30.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:c902a60b52e506d38d7e80e0dd5399f657220f24635fee368117b8b5fce1142e", size = 673162, upload-time = "2024-10-20T00:29:41.88Z" }, - { url = "https://files.pythonhosted.org/packages/6e/eb/8b236663f06984f212a087b3e849731f917ab80f84450e943900e8ca4052/asyncpg-0.30.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:aca1548e43bbb9f0f627a04666fedaca23db0a31a84136ad1f868cb15deb6e3a", size = 637025, upload-time = "2024-10-20T00:29:43.352Z" }, - { url = "https://files.pythonhosted.org/packages/cc/57/2dc240bb263d58786cfaa60920779af6e8d32da63ab9ffc09f8312bd7a14/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6c2a2ef565400234a633da0eafdce27e843836256d40705d83ab7ec42074efb3", size = 3496243, upload-time = "2024-10-20T00:29:44.922Z" }, - { url = "https://files.pythonhosted.org/packages/f4/40/0ae9d061d278b10713ea9021ef6b703ec44698fe32178715a501ac696c6b/asyncpg-0.30.0-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1292b84ee06ac8a2ad8e51c7475aa309245874b61333d97411aab835c4a2f737", size = 3575059, upload-time = "2024-10-20T00:29:46.891Z" }, - { url = "https://files.pythonhosted.org/packages/c3/75/d6b895a35a2c6506952247640178e5f768eeb28b2e20299b6a6f1d743ba0/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:0f5712350388d0cd0615caec629ad53c81e506b1abaaf8d14c93f54b35e3595a", size = 3473596, upload-time = "2024-10-20T00:29:49.201Z" }, - { url = "https://files.pythonhosted.org/packages/c8/e7/3693392d3e168ab0aebb2d361431375bd22ffc7b4a586a0fc060d519fae7/asyncpg-0.30.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:db9891e2d76e6f425746c5d2da01921e9a16b5a71a1c905b13f30e12a257c4af", size = 3641632, upload-time = "2024-10-20T00:29:50.768Z" }, - { url = "https://files.pythonhosted.org/packages/32/ea/15670cea95745bba3f0352341db55f506a820b21c619ee66b7d12ea7867d/asyncpg-0.30.0-cp312-cp312-win32.whl", hash = "sha256:68d71a1be3d83d0570049cd1654a9bdfe506e794ecc98ad0873304a9f35e411e", size = 560186, upload-time = "2024-10-20T00:29:52.394Z" }, - { url = "https://files.pythonhosted.org/packages/7e/6b/fe1fad5cee79ca5f5c27aed7bd95baee529c1bf8a387435c8ba4fe53d5c1/asyncpg-0.30.0-cp312-cp312-win_amd64.whl", hash = "sha256:9a0292c6af5c500523949155ec17b7fe01a00ace33b68a476d6b5059f9630305", size = 621064, upload-time = "2024-10-20T00:29:53.757Z" }, - { url = "https://files.pythonhosted.org/packages/3a/22/e20602e1218dc07692acf70d5b902be820168d6282e69ef0d3cb920dc36f/asyncpg-0.30.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:05b185ebb8083c8568ea8a40e896d5f7af4b8554b64d7719c0eaa1eb5a5c3a70", size = 670373, upload-time = "2024-10-20T00:29:55.165Z" }, - { url = "https://files.pythonhosted.org/packages/3d/b3/0cf269a9d647852a95c06eb00b815d0b95a4eb4b55aa2d6ba680971733b9/asyncpg-0.30.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:c47806b1a8cbb0a0db896f4cd34d89942effe353a5035c62734ab13b9f938da3", size = 634745, upload-time = "2024-10-20T00:29:57.14Z" }, - { url = "https://files.pythonhosted.org/packages/8e/6d/a4f31bf358ce8491d2a31bfe0d7bcf25269e80481e49de4d8616c4295a34/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:9b6fde867a74e8c76c71e2f64f80c64c0f3163e687f1763cfaf21633ec24ec33", size = 3512103, upload-time = "2024-10-20T00:29:58.499Z" }, - { url = "https://files.pythonhosted.org/packages/96/19/139227a6e67f407b9c386cb594d9628c6c78c9024f26df87c912fabd4368/asyncpg-0.30.0-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:46973045b567972128a27d40001124fbc821c87a6cade040cfcd4fa8a30bcdc4", size = 3592471, upload-time = "2024-10-20T00:30:00.354Z" }, - { url = "https://files.pythonhosted.org/packages/67/e4/ab3ca38f628f53f0fd28d3ff20edff1c975dd1cb22482e0061916b4b9a74/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:9110df111cabc2ed81aad2f35394a00cadf4f2e0635603db6ebbd0fc896f46a4", size = 3496253, upload-time = "2024-10-20T00:30:02.794Z" }, - { url = "https://files.pythonhosted.org/packages/ef/5f/0bf65511d4eeac3a1f41c54034a492515a707c6edbc642174ae79034d3ba/asyncpg-0.30.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:04ff0785ae7eed6cc138e73fc67b8e51d54ee7a3ce9b63666ce55a0bf095f7ba", size = 3662720, upload-time = "2024-10-20T00:30:04.501Z" }, - { url = "https://files.pythonhosted.org/packages/e7/31/1513d5a6412b98052c3ed9158d783b1e09d0910f51fbe0e05f56cc370bc4/asyncpg-0.30.0-cp313-cp313-win32.whl", hash = "sha256:ae374585f51c2b444510cdf3595b97ece4f233fde739aa14b50e0d64e8a7a590", size = 560404, upload-time = "2024-10-20T00:30:06.537Z" }, - { url = "https://files.pythonhosted.org/packages/c8/a4/cec76b3389c4c5ff66301cd100fe88c318563ec8a520e0b2e792b5b84972/asyncpg-0.30.0-cp313-cp313-win_amd64.whl", hash = "sha256:f59b430b8e27557c3fb9869222559f7417ced18688375825f8f12302c34e915e", size = 621623, upload-time = "2024-10-20T00:30:09.024Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/fe/cc/d18065ce2380d80b1bcce927c24a2642efd38918e33fd724bc4bca904877/asyncpg-0.31.0.tar.gz", hash = "sha256:c989386c83940bfbd787180f2b1519415e2d3d6277a70d9d0f0145ac73500735", size = 993667, upload-time = "2025-11-24T23:27:00.812Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c3/d9/507c80bdac2e95e5a525644af94b03fa7f9a44596a84bd48a6e80f854f92/asyncpg-0.31.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:831712dd3cf117eec68575a9b50da711893fd63ebe277fc155ecae1c6c9f0f61", size = 644865, upload-time = "2025-11-24T23:25:23.527Z" }, + { url = "https://files.pythonhosted.org/packages/ea/03/f93b5e543f65c5f504e91405e8d21bb9e600548be95032951a754781a41d/asyncpg-0.31.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:0b17c89312c2f4ccea222a3a6571f7df65d4ba2c0e803339bfc7bed46a96d3be", size = 639297, upload-time = "2025-11-24T23:25:25.192Z" }, + { url = "https://files.pythonhosted.org/packages/e5/1e/de2177e57e03a06e697f6c1ddf2a9a7fcfdc236ce69966f54ffc830fd481/asyncpg-0.31.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3faa62f997db0c9add34504a68ac2c342cfee4d57a0c3062fcf0d86c7f9cb1e8", size = 2816679, upload-time = "2025-11-24T23:25:26.718Z" }, + { url = "https://files.pythonhosted.org/packages/d0/98/1a853f6870ac7ad48383a948c8ff3c85dc278066a4d69fc9af7d3d4b1106/asyncpg-0.31.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8ea599d45c361dfbf398cb67da7fd052affa556a401482d3ff1ee99bd68808a1", size = 2867087, upload-time = "2025-11-24T23:25:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/11/29/7e76f2a51f2360a7c90d2cf6d0d9b210c8bb0ae342edebd16173611a55c2/asyncpg-0.31.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:795416369c3d284e1837461909f58418ad22b305f955e625a4b3a2521d80a5f3", size = 2747631, upload-time = "2025-11-24T23:25:30.154Z" }, + { url = "https://files.pythonhosted.org/packages/5d/3f/716e10cb57c4f388248db46555e9226901688fbfabd0afb85b5e1d65d5a7/asyncpg-0.31.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:a8d758dac9d2e723e173d286ef5e574f0b350ec00e9186fce84d0fc5f6a8e6b8", size = 2855107, upload-time = "2025-11-24T23:25:31.888Z" }, + { url = "https://files.pythonhosted.org/packages/7e/ec/3ebae9dfb23a1bd3f68acfd4f795983b65b413291c0e2b0d982d6ae6c920/asyncpg-0.31.0-cp310-cp310-win32.whl", hash = "sha256:2d076d42eb583601179efa246c5d7ae44614b4144bc1c7a683ad1222814ed095", size = 521990, upload-time = "2025-11-24T23:25:33.402Z" }, + { url = "https://files.pythonhosted.org/packages/20/b4/9fbb4b0af4e36d96a61d026dd37acab3cf521a70290a09640b215da5ab7c/asyncpg-0.31.0-cp310-cp310-win_amd64.whl", hash = "sha256:9ea33213ac044171f4cac23740bed9a3805abae10e7025314cfbd725ec670540", size = 581629, upload-time = "2025-11-24T23:25:34.846Z" }, + { url = "https://files.pythonhosted.org/packages/08/17/cc02bc49bc350623d050fa139e34ea512cd6e020562f2a7312a7bcae4bc9/asyncpg-0.31.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:eee690960e8ab85063ba93af2ce128c0f52fd655fdff9fdb1a28df01329f031d", size = 643159, upload-time = "2025-11-24T23:25:36.443Z" }, + { url = "https://files.pythonhosted.org/packages/a4/62/4ded7d400a7b651adf06f49ea8f73100cca07c6df012119594d1e3447aa6/asyncpg-0.31.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:2657204552b75f8288de08ca60faf4a99a65deef3a71d1467454123205a88fab", size = 638157, upload-time = "2025-11-24T23:25:37.89Z" }, + { url = "https://files.pythonhosted.org/packages/d6/5b/4179538a9a72166a0bf60ad783b1ef16efb7960e4d7b9afe9f77a5551680/asyncpg-0.31.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a429e842a3a4b4ea240ea52d7fe3f82d5149853249306f7ff166cb9948faa46c", size = 2918051, upload-time = "2025-11-24T23:25:39.461Z" }, + { url = "https://files.pythonhosted.org/packages/e6/35/c27719ae0536c5b6e61e4701391ffe435ef59539e9360959240d6e47c8c8/asyncpg-0.31.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c0807be46c32c963ae40d329b3a686356e417f674c976c07fa49f1b30303f109", size = 2972640, upload-time = "2025-11-24T23:25:41.512Z" }, + { url = "https://files.pythonhosted.org/packages/43/f4/01ebb9207f29e645a64699b9ce0eefeff8e7a33494e1d29bb53736f7766b/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:e5d5098f63beeae93512ee513d4c0c53dc12e9aa2b7a1af5a81cddf93fe4e4da", size = 2851050, upload-time = "2025-11-24T23:25:43.153Z" }, + { url = "https://files.pythonhosted.org/packages/3e/f4/03ff1426acc87be0f4e8d40fa2bff5c3952bef0080062af9efc2212e3be8/asyncpg-0.31.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37fc6c00a814e18eef51833545d1891cac9aa69140598bb076b4cd29b3e010b9", size = 2962574, upload-time = "2025-11-24T23:25:44.942Z" }, + { url = "https://files.pythonhosted.org/packages/c7/39/cc788dfca3d4060f9d93e67be396ceec458dfc429e26139059e58c2c244d/asyncpg-0.31.0-cp311-cp311-win32.whl", hash = "sha256:5a4af56edf82a701aece93190cc4e094d2df7d33f6e915c222fb09efbb5afc24", size = 521076, upload-time = "2025-11-24T23:25:46.486Z" }, + { url = "https://files.pythonhosted.org/packages/28/fc/735af5384c029eb7f1ca60ccb8fa95521dbdaeef788edf4cecfc604c3cab/asyncpg-0.31.0-cp311-cp311-win_amd64.whl", hash = "sha256:480c4befbdf079c14c9ca43c8c5e1fe8b6296c96f1f927158d4f1e750aacc047", size = 584980, upload-time = "2025-11-24T23:25:47.938Z" }, + { url = "https://files.pythonhosted.org/packages/2a/a6/59d0a146e61d20e18db7396583242e32e0f120693b67a8de43f1557033e2/asyncpg-0.31.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b44c31e1efc1c15188ef183f287c728e2046abb1d26af4d20858215d50d91fad", size = 662042, upload-time = "2025-11-24T23:25:49.578Z" }, + { url = "https://files.pythonhosted.org/packages/36/01/ffaa189dcb63a2471720615e60185c3f6327716fdc0fc04334436fbb7c65/asyncpg-0.31.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0c89ccf741c067614c9b5fc7f1fc6f3b61ab05ae4aaa966e6fd6b93097c7d20d", size = 638504, upload-time = "2025-11-24T23:25:51.501Z" }, + { url = "https://files.pythonhosted.org/packages/9f/62/3f699ba45d8bd24c5d65392190d19656d74ff0185f42e19d0bbd973bb371/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_aarch64.whl", hash = "sha256:12b3b2e39dc5470abd5e98c8d3373e4b1d1234d9fbdedf538798b2c13c64460a", size = 3426241, upload-time = "2025-11-24T23:25:53.278Z" }, + { url = "https://files.pythonhosted.org/packages/8c/d1/a867c2150f9c6e7af6462637f613ba67f78a314b00db220cd26ff559d532/asyncpg-0.31.0-cp312-cp312-manylinux_2_28_x86_64.whl", hash = "sha256:aad7a33913fb8bcb5454313377cc330fbb19a0cd5faa7272407d8a0c4257b671", size = 3520321, upload-time = "2025-11-24T23:25:54.982Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1a/cce4c3f246805ecd285a3591222a2611141f1669d002163abef999b60f98/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:3df118d94f46d85b2e434fd62c84cb66d5834d5a890725fe625f498e72e4d5ec", size = 3316685, upload-time = "2025-11-24T23:25:57.43Z" }, + { url = "https://files.pythonhosted.org/packages/40/ae/0fc961179e78cc579e138fad6eb580448ecae64908f95b8cb8ee2f241f67/asyncpg-0.31.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:bd5b6efff3c17c3202d4b37189969acf8927438a238c6257f66be3c426beba20", size = 3471858, upload-time = "2025-11-24T23:25:59.636Z" }, + { url = "https://files.pythonhosted.org/packages/52/b2/b20e09670be031afa4cbfabd645caece7f85ec62d69c312239de568e058e/asyncpg-0.31.0-cp312-cp312-win32.whl", hash = "sha256:027eaa61361ec735926566f995d959ade4796f6a49d3bde17e5134b9964f9ba8", size = 527852, upload-time = "2025-11-24T23:26:01.084Z" }, + { url = "https://files.pythonhosted.org/packages/b5/f0/f2ed1de154e15b107dc692262395b3c17fc34eafe2a78fc2115931561730/asyncpg-0.31.0-cp312-cp312-win_amd64.whl", hash = "sha256:72d6bdcbc93d608a1158f17932de2321f68b1a967a13e014998db87a72ed3186", size = 597175, upload-time = "2025-11-24T23:26:02.564Z" }, + { url = "https://files.pythonhosted.org/packages/95/11/97b5c2af72a5d0b9bc3fa30cd4b9ce22284a9a943a150fdc768763caf035/asyncpg-0.31.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:c204fab1b91e08b0f47e90a75d1b3c62174dab21f670ad6c5d0f243a228f015b", size = 661111, upload-time = "2025-11-24T23:26:04.467Z" }, + { url = "https://files.pythonhosted.org/packages/1b/71/157d611c791a5e2d0423f09f027bd499935f0906e0c2a416ce712ba51ef3/asyncpg-0.31.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:54a64f91839ba59008eccf7aad2e93d6e3de688d796f35803235ea1c4898ae1e", size = 636928, upload-time = "2025-11-24T23:26:05.944Z" }, + { url = "https://files.pythonhosted.org/packages/2e/fc/9e3486fb2bbe69d4a867c0b76d68542650a7ff1574ca40e84c3111bb0c6e/asyncpg-0.31.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c0e0822b1038dc7253b337b0f3f676cadc4ac31b126c5d42691c39691962e403", size = 3424067, upload-time = "2025-11-24T23:26:07.957Z" }, + { url = "https://files.pythonhosted.org/packages/12/c6/8c9d076f73f07f995013c791e018a1cd5f31823c2a3187fc8581706aa00f/asyncpg-0.31.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:bef056aa502ee34204c161c72ca1f3c274917596877f825968368b2c33f585f4", size = 3518156, upload-time = "2025-11-24T23:26:09.591Z" }, + { url = "https://files.pythonhosted.org/packages/ae/3b/60683a0baf50fbc546499cfb53132cb6835b92b529a05f6a81471ab60d0c/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:0bfbcc5b7ffcd9b75ab1558f00db2ae07db9c80637ad1b2469c43df79d7a5ae2", size = 3319636, upload-time = "2025-11-24T23:26:11.168Z" }, + { url = "https://files.pythonhosted.org/packages/50/dc/8487df0f69bd398a61e1792b3cba0e47477f214eff085ba0efa7eac9ce87/asyncpg-0.31.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:22bc525ebbdc24d1261ecbf6f504998244d4e3be1721784b5f64664d61fbe602", size = 3472079, upload-time = "2025-11-24T23:26:13.164Z" }, + { url = "https://files.pythonhosted.org/packages/13/a1/c5bbeeb8531c05c89135cb8b28575ac2fac618bcb60119ee9696c3faf71c/asyncpg-0.31.0-cp313-cp313-win32.whl", hash = "sha256:f890de5e1e4f7e14023619399a471ce4b71f5418cd67a51853b9910fdfa73696", size = 527606, upload-time = "2025-11-24T23:26:14.78Z" }, + { url = "https://files.pythonhosted.org/packages/91/66/b25ccb84a246b470eb943b0107c07edcae51804912b824054b3413995a10/asyncpg-0.31.0-cp313-cp313-win_amd64.whl", hash = "sha256:dc5f2fa9916f292e5c5c8b2ac2813763bcd7f58e130055b4ad8a0531314201ab", size = 596569, upload-time = "2025-11-24T23:26:16.189Z" }, + { url = "https://files.pythonhosted.org/packages/3c/36/e9450d62e84a13aea6580c83a47a437f26c7ca6fa0f0fd40b6670793ea30/asyncpg-0.31.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:f6b56b91bb0ffc328c4e3ed113136cddd9deefdf5f79ab448598b9772831df44", size = 660867, upload-time = "2025-11-24T23:26:17.631Z" }, + { url = "https://files.pythonhosted.org/packages/82/4b/1d0a2b33b3102d210439338e1beea616a6122267c0df459ff0265cd5807a/asyncpg-0.31.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:334dec28cf20d7f5bb9e45b39546ddf247f8042a690bff9b9573d00086e69cb5", size = 638349, upload-time = "2025-11-24T23:26:19.689Z" }, + { url = "https://files.pythonhosted.org/packages/41/aa/e7f7ac9a7974f08eff9183e392b2d62516f90412686532d27e196c0f0eeb/asyncpg-0.31.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:98cc158c53f46de7bb677fd20c417e264fc02b36d901cc2a43bd6cb0dc6dbfd2", size = 3410428, upload-time = "2025-11-24T23:26:21.275Z" }, + { url = "https://files.pythonhosted.org/packages/6f/de/bf1b60de3dede5c2731e6788617a512bc0ebd9693eac297ee74086f101d7/asyncpg-0.31.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9322b563e2661a52e3cdbc93eed3be7748b289f792e0011cb2720d278b366ce2", size = 3471678, upload-time = "2025-11-24T23:26:23.627Z" }, + { url = "https://files.pythonhosted.org/packages/46/78/fc3ade003e22d8bd53aaf8f75f4be48f0b460fa73738f0391b9c856a9147/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:19857a358fc811d82227449b7ca40afb46e75b33eb8897240c3839dd8b744218", size = 3313505, upload-time = "2025-11-24T23:26:25.235Z" }, + { url = "https://files.pythonhosted.org/packages/bf/e9/73eb8a6789e927816f4705291be21f2225687bfa97321e40cd23055e903a/asyncpg-0.31.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:ba5f8886e850882ff2c2ace5732300e99193823e8107e2c53ef01c1ebfa1e85d", size = 3434744, upload-time = "2025-11-24T23:26:26.944Z" }, + { url = "https://files.pythonhosted.org/packages/08/4b/f10b880534413c65c5b5862f79b8e81553a8f364e5238832ad4c0af71b7f/asyncpg-0.31.0-cp314-cp314-win32.whl", hash = "sha256:cea3a0b2a14f95834cee29432e4ddc399b95700eb1d51bbc5bfee8f31fa07b2b", size = 532251, upload-time = "2025-11-24T23:26:28.404Z" }, + { url = "https://files.pythonhosted.org/packages/d3/2d/7aa40750b7a19efa5d66e67fc06008ca0f27ba1bd082e457ad82f59aba49/asyncpg-0.31.0-cp314-cp314-win_amd64.whl", hash = "sha256:04d19392716af6b029411a0264d92093b6e5e8285ae97a39957b9a9c14ea72be", size = 604901, upload-time = "2025-11-24T23:26:30.34Z" }, + { url = "https://files.pythonhosted.org/packages/ce/fe/b9dfe349b83b9dee28cc42360d2c86b2cdce4cb551a2c2d27e156bcac84d/asyncpg-0.31.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:bdb957706da132e982cc6856bb2f7b740603472b54c3ebc77fe60ea3e57e1bd2", size = 702280, upload-time = "2025-11-24T23:26:32Z" }, + { url = "https://files.pythonhosted.org/packages/6a/81/e6be6e37e560bd91e6c23ea8a6138a04fd057b08cf63d3c5055c98e81c1d/asyncpg-0.31.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:6d11b198111a72f47154fa03b85799f9be63701e068b43f84ac25da0bda9cb31", size = 682931, upload-time = "2025-11-24T23:26:33.572Z" }, + { url = "https://files.pythonhosted.org/packages/a6/45/6009040da85a1648dd5bc75b3b0a062081c483e75a1a29041ae63a0bf0dc/asyncpg-0.31.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18c83b03bc0d1b23e6230f5bf8d4f217dc9bc08644ce0502a9d91dc9e634a9c7", size = 3581608, upload-time = "2025-11-24T23:26:35.638Z" }, + { url = "https://files.pythonhosted.org/packages/7e/06/2e3d4d7608b0b2b3adbee0d0bd6a2d29ca0fc4d8a78f8277df04e2d1fd7b/asyncpg-0.31.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:e009abc333464ff18b8f6fd146addffd9aaf63e79aa3bb40ab7a4c332d0c5e9e", size = 3498738, upload-time = "2025-11-24T23:26:37.275Z" }, + { url = "https://files.pythonhosted.org/packages/7d/aa/7d75ede780033141c51d83577ea23236ba7d3a23593929b32b49db8ed36e/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:3b1fbcb0e396a5ca435a8826a87e5c2c2cc0c8c68eb6fadf82168056b0e53a8c", size = 3401026, upload-time = "2025-11-24T23:26:39.423Z" }, + { url = "https://files.pythonhosted.org/packages/ba/7a/15e37d45e7f7c94facc1e9148c0e455e8f33c08f0b8a0b1deb2c5171771b/asyncpg-0.31.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:8df714dba348efcc162d2adf02d213e5fab1bd9f557e1305633e851a61814a7a", size = 3429426, upload-time = "2025-11-24T23:26:41.032Z" }, + { url = "https://files.pythonhosted.org/packages/13/d5/71437c5f6ae5f307828710efbe62163974e71237d5d46ebd2869ea052d10/asyncpg-0.31.0-cp314-cp314t-win32.whl", hash = "sha256:1b41f1afb1033f2b44f3234993b15096ddc9cd71b21a42dbd87fc6a57b43d65d", size = 614495, upload-time = "2025-11-24T23:26:42.659Z" }, + { url = "https://files.pythonhosted.org/packages/3c/d7/8fb3044eaef08a310acfe23dae9a8e2e07d305edc29a53497e52bc76eca7/asyncpg-0.31.0-cp314-cp314t-win_amd64.whl", hash = "sha256:bd4107bb7cdd0e9e65fae66a62afd3a249663b844fa34d479f6d5b3bef9c04c3", size = 706062, upload-time = "2025-11-24T23:26:44.086Z" }, ] [[package]] name = "attrs" -version = "25.3.0" +version = "26.1.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/5a/b0/1367933a8532ee6ff8d63537de4f1177af4bff9f3e829baf7331f595bb24/attrs-25.3.0.tar.gz", hash = "sha256:75d7cefc7fb576747b2c81b4442d4d4a1ce0900973527c011d1030fd3bf4af1b", size = 812032, upload-time = "2025-03-13T11:10:22.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/9a/8e/82a0fe20a541c03148528be8cac2408564a6c9a0cc7e9171802bc1d26985/attrs-26.1.0.tar.gz", hash = "sha256:d03ceb89cb322a8fd706d4fb91940737b6642aa36998fe130a9bc96c985eff32", size = 952055, upload-time = "2026-03-19T14:22:25.026Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/77/06/bb80f5f86020c4551da315d78b3ab75e8228f89f0162f2c3a819e407941a/attrs-25.3.0-py3-none-any.whl", hash = "sha256:427318ce031701fea540783410126f03899a97ffc6f61596ad581ac2e40e3bc3", size = 63815, upload-time = "2025-03-13T11:10:21.14Z" }, + { url = "https://files.pythonhosted.org/packages/64/b4/17d4b0b2a2dc85a6df63d1157e028ed19f90d4cd97c36717afef2bc2f395/attrs-26.1.0-py3-none-any.whl", hash = "sha256:c647aa4a12dfbad9333ca4e71fe62ddc36f4e63b2d260a37a8b83d2f043ac309", size = 67548, upload-time = "2026-03-19T14:22:23.645Z" }, ] [[package]] @@ -289,57 +339,13 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/a0/59/76ab57e3fe74484f48a53f8e337171b4a2349e506eabe136d7e01d059086/backports_asyncio_runner-1.2.0-py3-none-any.whl", hash = "sha256:0da0a936a8aeb554eccb426dc55af3ba63bcdc69fa1a600b5bb305413a4477b5", size = 12313, upload-time = "2025-07-02T02:27:14.263Z" }, ] -[[package]] -name = "black" -version = "26.3.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "click" }, - { name = "mypy-extensions" }, - { name = "packaging" }, - { name = "pathspec" }, - { name = "platformdirs" }, - { name = "pytokens" }, - { name = "tomli", marker = "python_full_version < '3.11'" }, - { name = "typing-extensions", marker = "python_full_version < '3.11'" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/e1/c5/61175d618685d42b005847464b8fb4743a67b1b8fdb75e50e5a96c31a27a/black-26.3.1.tar.gz", hash = "sha256:2c50f5063a9641c7eed7795014ba37b0f5fa227f3d408b968936e24bc0566b07", size = 666155, upload-time = "2026-03-12T03:36:03.593Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/32/a8/11170031095655d36ebc6664fe0897866f6023892396900eec0e8fdc4299/black-26.3.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:86a8b5035fce64f5dcd1b794cf8ec4d31fe458cf6ce3986a30deb434df82a1d2", size = 1866562, upload-time = "2026-03-12T03:39:58.639Z" }, - { url = "https://files.pythonhosted.org/packages/69/ce/9e7548d719c3248c6c2abfd555d11169457cbd584d98d179111338423790/black-26.3.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:5602bdb96d52d2d0672f24f6ffe5218795736dd34807fd0fd55ccd6bf206168b", size = 1703623, upload-time = "2026-03-12T03:40:00.347Z" }, - { url = "https://files.pythonhosted.org/packages/7f/0a/8d17d1a9c06f88d3d030d0b1d4373c1551146e252afe4547ed601c0e697f/black-26.3.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6c54a4a82e291a1fee5137371ab488866b7c86a3305af4026bdd4dc78642e1ac", size = 1768388, upload-time = "2026-03-12T03:40:01.765Z" }, - { url = "https://files.pythonhosted.org/packages/52/79/c1ee726e221c863cde5164f925bacf183dfdf0397d4e3f94889439b947b4/black-26.3.1-cp310-cp310-win_amd64.whl", hash = "sha256:6e131579c243c98f35bce64a7e08e87fb2d610544754675d4a0e73a070a5aa3a", size = 1412969, upload-time = "2026-03-12T03:40:03.252Z" }, - { url = "https://files.pythonhosted.org/packages/73/a5/15c01d613f5756f68ed8f6d4ec0a1e24b82b18889fa71affd3d1f7fad058/black-26.3.1-cp310-cp310-win_arm64.whl", hash = "sha256:5ed0ca58586c8d9a487352a96b15272b7fa55d139fc8496b519e78023a8dab0a", size = 1220345, upload-time = "2026-03-12T03:40:04.892Z" }, - { url = "https://files.pythonhosted.org/packages/17/57/5f11c92861f9c92eb9dddf515530bc2d06db843e44bdcf1c83c1427824bc/black-26.3.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:28ef38aee69e4b12fda8dba75e21f9b4f979b490c8ac0baa7cb505369ac9e1ff", size = 1851987, upload-time = "2026-03-12T03:40:06.248Z" }, - { url = "https://files.pythonhosted.org/packages/54/aa/340a1463660bf6831f9e39646bf774086dbd8ca7fc3cded9d59bbdf4ad0a/black-26.3.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:bf9bf162ed91a26f1adba8efda0b573bc6924ec1408a52cc6f82cb73ec2b142c", size = 1689499, upload-time = "2026-03-12T03:40:07.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/01/b726c93d717d72733da031d2de10b92c9fa4c8d0c67e8a8a372076579279/black-26.3.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:474c27574d6d7037c1bc875a81d9be0a9a4f9ee95e62800dab3cfaadbf75acd5", size = 1754369, upload-time = "2026-03-12T03:40:09.279Z" }, - { url = "https://files.pythonhosted.org/packages/e3/09/61e91881ca291f150cfc9eb7ba19473c2e59df28859a11a88248b5cbbc4d/black-26.3.1-cp311-cp311-win_amd64.whl", hash = "sha256:5e9d0d86df21f2e1677cc4bd090cd0e446278bcbbe49bf3659c308c3e402843e", size = 1413613, upload-time = "2026-03-12T03:40:10.943Z" }, - { url = "https://files.pythonhosted.org/packages/16/73/544f23891b22e7efe4d8f812371ab85b57f6a01b2fc45e3ba2e52ba985b8/black-26.3.1-cp311-cp311-win_arm64.whl", hash = "sha256:9a5e9f45e5d5e1c5b5c29b3bd4265dcc90e8b92cf4534520896ed77f791f4da5", size = 1219719, upload-time = "2026-03-12T03:40:12.597Z" }, - { url = "https://files.pythonhosted.org/packages/dc/f8/da5eae4fc75e78e6dceb60624e1b9662ab00d6b452996046dfa9b8a6025b/black-26.3.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:b5e6f89631eb88a7302d416594a32faeee9fb8fb848290da9d0a5f2903519fc1", size = 1895920, upload-time = "2026-03-12T03:40:13.921Z" }, - { url = "https://files.pythonhosted.org/packages/2c/9f/04e6f26534da2e1629b2b48255c264cabf5eedc5141d04516d9d68a24111/black-26.3.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:41cd2012d35b47d589cb8a16faf8a32ef7a336f56356babd9fcf70939ad1897f", size = 1718499, upload-time = "2026-03-12T03:40:15.239Z" }, - { url = "https://files.pythonhosted.org/packages/04/91/a5935b2a63e31b331060c4a9fdb5a6c725840858c599032a6f3aac94055f/black-26.3.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f76ff19ec5297dd8e66eb64deda23631e642c9393ab592826fd4bdc97a4bce7", size = 1794994, upload-time = "2026-03-12T03:40:17.124Z" }, - { url = "https://files.pythonhosted.org/packages/e7/0a/86e462cdd311a3c2a8ece708d22aba17d0b2a0d5348ca34b40cdcbea512e/black-26.3.1-cp312-cp312-win_amd64.whl", hash = "sha256:ddb113db38838eb9f043623ba274cfaf7d51d5b0c22ecb30afe58b1bb8322983", size = 1420867, upload-time = "2026-03-12T03:40:18.83Z" }, - { url = "https://files.pythonhosted.org/packages/5b/e5/22515a19cb7eaee3440325a6b0d95d2c0e88dd180cb011b12ae488e031d1/black-26.3.1-cp312-cp312-win_arm64.whl", hash = "sha256:dfdd51fc3e64ea4f35873d1b3fb25326773d55d2329ff8449139ebaad7357efb", size = 1230124, upload-time = "2026-03-12T03:40:20.425Z" }, - { url = "https://files.pythonhosted.org/packages/f5/77/5728052a3c0450c53d9bb3945c4c46b91baa62b2cafab6801411b6271e45/black-26.3.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:855822d90f884905362f602880ed8b5df1b7e3ee7d0db2502d4388a954cc8c54", size = 1895034, upload-time = "2026-03-12T03:40:21.813Z" }, - { url = "https://files.pythonhosted.org/packages/52/73/7cae55fdfdfbe9d19e9a8d25d145018965fe2079fa908101c3733b0c55a0/black-26.3.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8a33d657f3276328ce00e4d37fe70361e1ec7614da5d7b6e78de5426cb56332f", size = 1718503, upload-time = "2026-03-12T03:40:23.666Z" }, - { url = "https://files.pythonhosted.org/packages/e1/87/af89ad449e8254fdbc74654e6467e3c9381b61472cc532ee350d28cfdafb/black-26.3.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f1cd08e99d2f9317292a311dfe578fd2a24b15dbce97792f9c4d752275c1fa56", size = 1793557, upload-time = "2026-03-12T03:40:25.497Z" }, - { url = "https://files.pythonhosted.org/packages/43/10/d6c06a791d8124b843bf325ab4ac7d2f5b98731dff84d6064eafd687ded1/black-26.3.1-cp313-cp313-win_amd64.whl", hash = "sha256:c7e72339f841b5a237ff14f7d3880ddd0fc7f98a1199e8c4327f9a4f478c1839", size = 1422766, upload-time = "2026-03-12T03:40:27.14Z" }, - { url = "https://files.pythonhosted.org/packages/59/4f/40a582c015f2d841ac24fed6390bd68f0fc896069ff3a886317959c9daf8/black-26.3.1-cp313-cp313-win_arm64.whl", hash = "sha256:afc622538b430aa4c8c853f7f63bc582b3b8030fd8c80b70fb5fa5b834e575c2", size = 1232140, upload-time = "2026-03-12T03:40:28.882Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/e36e27c9cebc1311b7579210df6f1c86e50f2d7143ae4fcf8a5017dc8809/black-26.3.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:2d6bfaf7fd0993b420bed691f20f9492d53ce9a2bcccea4b797d34e947318a78", size = 1889234, upload-time = "2026-03-12T03:40:30.964Z" }, - { url = "https://files.pythonhosted.org/packages/0e/7b/9871acf393f64a5fa33668c19350ca87177b181f44bb3d0c33b2d534f22c/black-26.3.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:f89f2ab047c76a9c03f78d0d66ca519e389519902fa27e7a91117ef7611c0568", size = 1720522, upload-time = "2026-03-12T03:40:32.346Z" }, - { url = "https://files.pythonhosted.org/packages/03/87/e766c7f2e90c07fb7586cc787c9ae6462b1eedab390191f2b7fc7f6170a9/black-26.3.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b07fc0dab849d24a80a29cfab8d8a19187d1c4685d8a5e6385a5ce323c1f015f", size = 1787824, upload-time = "2026-03-12T03:40:33.636Z" }, - { url = "https://files.pythonhosted.org/packages/ac/94/2424338fb2d1875e9e83eed4c8e9c67f6905ec25afd826a911aea2b02535/black-26.3.1-cp314-cp314-win_amd64.whl", hash = "sha256:0126ae5b7c09957da2bdbd91a9ba1207453feada9e9fe51992848658c6c8e01c", size = 1445855, upload-time = "2026-03-12T03:40:35.442Z" }, - { url = "https://files.pythonhosted.org/packages/86/43/0c3338bd928afb8ee7471f1a4eec3bdbe2245ccb4a646092a222e8669840/black-26.3.1-cp314-cp314-win_arm64.whl", hash = "sha256:92c0ec1f2cc149551a2b7b47efc32c866406b6891b0ee4625e95967c8f4acfb1", size = 1258109, upload-time = "2026-03-12T03:40:36.832Z" }, - { url = "https://files.pythonhosted.org/packages/8e/0d/52d98722666d6fc6c3dd4c76df339501d6efd40e0ff95e6186a7b7f0befd/black-26.3.1-py3-none-any.whl", hash = "sha256:2bd5aa94fc267d38bb21a70d7410a89f1a1d318841855f698746f8e7f51acd1b", size = 207542, upload-time = "2026-03-12T03:36:01.668Z" }, -] - [[package]] name = "certifi" -version = "2025.7.14" +version = "2026.2.25" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/76/52c535bcebe74590f296d6c77c86dabf761c41980e1347a2422e4aa2ae41/certifi-2025.7.14.tar.gz", hash = "sha256:8ea99dbdfaaf2ba2f9bac77b9249ef62ec5218e7c2b2e903378ed5fccf765995", size = 163981, upload-time = "2025-07-14T03:29:28.449Z" } +sdist = { url = "https://files.pythonhosted.org/packages/af/2d/7bf41579a8986e348fa033a31cdd0e4121114f6bce2457e8876010b092dd/certifi-2026.2.25.tar.gz", hash = "sha256:e887ab5cee78ea814d3472169153c2d12cd43b14bd03329a39a9c6e2e80bfba7", size = 155029, upload-time = "2026-02-25T02:54:17.342Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4f/52/34c6cf5bb9285074dc3531c437b3919e825d976fde097a7a73f79e726d03/certifi-2025.7.14-py3-none-any.whl", hash = "sha256:6b31f564a415d79ee77df69d757bb49a5bb53bd9f756cbbe24394ffd6fc1f4b2", size = 162722, upload-time = "2025-07-14T03:29:26.863Z" }, + { url = "https://files.pythonhosted.org/packages/9a/3c/c17fb3ca2d9c3acff52e30b309f538586f9f5b9c9cf454f3845fc9af4881/certifi-2026.2.25-py3-none-any.whl", hash = "sha256:027692e4402ad994f1c42e52a4997a9763c646b73e4096e4d5d6db8af1d6f0fa", size = 153684, upload-time = "2026-02-25T02:54:15.766Z" }, ] [[package]] @@ -426,84 +432,128 @@ wheels = [ [[package]] name = "cfgv" -version = "3.4.0" +version = "3.5.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/11/74/539e56497d9bd1d484fd863dd69cbbfa653cd2aa27abfe35653494d85e94/cfgv-3.4.0.tar.gz", hash = "sha256:e52591d4c5f5dead8e0f673fb16db7949d2cfb3f7da4582893288f0ded8fe560", size = 7114, upload-time = "2023-08-12T20:38:17.776Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4e/b5/721b8799b04bf9afe054a3899c6cf4e880fcf8563cc71c15610242490a0c/cfgv-3.5.0.tar.gz", hash = "sha256:d5b1034354820651caa73ede66a6294d6e95c1b00acc5e9b098e917404669132", size = 7334, upload-time = "2025-11-19T20:55:51.612Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c5/55/51844dd50c4fc7a33b653bfaba4c2456f06955289ca770a5dbd5fd267374/cfgv-3.4.0-py2.py3-none-any.whl", hash = "sha256:b7265b1f29fd3316bfcd2b330d63d024f2bfd8bcb8b0272f8e19a504856c48f9", size = 7249, upload-time = "2023-08-12T20:38:16.269Z" }, + { url = "https://files.pythonhosted.org/packages/db/3c/33bac158f8ab7f89b2e59426d5fe2e4f63f7ed25df84c036890172b412b5/cfgv-3.5.0-py2.py3-none-any.whl", hash = "sha256:a8dc6b26ad22ff227d2634a65cb388215ce6cc96bbcc5cfde7641ae87e8dacc0", size = 7445, upload-time = "2025-11-19T20:55:50.744Z" }, ] [[package]] name = "charset-normalizer" -version = "3.4.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/e4/33/89c2ced2b67d1c2a61c19c6751aa8902d46ce3dacb23600a283619f5a12d/charset_normalizer-3.4.2.tar.gz", hash = "sha256:5baececa9ecba31eff645232d59845c07aa030f0c81ee70184a90d35099a0e63", size = 126367, upload-time = "2025-05-02T08:34:42.01Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/95/28/9901804da60055b406e1a1c5ba7aac1276fb77f1dde635aabfc7fd84b8ab/charset_normalizer-3.4.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7c48ed483eb946e6c04ccbe02c6b4d1d48e51944b6db70f697e089c193404941", size = 201818, upload-time = "2025-05-02T08:31:46.725Z" }, - { url = "https://files.pythonhosted.org/packages/d9/9b/892a8c8af9110935e5adcbb06d9c6fe741b6bb02608c6513983048ba1a18/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:b2d318c11350e10662026ad0eb71bb51c7812fc8590825304ae0bdd4ac283acd", size = 144649, upload-time = "2025-05-02T08:31:48.889Z" }, - { url = "https://files.pythonhosted.org/packages/7b/a5/4179abd063ff6414223575e008593861d62abfc22455b5d1a44995b7c101/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:9cbfacf36cb0ec2897ce0ebc5d08ca44213af24265bd56eca54bee7923c48fd6", size = 155045, upload-time = "2025-05-02T08:31:50.757Z" }, - { url = "https://files.pythonhosted.org/packages/3b/95/bc08c7dfeddd26b4be8c8287b9bb055716f31077c8b0ea1cd09553794665/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:18dd2e350387c87dabe711b86f83c9c78af772c748904d372ade190b5c7c9d4d", size = 147356, upload-time = "2025-05-02T08:31:52.634Z" }, - { url = "https://files.pythonhosted.org/packages/a8/2d/7a5b635aa65284bf3eab7653e8b4151ab420ecbae918d3e359d1947b4d61/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:8075c35cd58273fee266c58c0c9b670947c19df5fb98e7b66710e04ad4e9ff86", size = 149471, upload-time = "2025-05-02T08:31:56.207Z" }, - { url = "https://files.pythonhosted.org/packages/ae/38/51fc6ac74251fd331a8cfdb7ec57beba8c23fd5493f1050f71c87ef77ed0/charset_normalizer-3.4.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:5bf4545e3b962767e5c06fe1738f951f77d27967cb2caa64c28be7c4563e162c", size = 151317, upload-time = "2025-05-02T08:31:57.613Z" }, - { url = "https://files.pythonhosted.org/packages/b7/17/edee1e32215ee6e9e46c3e482645b46575a44a2d72c7dfd49e49f60ce6bf/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:7a6ab32f7210554a96cd9e33abe3ddd86732beeafc7a28e9955cdf22ffadbab0", size = 146368, upload-time = "2025-05-02T08:31:59.468Z" }, - { url = "https://files.pythonhosted.org/packages/26/2c/ea3e66f2b5f21fd00b2825c94cafb8c326ea6240cd80a91eb09e4a285830/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:b33de11b92e9f75a2b545d6e9b6f37e398d86c3e9e9653c4864eb7e89c5773ef", size = 154491, upload-time = "2025-05-02T08:32:01.219Z" }, - { url = "https://files.pythonhosted.org/packages/52/47/7be7fa972422ad062e909fd62460d45c3ef4c141805b7078dbab15904ff7/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:8755483f3c00d6c9a77f490c17e6ab0c8729e39e6390328e42521ef175380ae6", size = 157695, upload-time = "2025-05-02T08:32:03.045Z" }, - { url = "https://files.pythonhosted.org/packages/2f/42/9f02c194da282b2b340f28e5fb60762de1151387a36842a92b533685c61e/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:68a328e5f55ec37c57f19ebb1fdc56a248db2e3e9ad769919a58672958e8f366", size = 154849, upload-time = "2025-05-02T08:32:04.651Z" }, - { url = "https://files.pythonhosted.org/packages/67/44/89cacd6628f31fb0b63201a618049be4be2a7435a31b55b5eb1c3674547a/charset_normalizer-3.4.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:21b2899062867b0e1fde9b724f8aecb1af14f2778d69aacd1a5a1853a597a5db", size = 150091, upload-time = "2025-05-02T08:32:06.719Z" }, - { url = "https://files.pythonhosted.org/packages/1f/79/4b8da9f712bc079c0f16b6d67b099b0b8d808c2292c937f267d816ec5ecc/charset_normalizer-3.4.2-cp310-cp310-win32.whl", hash = "sha256:e8082b26888e2f8b36a042a58307d5b917ef2b1cacab921ad3323ef91901c71a", size = 98445, upload-time = "2025-05-02T08:32:08.66Z" }, - { url = "https://files.pythonhosted.org/packages/7d/d7/96970afb4fb66497a40761cdf7bd4f6fca0fc7bafde3a84f836c1f57a926/charset_normalizer-3.4.2-cp310-cp310-win_amd64.whl", hash = "sha256:f69a27e45c43520f5487f27627059b64aaf160415589230992cec34c5e18a509", size = 105782, upload-time = "2025-05-02T08:32:10.46Z" }, - { url = "https://files.pythonhosted.org/packages/05/85/4c40d00dcc6284a1c1ad5de5e0996b06f39d8232f1031cd23c2f5c07ee86/charset_normalizer-3.4.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:be1e352acbe3c78727a16a455126d9ff83ea2dfdcbc83148d2982305a04714c2", size = 198794, upload-time = "2025-05-02T08:32:11.945Z" }, - { url = "https://files.pythonhosted.org/packages/41/d9/7a6c0b9db952598e97e93cbdfcb91bacd89b9b88c7c983250a77c008703c/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:aa88ca0b1932e93f2d961bf3addbb2db902198dca337d88c89e1559e066e7645", size = 142846, upload-time = "2025-05-02T08:32:13.946Z" }, - { url = "https://files.pythonhosted.org/packages/66/82/a37989cda2ace7e37f36c1a8ed16c58cf48965a79c2142713244bf945c89/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d524ba3f1581b35c03cb42beebab4a13e6cdad7b36246bd22541fa585a56cccd", size = 153350, upload-time = "2025-05-02T08:32:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/df/68/a576b31b694d07b53807269d05ec3f6f1093e9545e8607121995ba7a8313/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:28a1005facc94196e1fb3e82a3d442a9d9110b8434fc1ded7a24a2983c9888d8", size = 145657, upload-time = "2025-05-02T08:32:17.283Z" }, - { url = "https://files.pythonhosted.org/packages/92/9b/ad67f03d74554bed3aefd56fe836e1623a50780f7c998d00ca128924a499/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:fdb20a30fe1175ecabed17cbf7812f7b804b8a315a25f24678bcdf120a90077f", size = 147260, upload-time = "2025-05-02T08:32:18.807Z" }, - { url = "https://files.pythonhosted.org/packages/a6/e6/8aebae25e328160b20e31a7e9929b1578bbdc7f42e66f46595a432f8539e/charset_normalizer-3.4.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0f5d9ed7f254402c9e7d35d2f5972c9bbea9040e99cd2861bd77dc68263277c7", size = 149164, upload-time = "2025-05-02T08:32:20.333Z" }, - { url = "https://files.pythonhosted.org/packages/8b/f2/b3c2f07dbcc248805f10e67a0262c93308cfa149a4cd3d1fe01f593e5fd2/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:efd387a49825780ff861998cd959767800d54f8308936b21025326de4b5a42b9", size = 144571, upload-time = "2025-05-02T08:32:21.86Z" }, - { url = "https://files.pythonhosted.org/packages/60/5b/c3f3a94bc345bc211622ea59b4bed9ae63c00920e2e8f11824aa5708e8b7/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:f0aa37f3c979cf2546b73e8222bbfa3dc07a641585340179d768068e3455e544", size = 151952, upload-time = "2025-05-02T08:32:23.434Z" }, - { url = "https://files.pythonhosted.org/packages/e2/4d/ff460c8b474122334c2fa394a3f99a04cf11c646da895f81402ae54f5c42/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:e70e990b2137b29dc5564715de1e12701815dacc1d056308e2b17e9095372a82", size = 155959, upload-time = "2025-05-02T08:32:24.993Z" }, - { url = "https://files.pythonhosted.org/packages/a2/2b/b964c6a2fda88611a1fe3d4c400d39c66a42d6c169c924818c848f922415/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:0c8c57f84ccfc871a48a47321cfa49ae1df56cd1d965a09abe84066f6853b9c0", size = 153030, upload-time = "2025-05-02T08:32:26.435Z" }, - { url = "https://files.pythonhosted.org/packages/59/2e/d3b9811db26a5ebf444bc0fa4f4be5aa6d76fc6e1c0fd537b16c14e849b6/charset_normalizer-3.4.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:6b66f92b17849b85cad91259efc341dce9c1af48e2173bf38a85c6329f1033e5", size = 148015, upload-time = "2025-05-02T08:32:28.376Z" }, - { url = "https://files.pythonhosted.org/packages/90/07/c5fd7c11eafd561bb51220d600a788f1c8d77c5eef37ee49454cc5c35575/charset_normalizer-3.4.2-cp311-cp311-win32.whl", hash = "sha256:daac4765328a919a805fa5e2720f3e94767abd632ae410a9062dff5412bae65a", size = 98106, upload-time = "2025-05-02T08:32:30.281Z" }, - { url = "https://files.pythonhosted.org/packages/a8/05/5e33dbef7e2f773d672b6d79f10ec633d4a71cd96db6673625838a4fd532/charset_normalizer-3.4.2-cp311-cp311-win_amd64.whl", hash = "sha256:e53efc7c7cee4c1e70661e2e112ca46a575f90ed9ae3fef200f2a25e954f4b28", size = 105402, upload-time = "2025-05-02T08:32:32.191Z" }, - { url = "https://files.pythonhosted.org/packages/d7/a4/37f4d6035c89cac7930395a35cc0f1b872e652eaafb76a6075943754f095/charset_normalizer-3.4.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:0c29de6a1a95f24b9a1aa7aefd27d2487263f00dfd55a77719b530788f75cff7", size = 199936, upload-time = "2025-05-02T08:32:33.712Z" }, - { url = "https://files.pythonhosted.org/packages/ee/8a/1a5e33b73e0d9287274f899d967907cd0bf9c343e651755d9307e0dbf2b3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:cddf7bd982eaa998934a91f69d182aec997c6c468898efe6679af88283b498d3", size = 143790, upload-time = "2025-05-02T08:32:35.768Z" }, - { url = "https://files.pythonhosted.org/packages/66/52/59521f1d8e6ab1482164fa21409c5ef44da3e9f653c13ba71becdd98dec3/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:fcbe676a55d7445b22c10967bceaaf0ee69407fbe0ece4d032b6eb8d4565982a", size = 153924, upload-time = "2025-05-02T08:32:37.284Z" }, - { url = "https://files.pythonhosted.org/packages/86/2d/fb55fdf41964ec782febbf33cb64be480a6b8f16ded2dbe8db27a405c09f/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:d41c4d287cfc69060fa91cae9683eacffad989f1a10811995fa309df656ec214", size = 146626, upload-time = "2025-05-02T08:32:38.803Z" }, - { url = "https://files.pythonhosted.org/packages/8c/73/6ede2ec59bce19b3edf4209d70004253ec5f4e319f9a2e3f2f15601ed5f7/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4e594135de17ab3866138f496755f302b72157d115086d100c3f19370839dd3a", size = 148567, upload-time = "2025-05-02T08:32:40.251Z" }, - { url = "https://files.pythonhosted.org/packages/09/14/957d03c6dc343c04904530b6bef4e5efae5ec7d7990a7cbb868e4595ee30/charset_normalizer-3.4.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:cf713fe9a71ef6fd5adf7a79670135081cd4431c2943864757f0fa3a65b1fafd", size = 150957, upload-time = "2025-05-02T08:32:41.705Z" }, - { url = "https://files.pythonhosted.org/packages/0d/c8/8174d0e5c10ccebdcb1b53cc959591c4c722a3ad92461a273e86b9f5a302/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:a370b3e078e418187da8c3674eddb9d983ec09445c99a3a263c2011993522981", size = 145408, upload-time = "2025-05-02T08:32:43.709Z" }, - { url = "https://files.pythonhosted.org/packages/58/aa/8904b84bc8084ac19dc52feb4f5952c6df03ffb460a887b42615ee1382e8/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:a955b438e62efdf7e0b7b52a64dc5c3396e2634baa62471768a64bc2adb73d5c", size = 153399, upload-time = "2025-05-02T08:32:46.197Z" }, - { url = "https://files.pythonhosted.org/packages/c2/26/89ee1f0e264d201cb65cf054aca6038c03b1a0c6b4ae998070392a3ce605/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:7222ffd5e4de8e57e03ce2cef95a4c43c98fcb72ad86909abdfc2c17d227fc1b", size = 156815, upload-time = "2025-05-02T08:32:48.105Z" }, - { url = "https://files.pythonhosted.org/packages/fd/07/68e95b4b345bad3dbbd3a8681737b4338ff2c9df29856a6d6d23ac4c73cb/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:bee093bf902e1d8fc0ac143c88902c3dfc8941f7ea1d6a8dd2bcb786d33db03d", size = 154537, upload-time = "2025-05-02T08:32:49.719Z" }, - { url = "https://files.pythonhosted.org/packages/77/1a/5eefc0ce04affb98af07bc05f3bac9094513c0e23b0562d64af46a06aae4/charset_normalizer-3.4.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:dedb8adb91d11846ee08bec4c8236c8549ac721c245678282dcb06b221aab59f", size = 149565, upload-time = "2025-05-02T08:32:51.404Z" }, - { url = "https://files.pythonhosted.org/packages/37/a0/2410e5e6032a174c95e0806b1a6585eb21e12f445ebe239fac441995226a/charset_normalizer-3.4.2-cp312-cp312-win32.whl", hash = "sha256:db4c7bf0e07fc3b7d89ac2a5880a6a8062056801b83ff56d8464b70f65482b6c", size = 98357, upload-time = "2025-05-02T08:32:53.079Z" }, - { url = "https://files.pythonhosted.org/packages/6c/4f/c02d5c493967af3eda9c771ad4d2bbc8df6f99ddbeb37ceea6e8716a32bc/charset_normalizer-3.4.2-cp312-cp312-win_amd64.whl", hash = "sha256:5a9979887252a82fefd3d3ed2a8e3b937a7a809f65dcb1e068b090e165bbe99e", size = 105776, upload-time = "2025-05-02T08:32:54.573Z" }, - { url = "https://files.pythonhosted.org/packages/ea/12/a93df3366ed32db1d907d7593a94f1fe6293903e3e92967bebd6950ed12c/charset_normalizer-3.4.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:926ca93accd5d36ccdabd803392ddc3e03e6d4cd1cf17deff3b989ab8e9dbcf0", size = 199622, upload-time = "2025-05-02T08:32:56.363Z" }, - { url = "https://files.pythonhosted.org/packages/04/93/bf204e6f344c39d9937d3c13c8cd5bbfc266472e51fc8c07cb7f64fcd2de/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:eba9904b0f38a143592d9fc0e19e2df0fa2e41c3c3745554761c5f6447eedabf", size = 143435, upload-time = "2025-05-02T08:32:58.551Z" }, - { url = "https://files.pythonhosted.org/packages/22/2a/ea8a2095b0bafa6c5b5a55ffdc2f924455233ee7b91c69b7edfcc9e02284/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:3fddb7e2c84ac87ac3a947cb4e66d143ca5863ef48e4a5ecb83bd48619e4634e", size = 153653, upload-time = "2025-05-02T08:33:00.342Z" }, - { url = "https://files.pythonhosted.org/packages/b6/57/1b090ff183d13cef485dfbe272e2fe57622a76694061353c59da52c9a659/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:98f862da73774290f251b9df8d11161b6cf25b599a66baf087c1ffe340e9bfd1", size = 146231, upload-time = "2025-05-02T08:33:02.081Z" }, - { url = "https://files.pythonhosted.org/packages/e2/28/ffc026b26f441fc67bd21ab7f03b313ab3fe46714a14b516f931abe1a2d8/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6c9379d65defcab82d07b2a9dfbfc2e95bc8fe0ebb1b176a3190230a3ef0e07c", size = 148243, upload-time = "2025-05-02T08:33:04.063Z" }, - { url = "https://files.pythonhosted.org/packages/c0/0f/9abe9bd191629c33e69e47c6ef45ef99773320e9ad8e9cb08b8ab4a8d4cb/charset_normalizer-3.4.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e635b87f01ebc977342e2697d05b56632f5f879a4f15955dfe8cef2448b51691", size = 150442, upload-time = "2025-05-02T08:33:06.418Z" }, - { url = "https://files.pythonhosted.org/packages/67/7c/a123bbcedca91d5916c056407f89a7f5e8fdfce12ba825d7d6b9954a1a3c/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:1c95a1e2902a8b722868587c0e1184ad5c55631de5afc0eb96bc4b0d738092c0", size = 145147, upload-time = "2025-05-02T08:33:08.183Z" }, - { url = "https://files.pythonhosted.org/packages/ec/fe/1ac556fa4899d967b83e9893788e86b6af4d83e4726511eaaad035e36595/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ef8de666d6179b009dce7bcb2ad4c4a779f113f12caf8dc77f0162c29d20490b", size = 153057, upload-time = "2025-05-02T08:33:09.986Z" }, - { url = "https://files.pythonhosted.org/packages/2b/ff/acfc0b0a70b19e3e54febdd5301a98b72fa07635e56f24f60502e954c461/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:32fc0341d72e0f73f80acb0a2c94216bd704f4f0bce10aedea38f30502b271ff", size = 156454, upload-time = "2025-05-02T08:33:11.814Z" }, - { url = "https://files.pythonhosted.org/packages/92/08/95b458ce9c740d0645feb0e96cea1f5ec946ea9c580a94adfe0b617f3573/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:289200a18fa698949d2b39c671c2cc7a24d44096784e76614899a7ccf2574b7b", size = 154174, upload-time = "2025-05-02T08:33:13.707Z" }, - { url = "https://files.pythonhosted.org/packages/78/be/8392efc43487ac051eee6c36d5fbd63032d78f7728cb37aebcc98191f1ff/charset_normalizer-3.4.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4a476b06fbcf359ad25d34a057b7219281286ae2477cc5ff5e3f70a246971148", size = 149166, upload-time = "2025-05-02T08:33:15.458Z" }, - { url = "https://files.pythonhosted.org/packages/44/96/392abd49b094d30b91d9fbda6a69519e95802250b777841cf3bda8fe136c/charset_normalizer-3.4.2-cp313-cp313-win32.whl", hash = "sha256:aaeeb6a479c7667fbe1099af9617c83aaca22182d6cf8c53966491a0f1b7ffb7", size = 98064, upload-time = "2025-05-02T08:33:17.06Z" }, - { url = "https://files.pythonhosted.org/packages/e9/b0/0200da600134e001d91851ddc797809e2fe0ea72de90e09bec5a2fbdaccb/charset_normalizer-3.4.2-cp313-cp313-win_amd64.whl", hash = "sha256:aa6af9e7d59f9c12b33ae4e9450619cf2488e2bbe9b44030905877f0b2324980", size = 105641, upload-time = "2025-05-02T08:33:18.753Z" }, - { url = "https://files.pythonhosted.org/packages/20/94/c5790835a017658cbfabd07f3bfb549140c3ac458cfc196323996b10095a/charset_normalizer-3.4.2-py3-none-any.whl", hash = "sha256:7f56930ab0abd1c45cd15be65cc741c28b1c9a34876ce8c17a2fa107810c0af0", size = 52626, upload-time = "2025-05-02T08:34:40.053Z" }, +version = "3.4.6" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7b/60/e3bec1881450851b087e301bedc3daa9377a4d45f1c26aa90b0b235e38aa/charset_normalizer-3.4.6.tar.gz", hash = "sha256:1ae6b62897110aa7c79ea2f5dd38d1abca6db663687c0b1ad9aed6f6bae3d9d6", size = 143363, upload-time = "2026-03-15T18:53:25.478Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e6/8c/2c56124c6dc53a774d435f985b5973bc592f42d437be58c0c92d65ae7296/charset_normalizer-3.4.6-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:2e1d8ca8611099001949d1cdfaefc510cf0f212484fe7c565f735b68c78c3c95", size = 298751, upload-time = "2026-03-15T18:50:00.003Z" }, + { url = "https://files.pythonhosted.org/packages/86/2a/2a7db6b314b966a3bcad8c731c0719c60b931b931de7ae9f34b2839289ee/charset_normalizer-3.4.6-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e25369dc110d58ddf29b949377a93e0716d72a24f62bad72b2b39f155949c1fd", size = 200027, upload-time = "2026-03-15T18:50:01.702Z" }, + { url = "https://files.pythonhosted.org/packages/68/f2/0fe775c74ae25e2a3b07b01538fc162737b3e3f795bada3bc26f4d4d495c/charset_normalizer-3.4.6-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:259695e2ccc253feb2a016303543d691825e920917e31f894ca1a687982b1de4", size = 220741, upload-time = "2026-03-15T18:50:03.194Z" }, + { url = "https://files.pythonhosted.org/packages/10/98/8085596e41f00b27dd6aa1e68413d1ddda7e605f34dd546833c61fddd709/charset_normalizer-3.4.6-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:dda86aba335c902b6149a02a55b38e96287157e609200811837678214ba2b1db", size = 215802, upload-time = "2026-03-15T18:50:05.859Z" }, + { url = "https://files.pythonhosted.org/packages/fd/ce/865e4e09b041bad659d682bbd98b47fb490b8e124f9398c9448065f64fee/charset_normalizer-3.4.6-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:51fb3c322c81d20567019778cb5a4a6f2dc1c200b886bc0d636238e364848c89", size = 207908, upload-time = "2026-03-15T18:50:07.676Z" }, + { url = "https://files.pythonhosted.org/packages/a8/54/8c757f1f7349262898c2f169e0d562b39dcb977503f18fdf0814e923db78/charset_normalizer-3.4.6-cp310-cp310-manylinux_2_31_armv7l.whl", hash = "sha256:4482481cb0572180b6fd976a4d5c72a30263e98564da68b86ec91f0fe35e8565", size = 194357, upload-time = "2026-03-15T18:50:09.327Z" }, + { url = "https://files.pythonhosted.org/packages/6f/29/e88f2fac9218907fc7a70722b393d1bbe8334c61fe9c46640dba349b6e66/charset_normalizer-3.4.6-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:39f5068d35621da2881271e5c3205125cc456f54e9030d3f723288c873a71bf9", size = 205610, upload-time = "2026-03-15T18:50:10.732Z" }, + { url = "https://files.pythonhosted.org/packages/4c/c5/21d7bb0cb415287178450171d130bed9d664211fdd59731ed2c34267b07d/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:8bea55c4eef25b0b19a0337dc4e3f9a15b00d569c77211fa8cde38684f234fb7", size = 203512, upload-time = "2026-03-15T18:50:12.535Z" }, + { url = "https://files.pythonhosted.org/packages/a4/be/ce52f3c7fdb35cc987ad38a53ebcef52eec498f4fb6c66ecfe62cfe57ba2/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_armv7l.whl", hash = "sha256:f0cdaecd4c953bfae0b6bb64910aaaca5a424ad9c72d85cb88417bb9814f7550", size = 195398, upload-time = "2026-03-15T18:50:14.236Z" }, + { url = "https://files.pythonhosted.org/packages/81/a0/3ab5dd39d4859a3555e5dadfc8a9fa7f8352f8c183d1a65c90264517da0e/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:150b8ce8e830eb7ccb029ec9ca36022f756986aaaa7956aad6d9ec90089338c0", size = 221772, upload-time = "2026-03-15T18:50:15.581Z" }, + { url = "https://files.pythonhosted.org/packages/04/6e/6a4e41a97ba6b2fa87f849c41e4d229449a586be85053c4d90135fe82d26/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:e68c14b04827dd76dcbd1aeea9e604e3e4b78322d8faf2f8132c7138efa340a8", size = 205759, upload-time = "2026-03-15T18:50:17.047Z" }, + { url = "https://files.pythonhosted.org/packages/db/3b/34a712a5ee64a6957bf355b01dc17b12de457638d436fdb05d01e463cd1c/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_s390x.whl", hash = "sha256:3778fd7d7cd04ae8f54651f4a7a0bd6e39a0cf20f801720a4c21d80e9b7ad6b0", size = 216938, upload-time = "2026-03-15T18:50:18.44Z" }, + { url = "https://files.pythonhosted.org/packages/cb/05/5bd1e12da9ab18790af05c61aafd01a60f489778179b621ac2a305243c62/charset_normalizer-3.4.6-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:dad6e0f2e481fffdcf776d10ebee25e0ef89f16d691f1e5dee4b586375fdc64b", size = 210138, upload-time = "2026-03-15T18:50:19.852Z" }, + { url = "https://files.pythonhosted.org/packages/bd/8e/3cb9e2d998ff6b21c0a1860343cb7b83eba9cdb66b91410e18fc4969d6ab/charset_normalizer-3.4.6-cp310-cp310-win32.whl", hash = "sha256:74a2e659c7ecbc73562e2a15e05039f1e22c75b7c7618b4b574a3ea9118d1557", size = 144137, upload-time = "2026-03-15T18:50:21.505Z" }, + { url = "https://files.pythonhosted.org/packages/d8/8f/78f5489ffadb0db3eb7aff53d31c24531d33eb545f0c6f6567c25f49a5ff/charset_normalizer-3.4.6-cp310-cp310-win_amd64.whl", hash = "sha256:aa9cccf4a44b9b62d8ba8b4dd06c649ba683e4bf04eea606d2e94cfc2d6ff4d6", size = 154244, upload-time = "2026-03-15T18:50:22.81Z" }, + { url = "https://files.pythonhosted.org/packages/e4/74/e472659dffb0cadb2f411282d2d76c60da1fc94076d7fffed4ae8a93ec01/charset_normalizer-3.4.6-cp310-cp310-win_arm64.whl", hash = "sha256:e985a16ff513596f217cee86c21371b8cd011c0f6f056d0920aa2d926c544058", size = 143312, upload-time = "2026-03-15T18:50:24.074Z" }, + { url = "https://files.pythonhosted.org/packages/62/28/ff6f234e628a2de61c458be2779cb182bc03f6eec12200d4a525bbfc9741/charset_normalizer-3.4.6-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:82060f995ab5003a2d6e0f4ad29065b7672b6593c8c63559beefe5b443242c3e", size = 293582, upload-time = "2026-03-15T18:50:25.454Z" }, + { url = "https://files.pythonhosted.org/packages/1c/b7/b1a117e5385cbdb3205f6055403c2a2a220c5ea80b8716c324eaf75c5c95/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:60c74963d8350241a79cb8feea80e54d518f72c26db618862a8f53e5023deaf9", size = 197240, upload-time = "2026-03-15T18:50:27.196Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5f/2574f0f09f3c3bc1b2f992e20bce6546cb1f17e111c5be07308dc5427956/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:f6e4333fb15c83f7d1482a76d45a0818897b3d33f00efd215528ff7c51b8e35d", size = 217363, upload-time = "2026-03-15T18:50:28.601Z" }, + { url = "https://files.pythonhosted.org/packages/4a/d1/0ae20ad77bc949ddd39b51bf383b6ca932f2916074c95cad34ae465ab71f/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:bc72863f4d9aba2e8fd9085e63548a324ba706d2ea2c83b260da08a59b9482de", size = 212994, upload-time = "2026-03-15T18:50:30.102Z" }, + { url = "https://files.pythonhosted.org/packages/60/ac/3233d262a310c1b12633536a07cde5ddd16985e6e7e238e9f3f9423d8eb9/charset_normalizer-3.4.6-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9cc4fc6c196d6a8b76629a70ddfcd4635a6898756e2d9cac5565cf0654605d73", size = 204697, upload-time = "2026-03-15T18:50:31.654Z" }, + { url = "https://files.pythonhosted.org/packages/25/3c/8a18fc411f085b82303cfb7154eed5bd49c77035eb7608d049468b53f87c/charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_armv7l.whl", hash = "sha256:0c173ce3a681f309f31b87125fecec7a5d1347261ea11ebbb856fa6006b23c8c", size = 191673, upload-time = "2026-03-15T18:50:33.433Z" }, + { url = "https://files.pythonhosted.org/packages/ff/a7/11cfe61d6c5c5c7438d6ba40919d0306ed83c9ab957f3d4da2277ff67836/charset_normalizer-3.4.6-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c907cdc8109f6c619e6254212e794d6548373cc40e1ec75e6e3823d9135d29cc", size = 201120, upload-time = "2026-03-15T18:50:35.105Z" }, + { url = "https://files.pythonhosted.org/packages/b5/10/cf491fa1abd47c02f69687046b896c950b92b6cd7337a27e6548adbec8e4/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:404a1e552cf5b675a87f0651f8b79f5f1e6fd100ee88dc612f89aa16abd4486f", size = 200911, upload-time = "2026-03-15T18:50:36.819Z" }, + { url = "https://files.pythonhosted.org/packages/28/70/039796160b48b18ed466fde0af84c1b090c4e288fae26cd674ad04a2d703/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_armv7l.whl", hash = "sha256:e3c701e954abf6fc03a49f7c579cc80c2c6cc52525340ca3186c41d3f33482ef", size = 192516, upload-time = "2026-03-15T18:50:38.228Z" }, + { url = "https://files.pythonhosted.org/packages/ff/34/c56f3223393d6ff3124b9e78f7de738047c2d6bc40a4f16ac0c9d7a1cb3c/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:7a6967aaf043bceabab5412ed6bd6bd26603dae84d5cb75bf8d9a74a4959d398", size = 218795, upload-time = "2026-03-15T18:50:39.664Z" }, + { url = "https://files.pythonhosted.org/packages/e8/3b/ce2d4f86c5282191a041fdc5a4ce18f1c6bd40a5bd1f74cf8625f08d51c1/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:5feb91325bbceade6afab43eb3b508c63ee53579fe896c77137ded51c6b6958e", size = 201833, upload-time = "2026-03-15T18:50:41.552Z" }, + { url = "https://files.pythonhosted.org/packages/3b/9b/b6a9f76b0fd7c5b5ec58b228ff7e85095370282150f0bd50b3126f5506d6/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_s390x.whl", hash = "sha256:f820f24b09e3e779fe84c3c456cb4108a7aa639b0d1f02c28046e11bfcd088ed", size = 213920, upload-time = "2026-03-15T18:50:43.33Z" }, + { url = "https://files.pythonhosted.org/packages/ae/98/7bc23513a33d8172365ed30ee3a3b3fe1ece14a395e5fc94129541fc6003/charset_normalizer-3.4.6-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:b35b200d6a71b9839a46b9b7fff66b6638bb52fc9658aa58796b0326595d3021", size = 206951, upload-time = "2026-03-15T18:50:44.789Z" }, + { url = "https://files.pythonhosted.org/packages/32/73/c0b86f3d1458468e11aec870e6b3feac931facbe105a894b552b0e518e79/charset_normalizer-3.4.6-cp311-cp311-win32.whl", hash = "sha256:9ca4c0b502ab399ef89248a2c84c54954f77a070f28e546a85e91da627d1301e", size = 143703, upload-time = "2026-03-15T18:50:46.103Z" }, + { url = "https://files.pythonhosted.org/packages/c6/e3/76f2facfe8eddee0bbd38d2594e709033338eae44ebf1738bcefe0a06185/charset_normalizer-3.4.6-cp311-cp311-win_amd64.whl", hash = "sha256:a9e68c9d88823b274cf1e72f28cb5dc89c990edf430b0bfd3e2fb0785bfeabf4", size = 153857, upload-time = "2026-03-15T18:50:47.563Z" }, + { url = "https://files.pythonhosted.org/packages/e2/dc/9abe19c9b27e6cd3636036b9d1b387b78c40dedbf0b47f9366737684b4b0/charset_normalizer-3.4.6-cp311-cp311-win_arm64.whl", hash = "sha256:97d0235baafca5f2b09cf332cc275f021e694e8362c6bb9c96fc9a0eb74fc316", size = 142751, upload-time = "2026-03-15T18:50:49.234Z" }, + { url = "https://files.pythonhosted.org/packages/e5/62/c0815c992c9545347aeea7859b50dc9044d147e2e7278329c6e02ac9a616/charset_normalizer-3.4.6-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:2ef7fedc7a6ecbe99969cd09632516738a97eeb8bd7258bf8a0f23114c057dab", size = 295154, upload-time = "2026-03-15T18:50:50.88Z" }, + { url = "https://files.pythonhosted.org/packages/a8/37/bdca6613c2e3c58c7421891d80cc3efa1d32e882f7c4a7ee6039c3fc951a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a4ea868bc28109052790eb2b52a9ab33f3aa7adc02f96673526ff47419490e21", size = 199191, upload-time = "2026-03-15T18:50:52.658Z" }, + { url = "https://files.pythonhosted.org/packages/6c/92/9934d1bbd69f7f398b38c5dae1cbf9cc672e7c34a4adf7b17c0a9c17d15d/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:836ab36280f21fc1a03c99cd05c6b7af70d2697e374c7af0b61ed271401a72a2", size = 218674, upload-time = "2026-03-15T18:50:54.102Z" }, + { url = "https://files.pythonhosted.org/packages/af/90/25f6ab406659286be929fd89ab0e78e38aa183fc374e03aa3c12d730af8a/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:f1ce721c8a7dfec21fcbdfe04e8f68174183cf4e8188e0645e92aa23985c57ff", size = 215259, upload-time = "2026-03-15T18:50:55.616Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ef/79a463eb0fff7f96afa04c1d4c51f8fc85426f918db467854bfb6a569ce3/charset_normalizer-3.4.6-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e28d62a8fc7a1fa411c43bd65e346f3bce9716dc51b897fbe930c5987b402d5", size = 207276, upload-time = "2026-03-15T18:50:57.054Z" }, + { url = "https://files.pythonhosted.org/packages/f7/72/d0426afec4b71dc159fa6b4e68f868cd5a3ecd918fec5813a15d292a7d10/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_armv7l.whl", hash = "sha256:530d548084c4a9f7a16ed4a294d459b4f229db50df689bfe92027452452943a0", size = 195161, upload-time = "2026-03-15T18:50:58.686Z" }, + { url = "https://files.pythonhosted.org/packages/bf/18/c82b06a68bfcb6ce55e508225d210c7e6a4ea122bfc0748892f3dc4e8e11/charset_normalizer-3.4.6-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:30f445ae60aad5e1f8bdbb3108e39f6fbc09f4ea16c815c66578878325f8f15a", size = 203452, upload-time = "2026-03-15T18:51:00.196Z" }, + { url = "https://files.pythonhosted.org/packages/44/d6/0c25979b92f8adafdbb946160348d8d44aa60ce99afdc27df524379875cb/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:ac2393c73378fea4e52aa56285a3d64be50f1a12395afef9cce47772f60334c2", size = 202272, upload-time = "2026-03-15T18:51:01.703Z" }, + { url = "https://files.pythonhosted.org/packages/2e/3d/7fea3e8fe84136bebbac715dd1221cc25c173c57a699c030ab9b8900cbb7/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_armv7l.whl", hash = "sha256:90ca27cd8da8118b18a52d5f547859cc1f8354a00cd1e8e5120df3e30d6279e5", size = 195622, upload-time = "2026-03-15T18:51:03.526Z" }, + { url = "https://files.pythonhosted.org/packages/57/8a/d6f7fd5cb96c58ef2f681424fbca01264461336d2a7fc875e4446b1f1346/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:8e5a94886bedca0f9b78fecd6afb6629142fd2605aa70a125d49f4edc6037ee6", size = 220056, upload-time = "2026-03-15T18:51:05.269Z" }, + { url = "https://files.pythonhosted.org/packages/16/50/478cdda782c8c9c3fb5da3cc72dd7f331f031e7f1363a893cdd6ca0f8de0/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:695f5c2823691a25f17bc5d5ffe79fa90972cc34b002ac6c843bb8a1720e950d", size = 203751, upload-time = "2026-03-15T18:51:06.858Z" }, + { url = "https://files.pythonhosted.org/packages/75/fc/cc2fcac943939c8e4d8791abfa139f685e5150cae9f94b60f12520feaa9b/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_s390x.whl", hash = "sha256:231d4da14bcd9301310faf492051bee27df11f2bc7549bc0bb41fef11b82daa2", size = 216563, upload-time = "2026-03-15T18:51:08.564Z" }, + { url = "https://files.pythonhosted.org/packages/a8/b7/a4add1d9a5f68f3d037261aecca83abdb0ab15960a3591d340e829b37298/charset_normalizer-3.4.6-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a056d1ad2633548ca18ffa2f85c202cfb48b68615129143915b8dc72a806a923", size = 209265, upload-time = "2026-03-15T18:51:10.312Z" }, + { url = "https://files.pythonhosted.org/packages/6c/18/c094561b5d64a24277707698e54b7f67bd17a4f857bbfbb1072bba07c8bf/charset_normalizer-3.4.6-cp312-cp312-win32.whl", hash = "sha256:c2274ca724536f173122f36c98ce188fd24ce3dad886ec2b7af859518ce008a4", size = 144229, upload-time = "2026-03-15T18:51:11.694Z" }, + { url = "https://files.pythonhosted.org/packages/ab/20/0567efb3a8fd481b8f34f739ebddc098ed062a59fed41a8d193a61939e8f/charset_normalizer-3.4.6-cp312-cp312-win_amd64.whl", hash = "sha256:c8ae56368f8cc97c7e40a7ee18e1cedaf8e780cd8bc5ed5ac8b81f238614facb", size = 154277, upload-time = "2026-03-15T18:51:13.004Z" }, + { url = "https://files.pythonhosted.org/packages/15/57/28d79b44b51933119e21f65479d0864a8d5893e494cf5daab15df0247c17/charset_normalizer-3.4.6-cp312-cp312-win_arm64.whl", hash = "sha256:899d28f422116b08be5118ef350c292b36fc15ec2daeb9ea987c89281c7bb5c4", size = 142817, upload-time = "2026-03-15T18:51:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/1e/1d/4fdabeef4e231153b6ed7567602f3b68265ec4e5b76d6024cf647d43d981/charset_normalizer-3.4.6-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:11afb56037cbc4b1555a34dd69151e8e069bee82e613a73bef6e714ce733585f", size = 294823, upload-time = "2026-03-15T18:51:15.755Z" }, + { url = "https://files.pythonhosted.org/packages/47/7b/20e809b89c69d37be748d98e84dce6820bf663cf19cf6b942c951a3e8f41/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:423fb7e748a08f854a08a222b983f4df1912b1daedce51a72bd24fe8f26a1843", size = 198527, upload-time = "2026-03-15T18:51:17.177Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/4f8d27527d59c039dce6f7622593cdcd3d70a8504d87d09eb11e9fdc6062/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d73beaac5e90173ac3deb9928a74763a6d230f494e4bfb422c217a0ad8e629bf", size = 218388, upload-time = "2026-03-15T18:51:18.934Z" }, + { url = "https://files.pythonhosted.org/packages/f6/9b/4770ccb3e491a9bacf1c46cc8b812214fe367c86a96353ccc6daf87b01ec/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:d60377dce4511655582e300dc1e5a5f24ba0cb229005a1d5c8d0cb72bb758ab8", size = 214563, upload-time = "2026-03-15T18:51:20.374Z" }, + { url = "https://files.pythonhosted.org/packages/2b/58/a199d245894b12db0b957d627516c78e055adc3a0d978bc7f65ddaf7c399/charset_normalizer-3.4.6-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:530e8cebeea0d76bdcf93357aa5e41336f48c3dc709ac52da2bb167c5b8271d9", size = 206587, upload-time = "2026-03-15T18:51:21.807Z" }, + { url = "https://files.pythonhosted.org/packages/7e/70/3def227f1ec56f5c69dfc8392b8bd63b11a18ca8178d9211d7cc5e5e4f27/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_armv7l.whl", hash = "sha256:a26611d9987b230566f24a0a125f17fe0de6a6aff9f25c9f564aaa2721a5fb88", size = 194724, upload-time = "2026-03-15T18:51:23.508Z" }, + { url = "https://files.pythonhosted.org/packages/58/ab/9318352e220c05efd31c2779a23b50969dc94b985a2efa643ed9077bfca5/charset_normalizer-3.4.6-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:34315ff4fc374b285ad7f4a0bf7dcbfe769e1b104230d40f49f700d4ab6bbd84", size = 202956, upload-time = "2026-03-15T18:51:25.239Z" }, + { url = "https://files.pythonhosted.org/packages/75/13/f3550a3ac25b70f87ac98c40d3199a8503676c2f1620efbf8d42095cfc40/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5f8ddd609f9e1af8c7bd6e2aca279c931aefecd148a14402d4e368f3171769fd", size = 201923, upload-time = "2026-03-15T18:51:26.682Z" }, + { url = "https://files.pythonhosted.org/packages/1b/db/c5c643b912740b45e8eec21de1bbab8e7fc085944d37e1e709d3dcd9d72f/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_armv7l.whl", hash = "sha256:80d0a5615143c0b3225e5e3ef22c8d5d51f3f72ce0ea6fb84c943546c7b25b6c", size = 195366, upload-time = "2026-03-15T18:51:28.129Z" }, + { url = "https://files.pythonhosted.org/packages/5a/67/3b1c62744f9b2448443e0eb160d8b001c849ec3fef591e012eda6484787c/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:92734d4d8d187a354a556626c221cd1a892a4e0802ccb2af432a1d85ec012194", size = 219752, upload-time = "2026-03-15T18:51:29.556Z" }, + { url = "https://files.pythonhosted.org/packages/f6/98/32ffbaf7f0366ffb0445930b87d103f6b406bc2c271563644bde8a2b1093/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:613f19aa6e082cf96e17e3ffd89383343d0d589abda756b7764cf78361fd41dc", size = 203296, upload-time = "2026-03-15T18:51:30.921Z" }, + { url = "https://files.pythonhosted.org/packages/41/12/5d308c1bbe60cabb0c5ef511574a647067e2a1f631bc8634fcafaccd8293/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_s390x.whl", hash = "sha256:2b1a63e8224e401cafe7739f77efd3f9e7f5f2026bda4aead8e59afab537784f", size = 215956, upload-time = "2026-03-15T18:51:32.399Z" }, + { url = "https://files.pythonhosted.org/packages/53/e9/5f85f6c5e20669dbe56b165c67b0260547dea97dba7e187938833d791687/charset_normalizer-3.4.6-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6cceb5473417d28edd20c6c984ab6fee6c6267d38d906823ebfe20b03d607dc2", size = 208652, upload-time = "2026-03-15T18:51:34.214Z" }, + { url = "https://files.pythonhosted.org/packages/f1/11/897052ea6af56df3eef3ca94edafee410ca699ca0c7b87960ad19932c55e/charset_normalizer-3.4.6-cp313-cp313-win32.whl", hash = "sha256:d7de2637729c67d67cf87614b566626057e95c303bc0a55ffe391f5205e7003d", size = 143940, upload-time = "2026-03-15T18:51:36.15Z" }, + { url = "https://files.pythonhosted.org/packages/a1/5c/724b6b363603e419829f561c854b87ed7c7e31231a7908708ac086cdf3e2/charset_normalizer-3.4.6-cp313-cp313-win_amd64.whl", hash = "sha256:572d7c822caf521f0525ba1bce1a622a0b85cf47ffbdae6c9c19e3b5ac3c4389", size = 154101, upload-time = "2026-03-15T18:51:37.876Z" }, + { url = "https://files.pythonhosted.org/packages/01/a5/7abf15b4c0968e47020f9ca0935fb3274deb87cb288cd187cad92e8cdffd/charset_normalizer-3.4.6-cp313-cp313-win_arm64.whl", hash = "sha256:a4474d924a47185a06411e0064b803c68be044be2d60e50e8bddcc2649957c1f", size = 143109, upload-time = "2026-03-15T18:51:39.565Z" }, + { url = "https://files.pythonhosted.org/packages/25/6f/ffe1e1259f384594063ea1869bfb6be5cdb8bc81020fc36c3636bc8302a1/charset_normalizer-3.4.6-cp314-cp314-macosx_10_15_universal2.whl", hash = "sha256:9cc6e6d9e571d2f863fa77700701dae73ed5f78881efc8b3f9a4398772ff53e8", size = 294458, upload-time = "2026-03-15T18:51:41.134Z" }, + { url = "https://files.pythonhosted.org/packages/56/60/09bb6c13a8c1016c2ed5c6a6488e4ffef506461aa5161662bd7636936fb1/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef5960d965e67165d75b7c7ffc60a83ec5abfc5c11b764ec13ea54fbef8b4421", size = 199277, upload-time = "2026-03-15T18:51:42.953Z" }, + { url = "https://files.pythonhosted.org/packages/00/50/dcfbb72a5138bbefdc3332e8d81a23494bf67998b4b100703fd15fa52d81/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b3694e3f87f8ac7ce279d4355645b3c878d24d1424581b46282f24b92f5a4ae2", size = 218758, upload-time = "2026-03-15T18:51:44.339Z" }, + { url = "https://files.pythonhosted.org/packages/03/b3/d79a9a191bb75f5aa81f3aaaa387ef29ce7cb7a9e5074ba8ea095cc073c2/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5d11595abf8dd942a77883a39d81433739b287b6aa71620f15164f8096221b30", size = 215299, upload-time = "2026-03-15T18:51:45.871Z" }, + { url = "https://files.pythonhosted.org/packages/76/7e/bc8911719f7084f72fd545f647601ea3532363927f807d296a8c88a62c0d/charset_normalizer-3.4.6-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7bda6eebafd42133efdca535b04ccb338ab29467b3f7bf79569883676fc628db", size = 206811, upload-time = "2026-03-15T18:51:47.308Z" }, + { url = "https://files.pythonhosted.org/packages/e2/40/c430b969d41dda0c465aa36cc7c2c068afb67177bef50905ac371b28ccc7/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_armv7l.whl", hash = "sha256:bbc8c8650c6e51041ad1be191742b8b421d05bbd3410f43fa2a00c8db87678e8", size = 193706, upload-time = "2026-03-15T18:51:48.849Z" }, + { url = "https://files.pythonhosted.org/packages/48/15/e35e0590af254f7df984de1323640ef375df5761f615b6225ba8deb9799a/charset_normalizer-3.4.6-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:22c6f0c2fbc31e76c3b8a86fba1a56eda6166e238c29cdd3d14befdb4a4e4815", size = 202706, upload-time = "2026-03-15T18:51:50.257Z" }, + { url = "https://files.pythonhosted.org/packages/5e/bd/f736f7b9cc5e93a18b794a50346bb16fbfd6b37f99e8f306f7951d27c17c/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7edbed096e4a4798710ed6bc75dcaa2a21b68b6c356553ac4823c3658d53743a", size = 202497, upload-time = "2026-03-15T18:51:52.012Z" }, + { url = "https://files.pythonhosted.org/packages/9d/ba/2cc9e3e7dfdf7760a6ed8da7446d22536f3d0ce114ac63dee2a5a3599e62/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_armv7l.whl", hash = "sha256:7f9019c9cb613f084481bd6a100b12e1547cf2efe362d873c2e31e4035a6fa43", size = 193511, upload-time = "2026-03-15T18:51:53.723Z" }, + { url = "https://files.pythonhosted.org/packages/9e/cb/5be49b5f776e5613be07298c80e1b02a2d900f7a7de807230595c85a8b2e/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:58c948d0d086229efc484fe2f30c2d382c86720f55cd9bc33591774348ad44e0", size = 220133, upload-time = "2026-03-15T18:51:55.333Z" }, + { url = "https://files.pythonhosted.org/packages/83/43/99f1b5dad345accb322c80c7821071554f791a95ee50c1c90041c157ae99/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:419a9d91bd238052642a51938af8ac05da5b3343becde08d5cdeab9046df9ee1", size = 203035, upload-time = "2026-03-15T18:51:56.736Z" }, + { url = "https://files.pythonhosted.org/packages/87/9a/62c2cb6a531483b55dddff1a68b3d891a8b498f3ca555fbcf2978e804d9d/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_s390x.whl", hash = "sha256:5273b9f0b5835ff0350c0828faea623c68bfa65b792720c453e22b25cc72930f", size = 216321, upload-time = "2026-03-15T18:51:58.17Z" }, + { url = "https://files.pythonhosted.org/packages/6e/79/94a010ff81e3aec7c293eb82c28f930918e517bc144c9906a060844462eb/charset_normalizer-3.4.6-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:0e901eb1049fdb80f5bd11ed5ea1e498ec423102f7a9b9e4645d5b8204ff2815", size = 208973, upload-time = "2026-03-15T18:51:59.998Z" }, + { url = "https://files.pythonhosted.org/packages/2a/57/4ecff6d4ec8585342f0c71bc03efaa99cb7468f7c91a57b105bcd561cea8/charset_normalizer-3.4.6-cp314-cp314-win32.whl", hash = "sha256:b4ff1d35e8c5bd078be89349b6f3a845128e685e751b6ea1169cf2160b344c4d", size = 144610, upload-time = "2026-03-15T18:52:02.213Z" }, + { url = "https://files.pythonhosted.org/packages/80/94/8434a02d9d7f168c25767c64671fead8d599744a05d6a6c877144c754246/charset_normalizer-3.4.6-cp314-cp314-win_amd64.whl", hash = "sha256:74119174722c4349af9708993118581686f343adc1c8c9c007d59be90d077f3f", size = 154962, upload-time = "2026-03-15T18:52:03.658Z" }, + { url = "https://files.pythonhosted.org/packages/46/4c/48f2cdbfd923026503dfd67ccea45c94fd8fe988d9056b468579c66ed62b/charset_normalizer-3.4.6-cp314-cp314-win_arm64.whl", hash = "sha256:e5bcc1a1ae744e0bb59641171ae53743760130600da8db48cbb6e4918e186e4e", size = 143595, upload-time = "2026-03-15T18:52:05.123Z" }, + { url = "https://files.pythonhosted.org/packages/31/93/8878be7569f87b14f1d52032946131bcb6ebbd8af3e20446bc04053dc3f1/charset_normalizer-3.4.6-cp314-cp314t-macosx_10_15_universal2.whl", hash = "sha256:ad8faf8df23f0378c6d527d8b0b15ea4a2e23c89376877c598c4870d1b2c7866", size = 314828, upload-time = "2026-03-15T18:52:06.831Z" }, + { url = "https://files.pythonhosted.org/packages/06/b6/fae511ca98aac69ecc35cde828b0a3d146325dd03d99655ad38fc2cc3293/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f5ea69428fa1b49573eef0cc44a1d43bebd45ad0c611eb7d7eac760c7ae771bc", size = 208138, upload-time = "2026-03-15T18:52:08.239Z" }, + { url = "https://files.pythonhosted.org/packages/54/57/64caf6e1bf07274a1e0b7c160a55ee9e8c9ec32c46846ce59b9c333f7008/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:06a7e86163334edfc5d20fe104db92fcd666e5a5df0977cb5680a506fe26cc8e", size = 224679, upload-time = "2026-03-15T18:52:10.043Z" }, + { url = "https://files.pythonhosted.org/packages/aa/cb/9ff5a25b9273ef160861b41f6937f86fae18b0792fe0a8e75e06acb08f1d/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e1f6e2f00a6b8edb562826e4632e26d063ac10307e80f7461f7de3ad8ef3f077", size = 223475, upload-time = "2026-03-15T18:52:11.854Z" }, + { url = "https://files.pythonhosted.org/packages/fc/97/440635fc093b8d7347502a377031f9605a1039c958f3cd18dcacffb37743/charset_normalizer-3.4.6-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:95b52c68d64c1878818687a473a10547b3292e82b6f6fe483808fb1468e2f52f", size = 215230, upload-time = "2026-03-15T18:52:13.325Z" }, + { url = "https://files.pythonhosted.org/packages/cd/24/afff630feb571a13f07c8539fbb502d2ab494019492aaffc78ef41f1d1d0/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:7504e9b7dc05f99a9bbb4525c67a2c155073b44d720470a148b34166a69c054e", size = 199045, upload-time = "2026-03-15T18:52:14.752Z" }, + { url = "https://files.pythonhosted.org/packages/e5/17/d1399ecdaf7e0498c327433e7eefdd862b41236a7e484355b8e0e5ebd64b/charset_normalizer-3.4.6-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:172985e4ff804a7ad08eebec0a1640ece87ba5041d565fff23c8f99c1f389484", size = 211658, upload-time = "2026-03-15T18:52:16.278Z" }, + { url = "https://files.pythonhosted.org/packages/b5/38/16baa0affb957b3d880e5ac2144caf3f9d7de7bc4a91842e447fbb5e8b67/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4be9f4830ba8741527693848403e2c457c16e499100963ec711b1c6f2049b7c7", size = 210769, upload-time = "2026-03-15T18:52:17.782Z" }, + { url = "https://files.pythonhosted.org/packages/05/34/c531bc6ac4c21da9ddfddb3107be2287188b3ea4b53b70fc58f2a77ac8d8/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_armv7l.whl", hash = "sha256:79090741d842f564b1b2827c0b82d846405b744d31e84f18d7a7b41c20e473ff", size = 201328, upload-time = "2026-03-15T18:52:19.553Z" }, + { url = "https://files.pythonhosted.org/packages/fa/73/a5a1e9ca5f234519c1953608a03fe109c306b97fdfb25f09182babad51a7/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:87725cfb1a4f1f8c2fc9890ae2f42094120f4b44db9360be5d99a4c6b0e03a9e", size = 225302, upload-time = "2026-03-15T18:52:21.043Z" }, + { url = "https://files.pythonhosted.org/packages/ba/f6/cd782923d112d296294dea4bcc7af5a7ae0f86ab79f8fefbda5526b6cfc0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:fcce033e4021347d80ed9c66dcf1e7b1546319834b74445f561d2e2221de5659", size = 211127, upload-time = "2026-03-15T18:52:22.491Z" }, + { url = "https://files.pythonhosted.org/packages/0e/c5/0b6898950627af7d6103a449b22320372c24c6feda91aa24e201a478d161/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_s390x.whl", hash = "sha256:ca0276464d148c72defa8bb4390cce01b4a0e425f3b50d1435aa6d7a18107602", size = 222840, upload-time = "2026-03-15T18:52:24.113Z" }, + { url = "https://files.pythonhosted.org/packages/7d/25/c4bba773bef442cbdc06111d40daa3de5050a676fa26e85090fc54dd12f0/charset_normalizer-3.4.6-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:197c1a244a274bb016dd8b79204850144ef77fe81c5b797dc389327adb552407", size = 216890, upload-time = "2026-03-15T18:52:25.541Z" }, + { url = "https://files.pythonhosted.org/packages/35/1a/05dacadb0978da72ee287b0143097db12f2e7e8d3ffc4647da07a383b0b7/charset_normalizer-3.4.6-cp314-cp314t-win32.whl", hash = "sha256:2a24157fa36980478dd1770b585c0f30d19e18f4fb0c47c13aa568f871718579", size = 155379, upload-time = "2026-03-15T18:52:27.05Z" }, + { url = "https://files.pythonhosted.org/packages/5d/7a/d269d834cb3a76291651256f3b9a5945e81d0a49ab9f4a498964e83c0416/charset_normalizer-3.4.6-cp314-cp314t-win_amd64.whl", hash = "sha256:cd5e2801c89992ed8c0a3f0293ae83c159a60d9a5d685005383ef4caca77f2c4", size = 169043, upload-time = "2026-03-15T18:52:28.502Z" }, + { url = "https://files.pythonhosted.org/packages/23/06/28b29fba521a37a8932c6a84192175c34d49f84a6d4773fa63d05f9aff22/charset_normalizer-3.4.6-cp314-cp314t-win_arm64.whl", hash = "sha256:47955475ac79cc504ef2704b192364e51d0d473ad452caedd0002605f780101c", size = 148523, upload-time = "2026-03-15T18:52:29.956Z" }, + { url = "https://files.pythonhosted.org/packages/2a/68/687187c7e26cb24ccbd88e5069f5ef00eba804d36dde11d99aad0838ab45/charset_normalizer-3.4.6-py3-none-any.whl", hash = "sha256:947cf925bc916d90adba35a64c82aace04fa39b46b52d4630ece166655905a69", size = 61455, upload-time = "2026-03-15T18:53:23.833Z" }, ] [[package]] name = "click" -version = "8.2.1" +version = "8.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "colorama", marker = "sys_platform == 'win32'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/60/6c/8ca2efa64cf75a977a0d7fac081354553ebe483345c734fb6b6515d96bbc/click-8.2.1.tar.gz", hash = "sha256:27c491cc05d968d271d5a1db13e3b5a184636d9d930f148c50b038f0d0646202", size = 286342, upload-time = "2025-05-20T23:19:49.832Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3d/fa/656b739db8587d7b5dfa22e22ed02566950fbfbcdc20311993483657a5c0/click-8.3.1.tar.gz", hash = "sha256:12ff4785d337a1bb490bb7e9c2b1ee5da3112e94a8622f26a6c77f5d2fc6842a", size = 295065, upload-time = "2025-11-15T20:45:42.706Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/85/32/10bb5764d90a8eee674e9dc6f4db6a0ab47c8c4d0d83c27f7c39ac415a4d/click-8.2.1-py3-none-any.whl", hash = "sha256:61a3265b914e850b85317d0b3109c7f8cd35a670f963866005d6ef1d5175a12b", size = 102215, upload-time = "2025-05-20T23:19:47.796Z" }, + { url = "https://files.pythonhosted.org/packages/98/78/01c019cdb5d6498122777c1a43056ebb3ebfeef2076d9d026bfe15583b2b/click-8.3.1-py3-none-any.whl", hash = "sha256:981153a64e25f12d547d3426c367a4857371575ee7ad18df2a6183ab0545b2a6", size = 108274, upload-time = "2025-11-15T20:45:41.139Z" }, ] [[package]] @@ -517,101 +567,115 @@ wheels = [ [[package]] name = "coverage" -version = "7.13.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/23/f9/e92df5e07f3fc8d4c7f9a0f146ef75446bf870351cd37b788cf5897f8079/coverage-7.13.1.tar.gz", hash = "sha256:b7593fe7eb5feaa3fbb461ac79aac9f9fc0387a5ca8080b0c6fe2ca27b091afd", size = 825862, upload-time = "2025-12-28T15:42:56.969Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2d/9a/3742e58fd04b233df95c012ee9f3dfe04708a5e1d32613bd2d47d4e1be0d/coverage-7.13.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e1fa280b3ad78eea5be86f94f461c04943d942697e0dac889fa18fff8f5f9147", size = 218633, upload-time = "2025-12-28T15:40:10.165Z" }, - { url = "https://files.pythonhosted.org/packages/7e/45/7e6bdc94d89cd7c8017ce735cf50478ddfe765d4fbf0c24d71d30ea33d7a/coverage-7.13.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:c3d8c679607220979434f494b139dfb00131ebf70bb406553d69c1ff01a5c33d", size = 219147, upload-time = "2025-12-28T15:40:12.069Z" }, - { url = "https://files.pythonhosted.org/packages/f7/38/0d6a258625fd7f10773fe94097dc16937a5f0e3e0cdf3adef67d3ac6baef/coverage-7.13.1-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:339dc63b3eba969067b00f41f15ad161bf2946613156fb131266d8debc8e44d0", size = 245894, upload-time = "2025-12-28T15:40:13.556Z" }, - { url = "https://files.pythonhosted.org/packages/27/58/409d15ea487986994cbd4d06376e9860e9b157cfbfd402b1236770ab8dd2/coverage-7.13.1-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:db622b999ffe49cb891f2fff3b340cdc2f9797d01a0a202a0973ba2562501d90", size = 247721, upload-time = "2025-12-28T15:40:15.37Z" }, - { url = "https://files.pythonhosted.org/packages/da/bf/6e8056a83fd7a96c93341f1ffe10df636dd89f26d5e7b9ca511ce3bcf0df/coverage-7.13.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d1443ba9acbb593fa7c1c29e011d7c9761545fe35e7652e85ce7f51a16f7e08d", size = 249585, upload-time = "2025-12-28T15:40:17.226Z" }, - { url = "https://files.pythonhosted.org/packages/f4/15/e1daff723f9f5959acb63cbe35b11203a9df77ee4b95b45fffd38b318390/coverage-7.13.1-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c832ec92c4499ac463186af72f9ed4d8daec15499b16f0a879b0d1c8e5cf4a3b", size = 246597, upload-time = "2025-12-28T15:40:19.028Z" }, - { url = "https://files.pythonhosted.org/packages/74/a6/1efd31c5433743a6ddbc9d37ac30c196bb07c7eab3d74fbb99b924c93174/coverage-7.13.1-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:562ec27dfa3f311e0db1ba243ec6e5f6ab96b1edfcfc6cf86f28038bc4961ce6", size = 247626, upload-time = "2025-12-28T15:40:20.846Z" }, - { url = "https://files.pythonhosted.org/packages/6d/9f/1609267dd3e749f57fdd66ca6752567d1c13b58a20a809dc409b263d0b5f/coverage-7.13.1-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:4de84e71173d4dada2897e5a0e1b7877e5eefbfe0d6a44edee6ce31d9b8ec09e", size = 245629, upload-time = "2025-12-28T15:40:22.397Z" }, - { url = "https://files.pythonhosted.org/packages/e2/f6/6815a220d5ec2466383d7cc36131b9fa6ecbe95c50ec52a631ba733f306a/coverage-7.13.1-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:a5a68357f686f8c4d527a2dc04f52e669c2fc1cbde38f6f7eb6a0e58cbd17cae", size = 245901, upload-time = "2025-12-28T15:40:23.836Z" }, - { url = "https://files.pythonhosted.org/packages/ac/58/40576554cd12e0872faf6d2c0eb3bc85f71d78427946ddd19ad65201e2c0/coverage-7.13.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:77cc258aeb29a3417062758975521eae60af6f79e930d6993555eeac6a8eac29", size = 246505, upload-time = "2025-12-28T15:40:25.421Z" }, - { url = "https://files.pythonhosted.org/packages/3b/77/9233a90253fba576b0eee81707b5781d0e21d97478e5377b226c5b096c0f/coverage-7.13.1-cp310-cp310-win32.whl", hash = "sha256:bb4f8c3c9a9f34423dba193f241f617b08ffc63e27f67159f60ae6baf2dcfe0f", size = 221257, upload-time = "2025-12-28T15:40:27.217Z" }, - { url = "https://files.pythonhosted.org/packages/e0/43/e842ff30c1a0a623ec80db89befb84a3a7aad7bfe44a6ea77d5a3e61fedd/coverage-7.13.1-cp310-cp310-win_amd64.whl", hash = "sha256:c8e2706ceb622bc63bac98ebb10ef5da80ed70fbd8a7999a5076de3afaef0fb1", size = 222191, upload-time = "2025-12-28T15:40:28.916Z" }, - { url = "https://files.pythonhosted.org/packages/b4/9b/77baf488516e9ced25fc215a6f75d803493fc3f6a1a1227ac35697910c2a/coverage-7.13.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1a55d509a1dc5a5b708b5dad3b5334e07a16ad4c2185e27b40e4dba796ab7f88", size = 218755, upload-time = "2025-12-28T15:40:30.812Z" }, - { url = "https://files.pythonhosted.org/packages/d7/cd/7ab01154e6eb79ee2fab76bf4d89e94c6648116557307ee4ebbb85e5c1bf/coverage-7.13.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4d010d080c4888371033baab27e47c9df7d6fb28d0b7b7adf85a4a49be9298b3", size = 219257, upload-time = "2025-12-28T15:40:32.333Z" }, - { url = "https://files.pythonhosted.org/packages/01/d5/b11ef7863ffbbdb509da0023fad1e9eda1c0eaea61a6d2ea5b17d4ac706e/coverage-7.13.1-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d938b4a840fb1523b9dfbbb454f652967f18e197569c32266d4d13f37244c3d9", size = 249657, upload-time = "2025-12-28T15:40:34.1Z" }, - { url = "https://files.pythonhosted.org/packages/f7/7c/347280982982383621d29b8c544cf497ae07ac41e44b1ca4903024131f55/coverage-7.13.1-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bf100a3288f9bb7f919b87eb84f87101e197535b9bd0e2c2b5b3179633324fee", size = 251581, upload-time = "2025-12-28T15:40:36.131Z" }, - { url = "https://files.pythonhosted.org/packages/82/f6/ebcfed11036ade4c0d75fa4453a6282bdd225bc073862766eec184a4c643/coverage-7.13.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ef6688db9bf91ba111ae734ba6ef1a063304a881749726e0d3575f5c10a9facf", size = 253691, upload-time = "2025-12-28T15:40:37.626Z" }, - { url = "https://files.pythonhosted.org/packages/02/92/af8f5582787f5d1a8b130b2dcba785fa5e9a7a8e121a0bb2220a6fdbdb8a/coverage-7.13.1-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0b609fc9cdbd1f02e51f67f51e5aee60a841ef58a68d00d5ee2c0faf357481a3", size = 249799, upload-time = "2025-12-28T15:40:39.47Z" }, - { url = "https://files.pythonhosted.org/packages/24/aa/0e39a2a3b16eebf7f193863323edbff38b6daba711abaaf807d4290cf61a/coverage-7.13.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:c43257717611ff5e9a1d79dce8e47566235ebda63328718d9b65dd640bc832ef", size = 251389, upload-time = "2025-12-28T15:40:40.954Z" }, - { url = "https://files.pythonhosted.org/packages/73/46/7f0c13111154dc5b978900c0ccee2e2ca239b910890e674a77f1363d483e/coverage-7.13.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e09fbecc007f7b6afdfb3b07ce5bd9f8494b6856dd4f577d26c66c391b829851", size = 249450, upload-time = "2025-12-28T15:40:42.489Z" }, - { url = "https://files.pythonhosted.org/packages/ac/ca/e80da6769e8b669ec3695598c58eef7ad98b0e26e66333996aee6316db23/coverage-7.13.1-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:a03a4f3a19a189919c7055098790285cc5c5b0b3976f8d227aea39dbf9f8bfdb", size = 249170, upload-time = "2025-12-28T15:40:44.279Z" }, - { url = "https://files.pythonhosted.org/packages/af/18/9e29baabdec1a8644157f572541079b4658199cfd372a578f84228e860de/coverage-7.13.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:3820778ea1387c2b6a818caec01c63adc5b3750211af6447e8dcfb9b6f08dbba", size = 250081, upload-time = "2025-12-28T15:40:45.748Z" }, - { url = "https://files.pythonhosted.org/packages/00/f8/c3021625a71c3b2f516464d322e41636aea381018319050a8114105872ee/coverage-7.13.1-cp311-cp311-win32.whl", hash = "sha256:ff10896fa55167371960c5908150b434b71c876dfab97b69478f22c8b445ea19", size = 221281, upload-time = "2025-12-28T15:40:47.232Z" }, - { url = "https://files.pythonhosted.org/packages/27/56/c216625f453df6e0559ed666d246fcbaaa93f3aa99eaa5080cea1229aa3d/coverage-7.13.1-cp311-cp311-win_amd64.whl", hash = "sha256:a998cc0aeeea4c6d5622a3754da5a493055d2d95186bad877b0a34ea6e6dbe0a", size = 222215, upload-time = "2025-12-28T15:40:49.19Z" }, - { url = "https://files.pythonhosted.org/packages/5c/9a/be342e76f6e531cae6406dc46af0d350586f24d9b67fdfa6daee02df71af/coverage-7.13.1-cp311-cp311-win_arm64.whl", hash = "sha256:fea07c1a39a22614acb762e3fbbb4011f65eedafcb2948feeef641ac78b4ee5c", size = 220886, upload-time = "2025-12-28T15:40:51.067Z" }, - { url = "https://files.pythonhosted.org/packages/ce/8a/87af46cccdfa78f53db747b09f5f9a21d5fc38d796834adac09b30a8ce74/coverage-7.13.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:6f34591000f06e62085b1865c9bc5f7858df748834662a51edadfd2c3bfe0dd3", size = 218927, upload-time = "2025-12-28T15:40:52.814Z" }, - { url = "https://files.pythonhosted.org/packages/82/a8/6e22fdc67242a4a5a153f9438d05944553121c8f4ba70cb072af4c41362e/coverage-7.13.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:b67e47c5595b9224599016e333f5ec25392597a89d5744658f837d204e16c63e", size = 219288, upload-time = "2025-12-28T15:40:54.262Z" }, - { url = "https://files.pythonhosted.org/packages/d0/0a/853a76e03b0f7c4375e2ca025df45c918beb367f3e20a0a8e91967f6e96c/coverage-7.13.1-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3e7b8bd70c48ffb28461ebe092c2345536fb18bbbf19d287c8913699735f505c", size = 250786, upload-time = "2025-12-28T15:40:56.059Z" }, - { url = "https://files.pythonhosted.org/packages/ea/b4/694159c15c52b9f7ec7adf49d50e5f8ee71d3e9ef38adb4445d13dd56c20/coverage-7.13.1-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c223d078112e90dc0e5c4e35b98b9584164bea9fbbd221c0b21c5241f6d51b62", size = 253543, upload-time = "2025-12-28T15:40:57.585Z" }, - { url = "https://files.pythonhosted.org/packages/96/b2/7f1f0437a5c855f87e17cf5d0dc35920b6440ff2b58b1ba9788c059c26c8/coverage-7.13.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:794f7c05af0763b1bbd1b9e6eff0e52ad068be3b12cd96c87de037b01390c968", size = 254635, upload-time = "2025-12-28T15:40:59.443Z" }, - { url = "https://files.pythonhosted.org/packages/e9/d1/73c3fdb8d7d3bddd9473c9c6a2e0682f09fc3dfbcb9c3f36412a7368bcab/coverage-7.13.1-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0642eae483cc8c2902e4af7298bf886d605e80f26382124cddc3967c2a3df09e", size = 251202, upload-time = "2025-12-28T15:41:01.328Z" }, - { url = "https://files.pythonhosted.org/packages/66/3c/f0edf75dcc152f145d5598329e864bbbe04ab78660fe3e8e395f9fff010f/coverage-7.13.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:9f5e772ed5fef25b3de9f2008fe67b92d46831bd2bc5bdc5dd6bfd06b83b316f", size = 252566, upload-time = "2025-12-28T15:41:03.319Z" }, - { url = "https://files.pythonhosted.org/packages/17/b3/e64206d3c5f7dcbceafd14941345a754d3dbc78a823a6ed526e23b9cdaab/coverage-7.13.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:45980ea19277dc0a579e432aef6a504fe098ef3a9032ead15e446eb0f1191aee", size = 250711, upload-time = "2025-12-28T15:41:06.411Z" }, - { url = "https://files.pythonhosted.org/packages/dc/ad/28a3eb970a8ef5b479ee7f0c484a19c34e277479a5b70269dc652b730733/coverage-7.13.1-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:e4f18eca6028ffa62adbd185a8f1e1dd242f2e68164dba5c2b74a5204850b4cf", size = 250278, upload-time = "2025-12-28T15:41:08.285Z" }, - { url = "https://files.pythonhosted.org/packages/54/e3/c8f0f1a93133e3e1291ca76cbb63565bd4b5c5df63b141f539d747fff348/coverage-7.13.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:f8dca5590fec7a89ed6826fce625595279e586ead52e9e958d3237821fbc750c", size = 252154, upload-time = "2025-12-28T15:41:09.969Z" }, - { url = "https://files.pythonhosted.org/packages/d0/bf/9939c5d6859c380e405b19e736321f1c7d402728792f4c752ad1adcce005/coverage-7.13.1-cp312-cp312-win32.whl", hash = "sha256:ff86d4e85188bba72cfb876df3e11fa243439882c55957184af44a35bd5880b7", size = 221487, upload-time = "2025-12-28T15:41:11.468Z" }, - { url = "https://files.pythonhosted.org/packages/fa/dc/7282856a407c621c2aad74021680a01b23010bb8ebf427cf5eacda2e876f/coverage-7.13.1-cp312-cp312-win_amd64.whl", hash = "sha256:16cc1da46c04fb0fb128b4dc430b78fa2aba8a6c0c9f8eb391fd5103409a6ac6", size = 222299, upload-time = "2025-12-28T15:41:13.386Z" }, - { url = "https://files.pythonhosted.org/packages/10/79/176a11203412c350b3e9578620013af35bcdb79b651eb976f4a4b32044fa/coverage-7.13.1-cp312-cp312-win_arm64.whl", hash = "sha256:8d9bc218650022a768f3775dd7fdac1886437325d8d295d923ebcfef4892ad5c", size = 220941, upload-time = "2025-12-28T15:41:14.975Z" }, - { url = "https://files.pythonhosted.org/packages/a3/a4/e98e689347a1ff1a7f67932ab535cef82eb5e78f32a9e4132e114bbb3a0a/coverage-7.13.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:cb237bfd0ef4d5eb6a19e29f9e528ac67ac3be932ea6b44fb6cc09b9f3ecff78", size = 218951, upload-time = "2025-12-28T15:41:16.653Z" }, - { url = "https://files.pythonhosted.org/packages/32/33/7cbfe2bdc6e2f03d6b240d23dc45fdaf3fd270aaf2d640be77b7f16989ab/coverage-7.13.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:1dcb645d7e34dcbcc96cd7c132b1fc55c39263ca62eb961c064eb3928997363b", size = 219325, upload-time = "2025-12-28T15:41:18.609Z" }, - { url = "https://files.pythonhosted.org/packages/59/f6/efdabdb4929487baeb7cb2a9f7dac457d9356f6ad1b255be283d58b16316/coverage-7.13.1-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3d42df8201e00384736f0df9be2ced39324c3907607d17d50d50116c989d84cd", size = 250309, upload-time = "2025-12-28T15:41:20.629Z" }, - { url = "https://files.pythonhosted.org/packages/12/da/91a52516e9d5aea87d32d1523f9cdcf7a35a3b298e6be05d6509ba3cfab2/coverage-7.13.1-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:fa3edde1aa8807de1d05934982416cb3ec46d1d4d91e280bcce7cca01c507992", size = 252907, upload-time = "2025-12-28T15:41:22.257Z" }, - { url = "https://files.pythonhosted.org/packages/75/38/f1ea837e3dc1231e086db1638947e00d264e7e8c41aa8ecacf6e1e0c05f4/coverage-7.13.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9edd0e01a343766add6817bc448408858ba6b489039eaaa2018474e4001651a4", size = 254148, upload-time = "2025-12-28T15:41:23.87Z" }, - { url = "https://files.pythonhosted.org/packages/7f/43/f4f16b881aaa34954ba446318dea6b9ed5405dd725dd8daac2358eda869a/coverage-7.13.1-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:985b7836931d033570b94c94713c6dba5f9d3ff26045f72c3e5dbc5fe3361e5a", size = 250515, upload-time = "2025-12-28T15:41:25.437Z" }, - { url = "https://files.pythonhosted.org/packages/84/34/8cba7f00078bd468ea914134e0144263194ce849ec3baad187ffb6203d1c/coverage-7.13.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ffed1e4980889765c84a5d1a566159e363b71d6b6fbaf0bebc9d3c30bc016766", size = 252292, upload-time = "2025-12-28T15:41:28.459Z" }, - { url = "https://files.pythonhosted.org/packages/8c/a4/cffac66c7652d84ee4ac52d3ccb94c015687d3b513f9db04bfcac2ac800d/coverage-7.13.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:8842af7f175078456b8b17f1b73a0d16a65dcbdc653ecefeb00a56b3c8c298c4", size = 250242, upload-time = "2025-12-28T15:41:30.02Z" }, - { url = "https://files.pythonhosted.org/packages/f4/78/9a64d462263dde416f3c0067efade7b52b52796f489b1037a95b0dc389c9/coverage-7.13.1-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:ccd7a6fca48ca9c131d9b0a2972a581e28b13416fc313fb98b6d24a03ce9a398", size = 250068, upload-time = "2025-12-28T15:41:32.007Z" }, - { url = "https://files.pythonhosted.org/packages/69/c8/a8994f5fece06db7c4a97c8fc1973684e178599b42e66280dded0524ef00/coverage-7.13.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:0403f647055de2609be776965108447deb8e384fe4a553c119e3ff6bfbab4784", size = 251846, upload-time = "2025-12-28T15:41:33.946Z" }, - { url = "https://files.pythonhosted.org/packages/cc/f7/91fa73c4b80305c86598a2d4e54ba22df6bf7d0d97500944af7ef155d9f7/coverage-7.13.1-cp313-cp313-win32.whl", hash = "sha256:549d195116a1ba1e1ae2f5ca143f9777800f6636eab917d4f02b5310d6d73461", size = 221512, upload-time = "2025-12-28T15:41:35.519Z" }, - { url = "https://files.pythonhosted.org/packages/45/0b/0768b4231d5a044da8f75e097a8714ae1041246bb765d6b5563bab456735/coverage-7.13.1-cp313-cp313-win_amd64.whl", hash = "sha256:5899d28b5276f536fcf840b18b61a9fce23cc3aec1d114c44c07fe94ebeaa500", size = 222321, upload-time = "2025-12-28T15:41:37.371Z" }, - { url = "https://files.pythonhosted.org/packages/9b/b8/bdcb7253b7e85157282450262008f1366aa04663f3e3e4c30436f596c3e2/coverage-7.13.1-cp313-cp313-win_arm64.whl", hash = "sha256:868a2fae76dfb06e87291bcbd4dcbcc778a8500510b618d50496e520bd94d9b9", size = 220949, upload-time = "2025-12-28T15:41:39.553Z" }, - { url = "https://files.pythonhosted.org/packages/70/52/f2be52cc445ff75ea8397948c96c1b4ee14f7f9086ea62fc929c5ae7b717/coverage-7.13.1-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:67170979de0dacac3f3097d02b0ad188d8edcea44ccc44aaa0550af49150c7dc", size = 219643, upload-time = "2025-12-28T15:41:41.567Z" }, - { url = "https://files.pythonhosted.org/packages/47/79/c85e378eaa239e2edec0c5523f71542c7793fe3340954eafb0bc3904d32d/coverage-7.13.1-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:f80e2bb21bfab56ed7405c2d79d34b5dc0bc96c2c1d2a067b643a09fb756c43a", size = 219997, upload-time = "2025-12-28T15:41:43.418Z" }, - { url = "https://files.pythonhosted.org/packages/fe/9b/b1ade8bfb653c0bbce2d6d6e90cc6c254cbb99b7248531cc76253cb4da6d/coverage-7.13.1-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:f83351e0f7dcdb14d7326c3d8d8c4e915fa685cbfdc6281f9470d97a04e9dfe4", size = 261296, upload-time = "2025-12-28T15:41:45.207Z" }, - { url = "https://files.pythonhosted.org/packages/1f/af/ebf91e3e1a2473d523e87e87fd8581e0aa08741b96265730e2d79ce78d8d/coverage-7.13.1-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bb3f6562e89bad0110afbe64e485aac2462efdce6232cdec7862a095dc3412f6", size = 263363, upload-time = "2025-12-28T15:41:47.163Z" }, - { url = "https://files.pythonhosted.org/packages/c4/8b/fb2423526d446596624ac7fde12ea4262e66f86f5120114c3cfd0bb2befa/coverage-7.13.1-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:77545b5dcda13b70f872c3b5974ac64c21d05e65b1590b441c8560115dc3a0d1", size = 265783, upload-time = "2025-12-28T15:41:49.03Z" }, - { url = "https://files.pythonhosted.org/packages/9b/26/ef2adb1e22674913b89f0fe7490ecadcef4a71fa96f5ced90c60ec358789/coverage-7.13.1-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a4d240d260a1aed814790bbe1f10a5ff31ce6c21bc78f0da4a1e8268d6c80dbd", size = 260508, upload-time = "2025-12-28T15:41:51.035Z" }, - { url = "https://files.pythonhosted.org/packages/ce/7d/f0f59b3404caf662e7b5346247883887687c074ce67ba453ea08c612b1d5/coverage-7.13.1-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:d2287ac9360dec3837bfdad969963a5d073a09a85d898bd86bea82aa8876ef3c", size = 263357, upload-time = "2025-12-28T15:41:52.631Z" }, - { url = "https://files.pythonhosted.org/packages/1a/b1/29896492b0b1a047604d35d6fa804f12818fa30cdad660763a5f3159e158/coverage-7.13.1-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:0d2c11f3ea4db66b5cbded23b20185c35066892c67d80ec4be4bab257b9ad1e0", size = 260978, upload-time = "2025-12-28T15:41:54.589Z" }, - { url = "https://files.pythonhosted.org/packages/48/f2/971de1238a62e6f0a4128d37adadc8bb882ee96afbe03ff1570291754629/coverage-7.13.1-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:3fc6a169517ca0d7ca6846c3c5392ef2b9e38896f61d615cb75b9e7134d4ee1e", size = 259877, upload-time = "2025-12-28T15:41:56.263Z" }, - { url = "https://files.pythonhosted.org/packages/6a/fc/0474efcbb590ff8628830e9aaec5f1831594874360e3251f1fdec31d07a3/coverage-7.13.1-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:d10a2ed46386e850bb3de503a54f9fe8192e5917fcbb143bfef653a9355e9a53", size = 262069, upload-time = "2025-12-28T15:41:58.093Z" }, - { url = "https://files.pythonhosted.org/packages/88/4f/3c159b7953db37a7b44c0eab8a95c37d1aa4257c47b4602c04022d5cb975/coverage-7.13.1-cp313-cp313t-win32.whl", hash = "sha256:75a6f4aa904301dab8022397a22c0039edc1f51e90b83dbd4464b8a38dc87842", size = 222184, upload-time = "2025-12-28T15:41:59.763Z" }, - { url = "https://files.pythonhosted.org/packages/58/a5/6b57d28f81417f9335774f20679d9d13b9a8fb90cd6160957aa3b54a2379/coverage-7.13.1-cp313-cp313t-win_amd64.whl", hash = "sha256:309ef5706e95e62578cda256b97f5e097916a2c26247c287bbe74794e7150df2", size = 223250, upload-time = "2025-12-28T15:42:01.52Z" }, - { url = "https://files.pythonhosted.org/packages/81/7c/160796f3b035acfbb58be80e02e484548595aa67e16a6345e7910ace0a38/coverage-7.13.1-cp313-cp313t-win_arm64.whl", hash = "sha256:92f980729e79b5d16d221038dbf2e8f9a9136afa072f9d5d6ed4cb984b126a09", size = 221521, upload-time = "2025-12-28T15:42:03.275Z" }, - { url = "https://files.pythonhosted.org/packages/aa/8e/ba0e597560c6563fc0adb902fda6526df5d4aa73bb10adf0574d03bd2206/coverage-7.13.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:97ab3647280d458a1f9adb85244e81587505a43c0c7cff851f5116cd2814b894", size = 218996, upload-time = "2025-12-28T15:42:04.978Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8e/764c6e116f4221dc7aa26c4061181ff92edb9c799adae6433d18eeba7a14/coverage-7.13.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:8f572d989142e0908e6acf57ad1b9b86989ff057c006d13b76c146ec6a20216a", size = 219326, upload-time = "2025-12-28T15:42:06.691Z" }, - { url = "https://files.pythonhosted.org/packages/4f/a6/6130dc6d8da28cdcbb0f2bf8865aeca9b157622f7c0031e48c6cf9a0e591/coverage-7.13.1-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:d72140ccf8a147e94274024ff6fd8fb7811354cf7ef88b1f0a988ebaa5bc774f", size = 250374, upload-time = "2025-12-28T15:42:08.786Z" }, - { url = "https://files.pythonhosted.org/packages/82/2b/783ded568f7cd6b677762f780ad338bf4b4750205860c17c25f7c708995e/coverage-7.13.1-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:d3c9f051b028810f5a87c88e5d6e9af3c0ff32ef62763bf15d29f740453ca909", size = 252882, upload-time = "2025-12-28T15:42:10.515Z" }, - { url = "https://files.pythonhosted.org/packages/cd/b2/9808766d082e6a4d59eb0cc881a57fc1600eb2c5882813eefff8254f71b5/coverage-7.13.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f398ba4df52d30b1763f62eed9de5620dcde96e6f491f4c62686736b155aa6e4", size = 254218, upload-time = "2025-12-28T15:42:12.208Z" }, - { url = "https://files.pythonhosted.org/packages/44/ea/52a985bb447c871cb4d2e376e401116520991b597c85afdde1ea9ef54f2c/coverage-7.13.1-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:132718176cc723026d201e347f800cd1a9e4b62ccd3f82476950834dad501c75", size = 250391, upload-time = "2025-12-28T15:42:14.21Z" }, - { url = "https://files.pythonhosted.org/packages/7f/1d/125b36cc12310718873cfc8209ecfbc1008f14f4f5fa0662aa608e579353/coverage-7.13.1-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:9e549d642426e3579b3f4b92d0431543b012dcb6e825c91619d4e93b7363c3f9", size = 252239, upload-time = "2025-12-28T15:42:16.292Z" }, - { url = "https://files.pythonhosted.org/packages/6a/16/10c1c164950cade470107f9f14bbac8485f8fb8515f515fca53d337e4a7f/coverage-7.13.1-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:90480b2134999301eea795b3a9dbf606c6fbab1b489150c501da84a959442465", size = 250196, upload-time = "2025-12-28T15:42:18.54Z" }, - { url = "https://files.pythonhosted.org/packages/2a/c6/cd860fac08780c6fd659732f6ced1b40b79c35977c1356344e44d72ba6c4/coverage-7.13.1-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:e825dbb7f84dfa24663dd75835e7257f8882629fc11f03ecf77d84a75134b864", size = 250008, upload-time = "2025-12-28T15:42:20.365Z" }, - { url = "https://files.pythonhosted.org/packages/f0/3a/a8c58d3d38f82a5711e1e0a67268362af48e1a03df27c03072ac30feefcf/coverage-7.13.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:623dcc6d7a7ba450bbdbeedbaa0c42b329bdae16491af2282f12a7e809be7eb9", size = 251671, upload-time = "2025-12-28T15:42:22.114Z" }, - { url = "https://files.pythonhosted.org/packages/f0/bc/fd4c1da651d037a1e3d53e8cb3f8182f4b53271ffa9a95a2e211bacc0349/coverage-7.13.1-cp314-cp314-win32.whl", hash = "sha256:6e73ebb44dca5f708dc871fe0b90cf4cff1a13f9956f747cc87b535a840386f5", size = 221777, upload-time = "2025-12-28T15:42:23.919Z" }, - { url = "https://files.pythonhosted.org/packages/4b/50/71acabdc8948464c17e90b5ffd92358579bd0910732c2a1c9537d7536aa6/coverage-7.13.1-cp314-cp314-win_amd64.whl", hash = "sha256:be753b225d159feb397bd0bf91ae86f689bad0da09d3b301478cd39b878ab31a", size = 222592, upload-time = "2025-12-28T15:42:25.619Z" }, - { url = "https://files.pythonhosted.org/packages/f7/c8/a6fb943081bb0cc926499c7907731a6dc9efc2cbdc76d738c0ab752f1a32/coverage-7.13.1-cp314-cp314-win_arm64.whl", hash = "sha256:228b90f613b25ba0019361e4ab81520b343b622fc657daf7e501c4ed6a2366c0", size = 221169, upload-time = "2025-12-28T15:42:27.629Z" }, - { url = "https://files.pythonhosted.org/packages/16/61/d5b7a0a0e0e40d62e59bc8c7aa1afbd86280d82728ba97f0673b746b78e2/coverage-7.13.1-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:60cfb538fe9ef86e5b2ab0ca8fc8d62524777f6c611dcaf76dc16fbe9b8e698a", size = 219730, upload-time = "2025-12-28T15:42:29.306Z" }, - { url = "https://files.pythonhosted.org/packages/a3/2c/8881326445fd071bb49514d1ce97d18a46a980712b51fee84f9ab42845b4/coverage-7.13.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:57dfc8048c72ba48a8c45e188d811e5efd7e49b387effc8fb17e97936dde5bf6", size = 220001, upload-time = "2025-12-28T15:42:31.319Z" }, - { url = "https://files.pythonhosted.org/packages/b5/d7/50de63af51dfa3a7f91cc37ad8fcc1e244b734232fbc8b9ab0f3c834a5cd/coverage-7.13.1-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:3f2f725aa3e909b3c5fdb8192490bdd8e1495e85906af74fe6e34a2a77ba0673", size = 261370, upload-time = "2025-12-28T15:42:32.992Z" }, - { url = "https://files.pythonhosted.org/packages/e1/2c/d31722f0ec918fd7453b2758312729f645978d212b410cd0f7c2aed88a94/coverage-7.13.1-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ee68b21909686eeb21dfcba2c3b81fee70dcf38b140dcd5aa70680995fa3aa5", size = 263485, upload-time = "2025-12-28T15:42:34.759Z" }, - { url = "https://files.pythonhosted.org/packages/fa/7a/2c114fa5c5fc08ba0777e4aec4c97e0b4a1afcb69c75f1f54cff78b073ab/coverage-7.13.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:724b1b270cb13ea2e6503476e34541a0b1f62280bc997eab443f87790202033d", size = 265890, upload-time = "2025-12-28T15:42:36.517Z" }, - { url = "https://files.pythonhosted.org/packages/65/d9/f0794aa1c74ceabc780fe17f6c338456bbc4e96bd950f2e969f48ac6fb20/coverage-7.13.1-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:916abf1ac5cf7eb16bc540a5bf75c71c43a676f5c52fcb9fe75a2bd75fb944e8", size = 260445, upload-time = "2025-12-28T15:42:38.646Z" }, - { url = "https://files.pythonhosted.org/packages/49/23/184b22a00d9bb97488863ced9454068c79e413cb23f472da6cbddc6cfc52/coverage-7.13.1-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:776483fd35b58d8afe3acbd9988d5de592ab6da2d2a865edfdbc9fdb43e7c486", size = 263357, upload-time = "2025-12-28T15:42:40.788Z" }, - { url = "https://files.pythonhosted.org/packages/7d/bd/58af54c0c9199ea4190284f389005779d7daf7bf3ce40dcd2d2b2f96da69/coverage-7.13.1-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:b6f3b96617e9852703f5b633ea01315ca45c77e879584f283c44127f0f1ec564", size = 260959, upload-time = "2025-12-28T15:42:42.808Z" }, - { url = "https://files.pythonhosted.org/packages/4b/2a/6839294e8f78a4891bf1df79d69c536880ba2f970d0ff09e7513d6e352e9/coverage-7.13.1-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:bd63e7b74661fed317212fab774e2a648bc4bb09b35f25474f8e3325d2945cd7", size = 259792, upload-time = "2025-12-28T15:42:44.818Z" }, - { url = "https://files.pythonhosted.org/packages/ba/c3/528674d4623283310ad676c5af7414b9850ab6d55c2300e8aa4b945ec554/coverage-7.13.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:933082f161bbb3e9f90d00990dc956120f608cdbcaeea15c4d897f56ef4fe416", size = 262123, upload-time = "2025-12-28T15:42:47.108Z" }, - { url = "https://files.pythonhosted.org/packages/06/c5/8c0515692fb4c73ac379d8dc09b18eaf0214ecb76ea6e62467ba7a1556ff/coverage-7.13.1-cp314-cp314t-win32.whl", hash = "sha256:18be793c4c87de2965e1c0f060f03d9e5aff66cfeae8e1dbe6e5b88056ec153f", size = 222562, upload-time = "2025-12-28T15:42:49.144Z" }, - { url = "https://files.pythonhosted.org/packages/05/0e/c0a0c4678cb30dac735811db529b321d7e1c9120b79bd728d4f4d6b010e9/coverage-7.13.1-cp314-cp314t-win_amd64.whl", hash = "sha256:0e42e0ec0cd3e0d851cb3c91f770c9301f48647cb2877cb78f74bdaa07639a79", size = 223670, upload-time = "2025-12-28T15:42:51.218Z" }, - { url = "https://files.pythonhosted.org/packages/f5/5f/b177aa0011f354abf03a8f30a85032686d290fdeed4222b27d36b4372a50/coverage-7.13.1-cp314-cp314t-win_arm64.whl", hash = "sha256:eaecf47ef10c72ece9a2a92118257da87e460e113b83cc0d2905cbbe931792b4", size = 221707, upload-time = "2025-12-28T15:42:53.034Z" }, - { url = "https://files.pythonhosted.org/packages/cc/48/d9f421cb8da5afaa1a64570d9989e00fb7955e6acddc5a12979f7666ef60/coverage-7.13.1-py3-none-any.whl", hash = "sha256:2016745cb3ba554469d02819d78958b571792bb68e31302610e898f80dd3a573", size = 210722, upload-time = "2025-12-28T15:42:54.901Z" }, +version = "7.13.5" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/9d/e0/70553e3000e345daff267cec284ce4cbf3fc141b6da229ac52775b5428f1/coverage-7.13.5.tar.gz", hash = "sha256:c81f6515c4c40141f83f502b07bbfa5c240ba25bbe73da7b33f1e5b6120ff179", size = 915967, upload-time = "2026-03-17T10:33:18.341Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/69/33/e8c48488c29a73fd089f9d71f9653c1be7478f2ad6b5bc870db11a55d23d/coverage-7.13.5-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:e0723d2c96324561b9aa76fb982406e11d93cdb388a7a7da2b16e04719cf7ca5", size = 219255, upload-time = "2026-03-17T10:29:51.081Z" }, + { url = "https://files.pythonhosted.org/packages/da/bd/b0ebe9f677d7f4b74a3e115eec7ddd4bcf892074963a00d91e8b164a6386/coverage-7.13.5-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:52f444e86475992506b32d4e5ca55c24fc88d73bcbda0e9745095b28ef4dc0cf", size = 219772, upload-time = "2026-03-17T10:29:52.867Z" }, + { url = "https://files.pythonhosted.org/packages/48/cc/5cb9502f4e01972f54eedd48218bb203fe81e294be606a2bc93970208013/coverage-7.13.5-cp310-cp310-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:704de6328e3d612a8f6c07000a878ff38181ec3263d5a11da1db294fa6a9bdf8", size = 246532, upload-time = "2026-03-17T10:29:54.688Z" }, + { url = "https://files.pythonhosted.org/packages/7d/d8/3217636d86c7e7b12e126e4f30ef1581047da73140614523af7495ed5f2d/coverage-7.13.5-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:a1a6d79a14e1ec1832cabc833898636ad5f3754a678ef8bb4908515208bf84f4", size = 248333, upload-time = "2026-03-17T10:29:56.221Z" }, + { url = "https://files.pythonhosted.org/packages/2b/30/2002ac6729ba2d4357438e2ed3c447ad8562866c8c63fc16f6dfc33afe56/coverage-7.13.5-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79060214983769c7ba3f0cee10b54c97609dca4d478fa1aa32b914480fd5738d", size = 250211, upload-time = "2026-03-17T10:29:57.938Z" }, + { url = "https://files.pythonhosted.org/packages/6c/85/552496626d6b9359eb0e2f86f920037c9cbfba09b24d914c6e1528155f7d/coverage-7.13.5-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:356e76b46783a98c2a2fe81ec79df4883a1e62895ea952968fb253c114e7f930", size = 252125, upload-time = "2026-03-17T10:29:59.388Z" }, + { url = "https://files.pythonhosted.org/packages/44/21/40256eabdcbccdb6acf6b381b3016a154399a75fe39d406f790ae84d1f3c/coverage-7.13.5-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0cef0cdec915d11254a7f549c1170afecce708d30610c6abdded1f74e581666d", size = 247219, upload-time = "2026-03-17T10:30:01.199Z" }, + { url = "https://files.pythonhosted.org/packages/b1/e8/96e2a6c3f21a0ea77d7830b254a1542d0328acc8d7bdf6a284ba7e529f77/coverage-7.13.5-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dc022073d063b25a402454e5712ef9e007113e3a676b96c5f29b2bda29352f40", size = 248248, upload-time = "2026-03-17T10:30:03.317Z" }, + { url = "https://files.pythonhosted.org/packages/da/ba/8477f549e554827da390ec659f3c38e4b6d95470f4daafc2d8ff94eaa9c2/coverage-7.13.5-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:9b74db26dfea4f4e50d48a4602207cd1e78be33182bc9cbf22da94f332f99878", size = 246254, upload-time = "2026-03-17T10:30:04.832Z" }, + { url = "https://files.pythonhosted.org/packages/55/59/bc22aef0e6aa179d5b1b001e8b3654785e9adf27ef24c93dc4228ebd5d68/coverage-7.13.5-cp310-cp310-musllinux_1_2_ppc64le.whl", hash = "sha256:ad146744ca4fd09b50c482650e3c1b1f4dfa1d4792e0a04a369c7f23336f0400", size = 250067, upload-time = "2026-03-17T10:30:06.535Z" }, + { url = "https://files.pythonhosted.org/packages/de/1b/c6a023a160806a5137dca53468fd97530d6acad24a22003b1578a9c2e429/coverage-7.13.5-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:c555b48be1853fe3997c11c4bd521cdd9a9612352de01fa4508f16ec341e6fe0", size = 246521, upload-time = "2026-03-17T10:30:08.486Z" }, + { url = "https://files.pythonhosted.org/packages/2d/3f/3532c85a55aa2f899fa17c186f831cfa1aa434d88ff792a709636f64130e/coverage-7.13.5-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:7034b5c56a58ae5e85f23949d52c14aca2cfc6848a31764995b7de88f13a1ea0", size = 247126, upload-time = "2026-03-17T10:30:09.966Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2e/b9d56af4a24ef45dfbcda88e06870cb7d57b2b0bfa3a888d79b4c8debd76/coverage-7.13.5-cp310-cp310-win32.whl", hash = "sha256:eb7fdf1ef130660e7415e0253a01a7d5a88c9c4d158bcf75cbbd922fd65a5b58", size = 221860, upload-time = "2026-03-17T10:30:11.393Z" }, + { url = "https://files.pythonhosted.org/packages/9f/cc/d938417e7a4d7f0433ad4edee8bb2acdc60dc7ac5af19e2a07a048ecbee3/coverage-7.13.5-cp310-cp310-win_amd64.whl", hash = "sha256:3e1bb5f6c78feeb1be3475789b14a0f0a5b47d505bfc7267126ccbd50289999e", size = 222788, upload-time = "2026-03-17T10:30:12.886Z" }, + { url = "https://files.pythonhosted.org/packages/4b/37/d24c8f8220ff07b839b2c043ea4903a33b0f455abe673ae3c03bbdb7f212/coverage-7.13.5-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:66a80c616f80181f4d643b0f9e709d97bcea413ecd9631e1dedc7401c8e6695d", size = 219381, upload-time = "2026-03-17T10:30:14.68Z" }, + { url = "https://files.pythonhosted.org/packages/35/8b/cd129b0ca4afe886a6ce9d183c44d8301acbd4ef248622e7c49a23145605/coverage-7.13.5-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:145ede53ccbafb297c1c9287f788d1bc3efd6c900da23bf6931b09eafc931587", size = 219880, upload-time = "2026-03-17T10:30:16.231Z" }, + { url = "https://files.pythonhosted.org/packages/55/2f/e0e5b237bffdb5d6c530ce87cc1d413a5b7d7dfd60fb067ad6d254c35c76/coverage-7.13.5-cp311-cp311-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:0672854dc733c342fa3e957e0605256d2bf5934feeac328da9e0b5449634a642", size = 250303, upload-time = "2026-03-17T10:30:17.748Z" }, + { url = "https://files.pythonhosted.org/packages/92/be/b1afb692be85b947f3401375851484496134c5554e67e822c35f28bf2fbc/coverage-7.13.5-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:ec10e2a42b41c923c2209b846126c6582db5e43a33157e9870ba9fb70dc7854b", size = 252218, upload-time = "2026-03-17T10:30:19.804Z" }, + { url = "https://files.pythonhosted.org/packages/da/69/2f47bb6fa1b8d1e3e5d0c4be8ccb4313c63d742476a619418f85740d597b/coverage-7.13.5-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:be3d4bbad9d4b037791794ddeedd7d64a56f5933a2c1373e18e9e568b9141686", size = 254326, upload-time = "2026-03-17T10:30:21.321Z" }, + { url = "https://files.pythonhosted.org/packages/d5/d0/79db81da58965bd29dabc8f4ad2a2af70611a57cba9d1ec006f072f30a54/coverage-7.13.5-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:4d2afbc5cc54d286bfb54541aa50b64cdb07a718227168c87b9e2fb8f25e1743", size = 256267, upload-time = "2026-03-17T10:30:23.094Z" }, + { url = "https://files.pythonhosted.org/packages/e5/32/d0d7cc8168f91ddab44c0ce4806b969df5f5fdfdbb568eaca2dbc2a04936/coverage-7.13.5-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3ad050321264c49c2fa67bb599100456fc51d004b82534f379d16445da40fb75", size = 250430, upload-time = "2026-03-17T10:30:25.311Z" }, + { url = "https://files.pythonhosted.org/packages/4d/06/a055311d891ddbe231cd69fdd20ea4be6e3603ffebddf8704b8ca8e10a3c/coverage-7.13.5-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:7300c8a6d13335b29bb76d7651c66af6bd8658517c43499f110ddc6717bfc209", size = 252017, upload-time = "2026-03-17T10:30:27.284Z" }, + { url = "https://files.pythonhosted.org/packages/d6/f6/d0fd2d21e29a657b5f77a2fe7082e1568158340dceb941954f776dce1b7b/coverage-7.13.5-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:eb07647a5738b89baab047f14edd18ded523de60f3b30e75c2acc826f79c839a", size = 250080, upload-time = "2026-03-17T10:30:29.481Z" }, + { url = "https://files.pythonhosted.org/packages/4e/ab/0d7fb2efc2e9a5eb7ddcc6e722f834a69b454b7e6e5888c3a8567ecffb31/coverage-7.13.5-cp311-cp311-musllinux_1_2_ppc64le.whl", hash = "sha256:9adb6688e3b53adffefd4a52d72cbd8b02602bfb8f74dcd862337182fd4d1a4e", size = 253843, upload-time = "2026-03-17T10:30:31.301Z" }, + { url = "https://files.pythonhosted.org/packages/ba/6f/7467b917bbf5408610178f62a49c0ed4377bb16c1657f689cc61470da8ce/coverage-7.13.5-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7c8d4bc913dd70b93488d6c496c77f3aff5ea99a07e36a18f865bca55adef8bd", size = 249802, upload-time = "2026-03-17T10:30:33.358Z" }, + { url = "https://files.pythonhosted.org/packages/75/2c/1172fb689df92135f5bfbbd69fc83017a76d24ea2e2f3a1154007e2fb9f8/coverage-7.13.5-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0e3c426ffc4cd952f54ee9ffbdd10345709ecc78a3ecfd796a57236bfad0b9b8", size = 250707, upload-time = "2026-03-17T10:30:35.2Z" }, + { url = "https://files.pythonhosted.org/packages/67/21/9ac389377380a07884e3b48ba7a620fcd9dbfaf1d40565facdc6b36ec9ef/coverage-7.13.5-cp311-cp311-win32.whl", hash = "sha256:259b69bb83ad9894c4b25be2528139eecba9a82646ebdda2d9db1ba28424a6bf", size = 221880, upload-time = "2026-03-17T10:30:36.775Z" }, + { url = "https://files.pythonhosted.org/packages/af/7f/4cd8a92531253f9d7c1bbecd9fa1b472907fb54446ca768c59b531248dc5/coverage-7.13.5-cp311-cp311-win_amd64.whl", hash = "sha256:258354455f4e86e3e9d0d17571d522e13b4e1e19bf0f8596bcf9476d61e7d8a9", size = 222816, upload-time = "2026-03-17T10:30:38.891Z" }, + { url = "https://files.pythonhosted.org/packages/12/a6/1d3f6155fb0010ca68eba7fe48ca6c9da7385058b77a95848710ecf189b1/coverage-7.13.5-cp311-cp311-win_arm64.whl", hash = "sha256:bff95879c33ec8da99fc9b6fe345ddb5be6414b41d6d1ad1c8f188d26f36e028", size = 221483, upload-time = "2026-03-17T10:30:40.463Z" }, + { url = "https://files.pythonhosted.org/packages/a0/c3/a396306ba7db865bf96fc1fb3b7fd29bcbf3d829df642e77b13555163cd6/coverage-7.13.5-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:460cf0114c5016fa841214ff5564aa4864f11948da9440bc97e21ad1f4ba1e01", size = 219554, upload-time = "2026-03-17T10:30:42.208Z" }, + { url = "https://files.pythonhosted.org/packages/a6/16/a68a19e5384e93f811dccc51034b1fd0b865841c390e3c931dcc4699e035/coverage-7.13.5-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:0e223ce4b4ed47f065bfb123687686512e37629be25cc63728557ae7db261422", size = 219908, upload-time = "2026-03-17T10:30:43.906Z" }, + { url = "https://files.pythonhosted.org/packages/29/72/20b917c6793af3a5ceb7fb9c50033f3ec7865f2911a1416b34a7cfa0813b/coverage-7.13.5-cp312-cp312-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:6e3370441f4513c6252bf042b9c36d22491142385049243253c7e48398a15a9f", size = 251419, upload-time = "2026-03-17T10:30:45.545Z" }, + { url = "https://files.pythonhosted.org/packages/8c/49/cd14b789536ac6a4778c453c6a2338bc0a2fb60c5a5a41b4008328b9acc1/coverage-7.13.5-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:03ccc709a17a1de074fb1d11f217342fb0d2b1582ed544f554fc9fc3f07e95f5", size = 254159, upload-time = "2026-03-17T10:30:47.204Z" }, + { url = "https://files.pythonhosted.org/packages/9d/00/7b0edcfe64e2ed4c0340dac14a52ad0f4c9bd0b8b5e531af7d55b703db7c/coverage-7.13.5-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3f4818d065964db3c1c66dc0fbdac5ac692ecbc875555e13374fdbe7eedb4376", size = 255270, upload-time = "2026-03-17T10:30:48.812Z" }, + { url = "https://files.pythonhosted.org/packages/93/89/7ffc4ba0f5d0a55c1e84ea7cee39c9fc06af7b170513d83fbf3bbefce280/coverage-7.13.5-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:012d5319e66e9d5a218834642d6c35d265515a62f01157a45bcc036ecf947256", size = 257538, upload-time = "2026-03-17T10:30:50.77Z" }, + { url = "https://files.pythonhosted.org/packages/81/bd/73ddf85f93f7e6fa83e77ccecb6162d9415c79007b4bc124008a4995e4a7/coverage-7.13.5-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:8dd02af98971bdb956363e4827d34425cb3df19ee550ef92855b0acb9c7ce51c", size = 251821, upload-time = "2026-03-17T10:30:52.5Z" }, + { url = "https://files.pythonhosted.org/packages/a0/81/278aff4e8dec4926a0bcb9486320752811f543a3ce5b602cc7a29978d073/coverage-7.13.5-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:f08fd75c50a760c7eb068ae823777268daaf16a80b918fa58eea888f8e3919f5", size = 253191, upload-time = "2026-03-17T10:30:54.543Z" }, + { url = "https://files.pythonhosted.org/packages/70/ee/fe1621488e2e0a58d7e94c4800f0d96f79671553488d401a612bebae324b/coverage-7.13.5-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:843ea8643cf967d1ac7e8ecd4bb00c99135adf4816c0c0593fdcc47b597fcf09", size = 251337, upload-time = "2026-03-17T10:30:56.663Z" }, + { url = "https://files.pythonhosted.org/packages/37/a6/f79fb37aa104b562207cc23cb5711ab6793608e246cae1e93f26b2236ed9/coverage-7.13.5-cp312-cp312-musllinux_1_2_ppc64le.whl", hash = "sha256:9d44d7aa963820b1b971dbecd90bfe5fe8f81cff79787eb6cca15750bd2f79b9", size = 255404, upload-time = "2026-03-17T10:30:58.427Z" }, + { url = "https://files.pythonhosted.org/packages/75/f0/ed15262a58ec81ce457ceb717b7f78752a1713556b19081b76e90896e8d4/coverage-7.13.5-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:7132bed4bd7b836200c591410ae7d97bf7ae8be6fc87d160b2bd881df929e7bf", size = 250903, upload-time = "2026-03-17T10:31:00.093Z" }, + { url = "https://files.pythonhosted.org/packages/0f/e9/9129958f20e7e9d4d56d51d42ccf708d15cac355ff4ac6e736e97a9393d2/coverage-7.13.5-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:a698e363641b98843c517817db75373c83254781426e94ada3197cabbc2c919c", size = 252780, upload-time = "2026-03-17T10:31:01.916Z" }, + { url = "https://files.pythonhosted.org/packages/a4/d7/0ad9b15812d81272db94379fe4c6df8fd17781cc7671fdfa30c76ba5ff7b/coverage-7.13.5-cp312-cp312-win32.whl", hash = "sha256:bdba0a6b8812e8c7df002d908a9a2ea3c36e92611b5708633c50869e6d922fdf", size = 222093, upload-time = "2026-03-17T10:31:03.642Z" }, + { url = "https://files.pythonhosted.org/packages/29/3d/821a9a5799fac2556bcf0bd37a70d1d11fa9e49784b6d22e92e8b2f85f18/coverage-7.13.5-cp312-cp312-win_amd64.whl", hash = "sha256:d2c87e0c473a10bffe991502eac389220533024c8082ec1ce849f4218dded810", size = 222900, upload-time = "2026-03-17T10:31:05.651Z" }, + { url = "https://files.pythonhosted.org/packages/d4/fa/2238c2ad08e35cf4f020ea721f717e09ec3152aea75d191a7faf3ef009a8/coverage-7.13.5-cp312-cp312-win_arm64.whl", hash = "sha256:bf69236a9a81bdca3bff53796237aab096cdbf8d78a66ad61e992d9dac7eb2de", size = 221515, upload-time = "2026-03-17T10:31:07.293Z" }, + { url = "https://files.pythonhosted.org/packages/74/8c/74fedc9663dcf168b0a059d4ea756ecae4da77a489048f94b5f512a8d0b3/coverage-7.13.5-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:5ec4af212df513e399cf11610cc27063f1586419e814755ab362e50a85ea69c1", size = 219576, upload-time = "2026-03-17T10:31:09.045Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c9/44fb661c55062f0818a6ffd2685c67aa30816200d5f2817543717d4b92eb/coverage-7.13.5-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:941617e518602e2d64942c88ec8499f7fbd49d3f6c4327d3a71d43a1973032f3", size = 219942, upload-time = "2026-03-17T10:31:10.708Z" }, + { url = "https://files.pythonhosted.org/packages/5f/13/93419671cee82b780bab7ea96b67c8ef448f5f295f36bf5031154ec9a790/coverage-7.13.5-cp313-cp313-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:da305e9937617ee95c2e39d8ff9f040e0487cbf1ac174f777ed5eddd7a7c1f26", size = 250935, upload-time = "2026-03-17T10:31:12.392Z" }, + { url = "https://files.pythonhosted.org/packages/ac/68/1666e3a4462f8202d836920114fa7a5ee9275d1fa45366d336c551a162dd/coverage-7.13.5-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:78e696e1cc714e57e8b25760b33a8b1026b7048d270140d25dafe1b0a1ee05a3", size = 253541, upload-time = "2026-03-17T10:31:14.247Z" }, + { url = "https://files.pythonhosted.org/packages/4e/5e/3ee3b835647be646dcf3c65a7c6c18f87c27326a858f72ab22c12730773d/coverage-7.13.5-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:02ca0eed225b2ff301c474aeeeae27d26e2537942aa0f87491d3e147e784a82b", size = 254780, upload-time = "2026-03-17T10:31:16.193Z" }, + { url = "https://files.pythonhosted.org/packages/44/b3/cb5bd1a04cfcc49ede6cd8409d80bee17661167686741e041abc7ee1b9a9/coverage-7.13.5-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:04690832cbea4e4663d9149e05dba142546ca05cb1848816760e7f58285c970a", size = 256912, upload-time = "2026-03-17T10:31:17.89Z" }, + { url = "https://files.pythonhosted.org/packages/1b/66/c1dceb7b9714473800b075f5c8a84f4588f887a90eb8645282031676e242/coverage-7.13.5-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:0590e44dd2745c696a778f7bab6aa95256de2cbc8b8cff4f7db8ff09813d6969", size = 251165, upload-time = "2026-03-17T10:31:19.605Z" }, + { url = "https://files.pythonhosted.org/packages/b7/62/5502b73b97aa2e53ea22a39cf8649ff44827bef76d90bf638777daa27a9d/coverage-7.13.5-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:d7cfad2d6d81dd298ab6b89fe72c3b7b05ec7544bdda3b707ddaecff8d25c161", size = 252908, upload-time = "2026-03-17T10:31:21.312Z" }, + { url = "https://files.pythonhosted.org/packages/7d/37/7792c2d69854397ca77a55c4646e5897c467928b0e27f2d235d83b5d08c6/coverage-7.13.5-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:e092b9499de38ae0fbfbc603a74660eb6ff3e869e507b50d85a13b6db9863e15", size = 250873, upload-time = "2026-03-17T10:31:23.565Z" }, + { url = "https://files.pythonhosted.org/packages/a3/23/bc866fb6163be52a8a9e5d708ba0d3b1283c12158cefca0a8bbb6e247a43/coverage-7.13.5-cp313-cp313-musllinux_1_2_ppc64le.whl", hash = "sha256:48c39bc4a04d983a54a705a6389512883d4a3b9862991b3617d547940e9f52b1", size = 255030, upload-time = "2026-03-17T10:31:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/7d/8b/ef67e1c222ef49860701d346b8bbb70881bef283bd5f6cbba68a39a086c7/coverage-7.13.5-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:2d3807015f138ffea1ed9afeeb8624fd781703f2858b62a8dd8da5a0994c57b6", size = 250694, upload-time = "2026-03-17T10:31:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/46/0d/866d1f74f0acddbb906db212e096dee77a8e2158ca5e6bb44729f9d93298/coverage-7.13.5-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:ee2aa19e03161671ec964004fb74b2257805d9710bf14a5c704558b9d8dbaf17", size = 252469, upload-time = "2026-03-17T10:31:29.472Z" }, + { url = "https://files.pythonhosted.org/packages/7a/f5/be742fec31118f02ce42b21c6af187ad6a344fed546b56ca60caacc6a9a0/coverage-7.13.5-cp313-cp313-win32.whl", hash = "sha256:ce1998c0483007608c8382f4ff50164bfc5bd07a2246dd272aa4043b75e61e85", size = 222112, upload-time = "2026-03-17T10:31:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/66/40/7732d648ab9d069a46e686043241f01206348e2bbf128daea85be4d6414b/coverage-7.13.5-cp313-cp313-win_amd64.whl", hash = "sha256:631efb83f01569670a5e866ceb80fe483e7c159fac6f167e6571522636104a0b", size = 222923, upload-time = "2026-03-17T10:31:33.633Z" }, + { url = "https://files.pythonhosted.org/packages/48/af/fea819c12a095781f6ccd504890aaddaf88b8fab263c4940e82c7b770124/coverage-7.13.5-cp313-cp313-win_arm64.whl", hash = "sha256:f4cd16206ad171cbc2470dbea9103cf9a7607d5fe8c242fdf1edf36174020664", size = 221540, upload-time = "2026-03-17T10:31:35.445Z" }, + { url = "https://files.pythonhosted.org/packages/23/d2/17879af479df7fbbd44bd528a31692a48f6b25055d16482fdf5cdb633805/coverage-7.13.5-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0428cbef5783ad91fe240f673cc1f76b25e74bbfe1a13115e4aa30d3f538162d", size = 220262, upload-time = "2026-03-17T10:31:37.184Z" }, + { url = "https://files.pythonhosted.org/packages/5b/4c/d20e554f988c8f91d6a02c5118f9abbbf73a8768a3048cb4962230d5743f/coverage-7.13.5-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:e0b216a19534b2427cc201a26c25da4a48633f29a487c61258643e89d28200c0", size = 220617, upload-time = "2026-03-17T10:31:39.245Z" }, + { url = "https://files.pythonhosted.org/packages/29/9c/f9f5277b95184f764b24e7231e166dfdb5780a46d408a2ac665969416d61/coverage-7.13.5-cp313-cp313t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:972a9cd27894afe4bc2b1480107054e062df08e671df7c2f18c205e805ccd806", size = 261912, upload-time = "2026-03-17T10:31:41.324Z" }, + { url = "https://files.pythonhosted.org/packages/d5/f6/7f1ab39393eeb50cfe4747ae8ef0e4fc564b989225aa1152e13a180d74f8/coverage-7.13.5-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:4b59148601efcd2bac8c4dbf1f0ad6391693ccf7a74b8205781751637076aee3", size = 263987, upload-time = "2026-03-17T10:31:43.724Z" }, + { url = "https://files.pythonhosted.org/packages/a0/d7/62c084fb489ed9c6fbdf57e006752e7c516ea46fd690e5ed8b8617c7d52e/coverage-7.13.5-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:505d7083c8b0c87a8fa8c07370c285847c1f77739b22e299ad75a6af6c32c5c9", size = 266416, upload-time = "2026-03-17T10:31:45.769Z" }, + { url = "https://files.pythonhosted.org/packages/a9/f6/df63d8660e1a0bff6125947afda112a0502736f470d62ca68b288ea762d8/coverage-7.13.5-cp313-cp313t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:60365289c3741e4db327e7baff2a4aaacf22f788e80fa4683393891b70a89fbd", size = 267558, upload-time = "2026-03-17T10:31:48.293Z" }, + { url = "https://files.pythonhosted.org/packages/5b/02/353ca81d36779bd108f6d384425f7139ac3c58c750dcfaafe5d0bee6436b/coverage-7.13.5-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:1b88c69c8ef5d4b6fe7dea66d6636056a0f6a7527c440e890cf9259011f5e606", size = 261163, upload-time = "2026-03-17T10:31:50.125Z" }, + { url = "https://files.pythonhosted.org/packages/2c/16/2e79106d5749bcaf3aee6d309123548e3276517cd7851faa8da213bc61bf/coverage-7.13.5-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:5b13955d31d1633cf9376908089b7cebe7d15ddad7aeaabcbe969a595a97e95e", size = 263981, upload-time = "2026-03-17T10:31:51.961Z" }, + { url = "https://files.pythonhosted.org/packages/29/c7/c29e0c59ffa6942030ae6f50b88ae49988e7e8da06de7ecdbf49c6d4feae/coverage-7.13.5-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:f70c9ab2595c56f81a89620e22899eea8b212a4041bd728ac6f4a28bf5d3ddd0", size = 261604, upload-time = "2026-03-17T10:31:53.872Z" }, + { url = "https://files.pythonhosted.org/packages/40/48/097cdc3db342f34006a308ab41c3a7c11c3f0d84750d340f45d88a782e00/coverage-7.13.5-cp313-cp313t-musllinux_1_2_ppc64le.whl", hash = "sha256:084b84a8c63e8d6fc7e3931b316a9bcafca1458d753c539db82d31ed20091a87", size = 265321, upload-time = "2026-03-17T10:31:55.997Z" }, + { url = "https://files.pythonhosted.org/packages/bb/1f/4994af354689e14fd03a75f8ec85a9a68d94e0188bbdab3fc1516b55e512/coverage-7.13.5-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:ad14385487393e386e2ea988b09d62dd42c397662ac2dabc3832d71253eee479", size = 260502, upload-time = "2026-03-17T10:31:58.308Z" }, + { url = "https://files.pythonhosted.org/packages/22/c6/9bb9ef55903e628033560885f5c31aa227e46878118b63ab15dc7ba87797/coverage-7.13.5-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:7f2c47b36fe7709a6e83bfadf4eefb90bd25fbe4014d715224c4316f808e59a2", size = 262688, upload-time = "2026-03-17T10:32:00.141Z" }, + { url = "https://files.pythonhosted.org/packages/14/4f/f5df9007e50b15e53e01edea486814783a7f019893733d9e4d6caad75557/coverage-7.13.5-cp313-cp313t-win32.whl", hash = "sha256:67e9bc5449801fad0e5dff329499fb090ba4c5800b86805c80617b4e29809b2a", size = 222788, upload-time = "2026-03-17T10:32:02.246Z" }, + { url = "https://files.pythonhosted.org/packages/e1/98/aa7fccaa97d0f3192bec013c4e6fd6d294a6ed44b640e6bb61f479e00ed5/coverage-7.13.5-cp313-cp313t-win_amd64.whl", hash = "sha256:da86cdcf10d2519e10cabb8ac2de03da1bcb6e4853790b7fbd48523332e3a819", size = 223851, upload-time = "2026-03-17T10:32:04.416Z" }, + { url = "https://files.pythonhosted.org/packages/3d/8b/e5c469f7352651e5f013198e9e21f97510b23de957dd06a84071683b4b60/coverage-7.13.5-cp313-cp313t-win_arm64.whl", hash = "sha256:0ecf12ecb326fe2c339d93fc131816f3a7367d223db37817208905c89bded911", size = 222104, upload-time = "2026-03-17T10:32:06.65Z" }, + { url = "https://files.pythonhosted.org/packages/8e/77/39703f0d1d4b478bfd30191d3c14f53caf596fac00efb3f8f6ee23646439/coverage-7.13.5-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:fbabfaceaeb587e16f7008f7795cd80d20ec548dc7f94fbb0d4ec2e038ce563f", size = 219621, upload-time = "2026-03-17T10:32:08.589Z" }, + { url = "https://files.pythonhosted.org/packages/e2/3e/51dff36d99ae14639a133d9b164d63e628532e2974d8b1edb99dd1ebc733/coverage-7.13.5-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:9bb2a28101a443669a423b665939381084412b81c3f8c0fcfbac57f4e30b5b8e", size = 219953, upload-time = "2026-03-17T10:32:10.507Z" }, + { url = "https://files.pythonhosted.org/packages/6a/6c/1f1917b01eb647c2f2adc9962bd66c79eb978951cab61bdc1acab3290c07/coverage-7.13.5-cp314-cp314-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:bd3a2fbc1c6cccb3c5106140d87cc6a8715110373ef42b63cf5aea29df8c217a", size = 250992, upload-time = "2026-03-17T10:32:12.41Z" }, + { url = "https://files.pythonhosted.org/packages/22/e5/06b1f88f42a5a99df42ce61208bdec3bddb3d261412874280a19796fc09c/coverage-7.13.5-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:6c36ddb64ed9d7e496028d1d00dfec3e428e0aabf4006583bb1839958d280510", size = 253503, upload-time = "2026-03-17T10:32:14.449Z" }, + { url = "https://files.pythonhosted.org/packages/80/28/2a148a51e5907e504fa7b85490277734e6771d8844ebcc48764a15e28155/coverage-7.13.5-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:380e8e9084d8eb38db3a9176a1a4f3c0082c3806fa0dc882d1d87abc3c789247", size = 254852, upload-time = "2026-03-17T10:32:16.56Z" }, + { url = "https://files.pythonhosted.org/packages/61/77/50e8d3d85cc0b7ebe09f30f151d670e302c7ff4a1bf6243f71dd8b0981fa/coverage-7.13.5-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e808af52a0513762df4d945ea164a24b37f2f518cbe97e03deaa0ee66139b4d6", size = 257161, upload-time = "2026-03-17T10:32:19.004Z" }, + { url = "https://files.pythonhosted.org/packages/3b/c4/b5fd1d4b7bf8d0e75d997afd3925c59ba629fc8616f1b3aae7605132e256/coverage-7.13.5-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e301d30dd7e95ae068671d746ba8c34e945a82682e62918e41b2679acd2051a0", size = 251021, upload-time = "2026-03-17T10:32:21.344Z" }, + { url = "https://files.pythonhosted.org/packages/f8/66/6ea21f910e92d69ef0b1c3346ea5922a51bad4446c9126db2ae96ee24c4c/coverage-7.13.5-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:800bc829053c80d240a687ceeb927a94fd108bbdc68dfbe505d0d75ab578a882", size = 252858, upload-time = "2026-03-17T10:32:23.506Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ea/879c83cb5d61aa2a35fb80e72715e92672daef8191b84911a643f533840c/coverage-7.13.5-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:0b67af5492adb31940ee418a5a655c28e48165da5afab8c7fa6fd72a142f8740", size = 250823, upload-time = "2026-03-17T10:32:25.516Z" }, + { url = "https://files.pythonhosted.org/packages/8a/fb/616d95d3adb88b9803b275580bdeee8bd1b69a886d057652521f83d7322f/coverage-7.13.5-cp314-cp314-musllinux_1_2_ppc64le.whl", hash = "sha256:c9136ff29c3a91e25b1d1552b5308e53a1e0653a23e53b6366d7c2dcbbaf8a16", size = 255099, upload-time = "2026-03-17T10:32:27.944Z" }, + { url = "https://files.pythonhosted.org/packages/1c/93/25e6917c90ec1c9a56b0b26f6cad6408e5f13bb6b35d484a0d75c9cf000d/coverage-7.13.5-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:cff784eef7f0b8f6cb28804fbddcfa99f89efe4cc35fb5627e3ac58f91ed3ac0", size = 250638, upload-time = "2026-03-17T10:32:29.914Z" }, + { url = "https://files.pythonhosted.org/packages/fc/7b/dc1776b0464145a929deed214aef9fb1493f159b59ff3c7eeeedf91eddd0/coverage-7.13.5-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:68a4953be99b17ac3c23b6efbc8a38330d99680c9458927491d18700ef23ded0", size = 252295, upload-time = "2026-03-17T10:32:31.981Z" }, + { url = "https://files.pythonhosted.org/packages/ea/fb/99cbbc56a26e07762a2740713f3c8f9f3f3106e3a3dd8cc4474954bccd34/coverage-7.13.5-cp314-cp314-win32.whl", hash = "sha256:35a31f2b1578185fbe6aa2e74cea1b1d0bbf4c552774247d9160d29b80ed56cc", size = 222360, upload-time = "2026-03-17T10:32:34.233Z" }, + { url = "https://files.pythonhosted.org/packages/8d/b7/4758d4f73fb536347cc5e4ad63662f9d60ba9118cb6785e9616b2ce5d7fa/coverage-7.13.5-cp314-cp314-win_amd64.whl", hash = "sha256:2aa055ae1857258f9e0045be26a6d62bdb47a72448b62d7b55f4820f361a2633", size = 223174, upload-time = "2026-03-17T10:32:36.369Z" }, + { url = "https://files.pythonhosted.org/packages/2c/f2/24d84e1dfe70f8ac9fdf30d338239860d0d1d5da0bda528959d0ebc9da28/coverage-7.13.5-cp314-cp314-win_arm64.whl", hash = "sha256:1b11eef33edeae9d142f9b4358edb76273b3bfd30bc3df9a4f95d0e49caf94e8", size = 221739, upload-time = "2026-03-17T10:32:38.736Z" }, + { url = "https://files.pythonhosted.org/packages/60/5b/4a168591057b3668c2428bff25dd3ebc21b629d666d90bcdfa0217940e84/coverage-7.13.5-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:10a0c37f0b646eaff7cce1874c31d1f1ccb297688d4c747291f4f4c70741cc8b", size = 220351, upload-time = "2026-03-17T10:32:41.196Z" }, + { url = "https://files.pythonhosted.org/packages/f5/21/1fd5c4dbfe4a58b6b99649125635df46decdfd4a784c3cd6d410d303e370/coverage-7.13.5-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:b5db73ba3c41c7008037fa731ad5459fc3944cb7452fc0aa9f822ad3533c583c", size = 220612, upload-time = "2026-03-17T10:32:43.204Z" }, + { url = "https://files.pythonhosted.org/packages/d6/fe/2a924b3055a5e7e4512655a9d4609781b0d62334fa0140c3e742926834e2/coverage-7.13.5-cp314-cp314t-manylinux1_i686.manylinux_2_28_i686.manylinux_2_5_i686.whl", hash = "sha256:750db93a81e3e5a9831b534be7b1229df848b2e125a604fe6651e48aa070e5f9", size = 261985, upload-time = "2026-03-17T10:32:45.514Z" }, + { url = "https://files.pythonhosted.org/packages/d7/0d/c8928f2bd518c45990fe1a2ab8db42e914ef9b726c975facc4282578c3eb/coverage-7.13.5-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:9ddb4f4a5479f2539644be484da179b653273bca1a323947d48ab107b3ed1f29", size = 264107, upload-time = "2026-03-17T10:32:47.971Z" }, + { url = "https://files.pythonhosted.org/packages/ef/ae/4ae35bbd9a0af9d820362751f0766582833c211224b38665c0f8de3d487f/coverage-7.13.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:d8a7a2049c14f413163e2bdabd37e41179b1d1ccb10ffc6ccc4b7a718429c607", size = 266513, upload-time = "2026-03-17T10:32:50.1Z" }, + { url = "https://files.pythonhosted.org/packages/9c/20/d326174c55af36f74eac6ae781612d9492f060ce8244b570bb9d50d9d609/coverage-7.13.5-cp314-cp314t-manylinux2014_ppc64le.manylinux_2_17_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:e1c85e0b6c05c592ea6d8768a66a254bfb3874b53774b12d4c89c481eb78cb90", size = 267650, upload-time = "2026-03-17T10:32:52.391Z" }, + { url = "https://files.pythonhosted.org/packages/7a/5e/31484d62cbd0eabd3412e30d74386ece4a0837d4f6c3040a653878bfc019/coverage-7.13.5-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:777c4d1eff1b67876139d24288aaf1817f6c03d6bae9c5cc8d27b83bcfe38fe3", size = 261089, upload-time = "2026-03-17T10:32:54.544Z" }, + { url = "https://files.pythonhosted.org/packages/e9/d8/49a72d6de146eebb0b7e48cc0f4bc2c0dd858e3d4790ab2b39a2872b62bd/coverage-7.13.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:6697e29b93707167687543480a40f0db8f356e86d9f67ddf2e37e2dfd91a9dab", size = 263982, upload-time = "2026-03-17T10:32:56.803Z" }, + { url = "https://files.pythonhosted.org/packages/06/3b/0351f1bd566e6e4dd39e978efe7958bde1d32f879e85589de147654f57bb/coverage-7.13.5-cp314-cp314t-musllinux_1_2_i686.whl", hash = "sha256:8fdf453a942c3e4d99bd80088141c4c6960bb232c409d9c3558e2dbaa3998562", size = 261579, upload-time = "2026-03-17T10:32:59.466Z" }, + { url = "https://files.pythonhosted.org/packages/5d/ce/796a2a2f4017f554d7810f5c573449b35b1e46788424a548d4d19201b222/coverage-7.13.5-cp314-cp314t-musllinux_1_2_ppc64le.whl", hash = "sha256:32ca0c0114c9834a43f045a87dcebd69d108d8ffb666957ea65aa132f50332e2", size = 265316, upload-time = "2026-03-17T10:33:01.847Z" }, + { url = "https://files.pythonhosted.org/packages/3d/16/d5ae91455541d1a78bc90abf495be600588aff8f6db5c8b0dae739fa39c9/coverage-7.13.5-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:8769751c10f339021e2638cd354e13adeac54004d1941119b2c96fe5276d45ea", size = 260427, upload-time = "2026-03-17T10:33:03.945Z" }, + { url = "https://files.pythonhosted.org/packages/48/11/07f413dba62db21fb3fad5d0de013a50e073cc4e2dc4306e770360f6dfc8/coverage-7.13.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:cec2d83125531bd153175354055cdb7a09987af08a9430bd173c937c6d0fba2a", size = 262745, upload-time = "2026-03-17T10:33:06.285Z" }, + { url = "https://files.pythonhosted.org/packages/91/15/d792371332eb4663115becf4bad47e047d16234b1aff687b1b18c58d60ae/coverage-7.13.5-cp314-cp314t-win32.whl", hash = "sha256:0cd9ed7a8b181775459296e402ca4fb27db1279740a24e93b3b41942ebe4b215", size = 223146, upload-time = "2026-03-17T10:33:08.756Z" }, + { url = "https://files.pythonhosted.org/packages/db/51/37221f59a111dca5e85be7dbf09696323b5b9f13ff65e0641d535ed06ea8/coverage-7.13.5-cp314-cp314t-win_amd64.whl", hash = "sha256:301e3b7dfefecaca37c9f1aa6f0049b7d4ab8dd933742b607765d757aca77d43", size = 224254, upload-time = "2026-03-17T10:33:11.174Z" }, + { url = "https://files.pythonhosted.org/packages/54/83/6acacc889de8987441aa7d5adfbdbf33d288dad28704a67e574f1df9bcbb/coverage-7.13.5-cp314-cp314t-win_arm64.whl", hash = "sha256:9dacc2ad679b292709e0f5fc1ac74a6d4d5562e424058962c7bb0c658ad25e45", size = 222276, upload-time = "2026-03-17T10:33:13.466Z" }, + { url = "https://files.pythonhosted.org/packages/9e/ee/a4cf96b8ce1e566ed238f0659ac2d3f007ed1d14b181bcb684e19561a69a/coverage-7.13.5-py3-none-any.whl", hash = "sha256:34b02417cf070e173989b3db962f7ed56d2f644307b2cf9d5a0f258e13084a61", size = 211346, upload-time = "2026-03-17T10:33:15.691Z" }, ] [package.optional-dependencies] @@ -621,82 +685,75 @@ toml = [ [[package]] name = "cryptography" -version = "46.0.7" +version = "46.0.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cffi", marker = "platform_python_implementation != 'PyPy'" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/47/93/ac8f3d5ff04d54bc814e961a43ae5b0b146154c89c61b47bb07557679b18/cryptography-46.0.7.tar.gz", hash = "sha256:e4cfd68c5f3e0bfdad0d38e023239b96a2fe84146481852dffbcca442c245aa5", size = 750652, upload-time = "2026-04-08T01:57:54.692Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/5d/4a8f770695d73be252331e60e526291e3df0c9b27556a90a6b47bccca4c2/cryptography-46.0.7-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:ea42cbe97209df307fdc3b155f1b6fa2577c0defa8f1f7d3be7d31d189108ad4", size = 7179869, upload-time = "2026-04-08T01:56:17.157Z" }, - { url = "https://files.pythonhosted.org/packages/5f/45/6d80dc379b0bbc1f9d1e429f42e4cb9e1d319c7a8201beffd967c516ea01/cryptography-46.0.7-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:b36a4695e29fe69215d75960b22577197aca3f7a25b9cf9d165dcfe9d80bc325", size = 4275492, upload-time = "2026-04-08T01:56:19.36Z" }, - { url = "https://files.pythonhosted.org/packages/4a/9a/1765afe9f572e239c3469f2cb429f3ba7b31878c893b246b4b2994ffe2fe/cryptography-46.0.7-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:5ad9ef796328c5e3c4ceed237a183f5d41d21150f972455a9d926593a1dcb308", size = 4426670, upload-time = "2026-04-08T01:56:21.415Z" }, - { url = "https://files.pythonhosted.org/packages/8f/3e/af9246aaf23cd4ee060699adab1e47ced3f5f7e7a8ffdd339f817b446462/cryptography-46.0.7-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:73510b83623e080a2c35c62c15298096e2a5dc8d51c3b4e1740211839d0dea77", size = 4280275, upload-time = "2026-04-08T01:56:23.539Z" }, - { url = "https://files.pythonhosted.org/packages/0f/54/6bbbfc5efe86f9d71041827b793c24811a017c6ac0fd12883e4caa86b8ed/cryptography-46.0.7-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cbd5fb06b62bd0721e1170273d3f4d5a277044c47ca27ee257025146c34cbdd1", size = 4928402, upload-time = "2026-04-08T01:56:25.624Z" }, - { url = "https://files.pythonhosted.org/packages/2d/cf/054b9d8220f81509939599c8bdbc0c408dbd2bdd41688616a20731371fe0/cryptography-46.0.7-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:420b1e4109cc95f0e5700eed79908cef9268265c773d3a66f7af1eef53d409ef", size = 4459985, upload-time = "2026-04-08T01:56:27.309Z" }, - { url = "https://files.pythonhosted.org/packages/f9/46/4e4e9c6040fb01c7467d47217d2f882daddeb8828f7df800cb806d8a2288/cryptography-46.0.7-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:24402210aa54baae71d99441d15bb5a1919c195398a87b563df84468160a65de", size = 3990652, upload-time = "2026-04-08T01:56:29.095Z" }, - { url = "https://files.pythonhosted.org/packages/36/5f/313586c3be5a2fbe87e4c9a254207b860155a8e1f3cca99f9910008e7d08/cryptography-46.0.7-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:8a469028a86f12eb7d2fe97162d0634026d92a21f3ae0ac87ed1c4a447886c83", size = 4279805, upload-time = "2026-04-08T01:56:30.928Z" }, - { url = "https://files.pythonhosted.org/packages/69/33/60dfc4595f334a2082749673386a4d05e4f0cf4df8248e63b2c3437585f2/cryptography-46.0.7-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:9694078c5d44c157ef3162e3bf3946510b857df5a3955458381d1c7cfc143ddb", size = 4892883, upload-time = "2026-04-08T01:56:32.614Z" }, - { url = "https://files.pythonhosted.org/packages/c7/0b/333ddab4270c4f5b972f980adef4faa66951a4aaf646ca067af597f15563/cryptography-46.0.7-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:42a1e5f98abb6391717978baf9f90dc28a743b7d9be7f0751a6f56a75d14065b", size = 4459756, upload-time = "2026-04-08T01:56:34.306Z" }, - { url = "https://files.pythonhosted.org/packages/d2/14/633913398b43b75f1234834170947957c6b623d1701ffc7a9600da907e89/cryptography-46.0.7-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:91bbcb08347344f810cbe49065914fe048949648f6bd5c2519f34619142bbe85", size = 4410244, upload-time = "2026-04-08T01:56:35.977Z" }, - { url = "https://files.pythonhosted.org/packages/10/f2/19ceb3b3dc14009373432af0c13f46aa08e3ce334ec6eff13492e1812ccd/cryptography-46.0.7-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:5d1c02a14ceb9148cc7816249f64f623fbfee39e8c03b3650d842ad3f34d637e", size = 4674868, upload-time = "2026-04-08T01:56:38.034Z" }, - { url = "https://files.pythonhosted.org/packages/1a/bb/a5c213c19ee94b15dfccc48f363738633a493812687f5567addbcbba9f6f/cryptography-46.0.7-cp311-abi3-win32.whl", hash = "sha256:d23c8ca48e44ee015cd0a54aeccdf9f09004eba9fc96f38c911011d9ff1bd457", size = 3026504, upload-time = "2026-04-08T01:56:39.666Z" }, - { url = "https://files.pythonhosted.org/packages/2b/02/7788f9fefa1d060ca68717c3901ae7fffa21ee087a90b7f23c7a603c32ae/cryptography-46.0.7-cp311-abi3-win_amd64.whl", hash = "sha256:397655da831414d165029da9bc483bed2fe0e75dde6a1523ec2fe63f3c46046b", size = 3488363, upload-time = "2026-04-08T01:56:41.893Z" }, - { url = "https://files.pythonhosted.org/packages/7b/56/15619b210e689c5403bb0540e4cb7dbf11a6bf42e483b7644e471a2812b3/cryptography-46.0.7-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:d151173275e1728cf7839aaa80c34fe550c04ddb27b34f48c232193df8db5842", size = 7119671, upload-time = "2026-04-08T01:56:44Z" }, - { url = "https://files.pythonhosted.org/packages/74/66/e3ce040721b0b5599e175ba91ab08884c75928fbeb74597dd10ef13505d2/cryptography-46.0.7-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:db0f493b9181c7820c8134437eb8b0b4792085d37dbb24da050476ccb664e59c", size = 4268551, upload-time = "2026-04-08T01:56:46.071Z" }, - { url = "https://files.pythonhosted.org/packages/03/11/5e395f961d6868269835dee1bafec6a1ac176505a167f68b7d8818431068/cryptography-46.0.7-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ebd6daf519b9f189f85c479427bbd6e9c9037862cf8fe89ee35503bd209ed902", size = 4408887, upload-time = "2026-04-08T01:56:47.718Z" }, - { url = "https://files.pythonhosted.org/packages/40/53/8ed1cf4c3b9c8e611e7122fb56f1c32d09e1fff0f1d77e78d9ff7c82653e/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:b7b412817be92117ec5ed95f880defe9cf18a832e8cafacf0a22337dc1981b4d", size = 4271354, upload-time = "2026-04-08T01:56:49.312Z" }, - { url = "https://files.pythonhosted.org/packages/50/46/cf71e26025c2e767c5609162c866a78e8a2915bbcfa408b7ca495c6140c4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:fbfd0e5f273877695cb93baf14b185f4878128b250cc9f8e617ea0c025dfb022", size = 4905845, upload-time = "2026-04-08T01:56:50.916Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ea/01276740375bac6249d0a971ebdf6b4dc9ead0ee0a34ef3b5a88c1a9b0d4/cryptography-46.0.7-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:ffca7aa1d00cf7d6469b988c581598f2259e46215e0140af408966a24cf086ce", size = 4444641, upload-time = "2026-04-08T01:56:52.882Z" }, - { url = "https://files.pythonhosted.org/packages/3d/4c/7d258f169ae71230f25d9f3d06caabcff8c3baf0978e2b7d65e0acac3827/cryptography-46.0.7-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:60627cf07e0d9274338521205899337c5d18249db56865f943cbe753aa96f40f", size = 3967749, upload-time = "2026-04-08T01:56:54.597Z" }, - { url = "https://files.pythonhosted.org/packages/b5/2a/2ea0767cad19e71b3530e4cad9605d0b5e338b6a1e72c37c9c1ceb86c333/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:80406c3065e2c55d7f49a9550fe0c49b3f12e5bfff5dedb727e319e1afb9bf99", size = 4270942, upload-time = "2026-04-08T01:56:56.416Z" }, - { url = "https://files.pythonhosted.org/packages/41/3d/fe14df95a83319af25717677e956567a105bb6ab25641acaa093db79975d/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:c5b1ccd1239f48b7151a65bc6dd54bcfcc15e028c8ac126d3fada09db0e07ef1", size = 4871079, upload-time = "2026-04-08T01:56:58.31Z" }, - { url = "https://files.pythonhosted.org/packages/9c/59/4a479e0f36f8f378d397f4eab4c850b4ffb79a2f0d58704b8fa0703ddc11/cryptography-46.0.7-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:d5f7520159cd9c2154eb61eb67548ca05c5774d39e9c2c4339fd793fe7d097b2", size = 4443999, upload-time = "2026-04-08T01:57:00.508Z" }, - { url = "https://files.pythonhosted.org/packages/28/17/b59a741645822ec6d04732b43c5d35e4ef58be7bfa84a81e5ae6f05a1d33/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:fcd8eac50d9138c1d7fc53a653ba60a2bee81a505f9f8850b6b2888555a45d0e", size = 4399191, upload-time = "2026-04-08T01:57:02.654Z" }, - { url = "https://files.pythonhosted.org/packages/59/6a/bb2e166d6d0e0955f1e9ff70f10ec4b2824c9cfcdb4da772c7dd69cc7d80/cryptography-46.0.7-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:65814c60f8cc400c63131584e3e1fad01235edba2614b61fbfbfa954082db0ee", size = 4655782, upload-time = "2026-04-08T01:57:04.592Z" }, - { url = "https://files.pythonhosted.org/packages/95/b6/3da51d48415bcb63b00dc17c2eff3a651b7c4fed484308d0f19b30e8cb2c/cryptography-46.0.7-cp314-cp314t-win32.whl", hash = "sha256:fdd1736fed309b4300346f88f74cd120c27c56852c3838cab416e7a166f67298", size = 3002227, upload-time = "2026-04-08T01:57:06.91Z" }, - { url = "https://files.pythonhosted.org/packages/32/a8/9f0e4ed57ec9cebe506e58db11ae472972ecb0c659e4d52bbaee80ca340a/cryptography-46.0.7-cp314-cp314t-win_amd64.whl", hash = "sha256:e06acf3c99be55aa3b516397fe42f5855597f430add9c17fa46bf2e0fb34c9bb", size = 3475332, upload-time = "2026-04-08T01:57:08.807Z" }, - { url = "https://files.pythonhosted.org/packages/a7/7f/cd42fc3614386bc0c12f0cb3c4ae1fc2bbca5c9662dfed031514911d513d/cryptography-46.0.7-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:462ad5cb1c148a22b2e3bcc5ad52504dff325d17daf5df8d88c17dda1f75f2a4", size = 7165618, upload-time = "2026-04-08T01:57:10.645Z" }, - { url = "https://files.pythonhosted.org/packages/a5/d0/36a49f0262d2319139d2829f773f1b97ef8aef7f97e6e5bd21455e5a8fb5/cryptography-46.0.7-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:84d4cced91f0f159a7ddacad249cc077e63195c36aac40b4150e7a57e84fffe7", size = 4270628, upload-time = "2026-04-08T01:57:12.885Z" }, - { url = "https://files.pythonhosted.org/packages/8a/6c/1a42450f464dda6ffbe578a911f773e54dd48c10f9895a23a7e88b3e7db5/cryptography-46.0.7-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:128c5edfe5e5938b86b03941e94fac9ee793a94452ad1365c9fc3f4f62216832", size = 4415405, upload-time = "2026-04-08T01:57:14.923Z" }, - { url = "https://files.pythonhosted.org/packages/9a/92/4ed714dbe93a066dc1f4b4581a464d2d7dbec9046f7c8b7016f5286329e2/cryptography-46.0.7-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:5e51be372b26ef4ba3de3c167cd3d1022934bc838ae9eaad7e644986d2a3d163", size = 4272715, upload-time = "2026-04-08T01:57:16.638Z" }, - { url = "https://files.pythonhosted.org/packages/b7/e6/a26b84096eddd51494bba19111f8fffe976f6a09f132706f8f1bf03f51f7/cryptography-46.0.7-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:cdf1a610ef82abb396451862739e3fc93b071c844399e15b90726ef7470eeaf2", size = 4918400, upload-time = "2026-04-08T01:57:19.021Z" }, - { url = "https://files.pythonhosted.org/packages/c7/08/ffd537b605568a148543ac3c2b239708ae0bd635064bab41359252ef88ed/cryptography-46.0.7-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:1d25aee46d0c6f1a501adcddb2d2fee4b979381346a78558ed13e50aa8a59067", size = 4450634, upload-time = "2026-04-08T01:57:21.185Z" }, - { url = "https://files.pythonhosted.org/packages/16/01/0cd51dd86ab5b9befe0d031e276510491976c3a80e9f6e31810cce46c4ad/cryptography-46.0.7-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:cdfbe22376065ffcf8be74dc9a909f032df19bc58a699456a21712d6e5eabfd0", size = 3985233, upload-time = "2026-04-08T01:57:22.862Z" }, - { url = "https://files.pythonhosted.org/packages/92/49/819d6ed3a7d9349c2939f81b500a738cb733ab62fbecdbc1e38e83d45e12/cryptography-46.0.7-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:abad9dac36cbf55de6eb49badd4016806b3165d396f64925bf2999bcb67837ba", size = 4271955, upload-time = "2026-04-08T01:57:24.814Z" }, - { url = "https://files.pythonhosted.org/packages/80/07/ad9b3c56ebb95ed2473d46df0847357e01583f4c52a85754d1a55e29e4d0/cryptography-46.0.7-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:935ce7e3cfdb53e3536119a542b839bb94ec1ad081013e9ab9b7cfd478b05006", size = 4879888, upload-time = "2026-04-08T01:57:26.88Z" }, - { url = "https://files.pythonhosted.org/packages/b8/c7/201d3d58f30c4c2bdbe9b03844c291feb77c20511cc3586daf7edc12a47b/cryptography-46.0.7-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:35719dc79d4730d30f1c2b6474bd6acda36ae2dfae1e3c16f2051f215df33ce0", size = 4449961, upload-time = "2026-04-08T01:57:29.068Z" }, - { url = "https://files.pythonhosted.org/packages/a5/ef/649750cbf96f3033c3c976e112265c33906f8e462291a33d77f90356548c/cryptography-46.0.7-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:7bbc6ccf49d05ac8f7d7b5e2e2c33830d4fe2061def88210a126d130d7f71a85", size = 4401696, upload-time = "2026-04-08T01:57:31.029Z" }, - { url = "https://files.pythonhosted.org/packages/41/52/a8908dcb1a389a459a29008c29966c1d552588d4ae6d43f3a1a4512e0ebe/cryptography-46.0.7-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:a1529d614f44b863a7b480c6d000fe93b59acee9c82ffa027cfadc77521a9f5e", size = 4664256, upload-time = "2026-04-08T01:57:33.144Z" }, - { url = "https://files.pythonhosted.org/packages/4b/fa/f0ab06238e899cc3fb332623f337a7364f36f4bb3f2534c2bb95a35b132c/cryptography-46.0.7-cp38-abi3-win32.whl", hash = "sha256:f247c8c1a1fb45e12586afbb436ef21ff1e80670b2861a90353d9b025583d246", size = 3013001, upload-time = "2026-04-08T01:57:34.933Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f1/00ce3bde3ca542d1acd8f8cfa38e446840945aa6363f9b74746394b14127/cryptography-46.0.7-cp38-abi3-win_amd64.whl", hash = "sha256:506c4ff91eff4f82bdac7633318a526b1d1309fc07ca76a3ad182cb5b686d6d3", size = 3472985, upload-time = "2026-04-08T01:57:36.714Z" }, - { url = "https://files.pythonhosted.org/packages/63/0c/dca8abb64e7ca4f6b2978769f6fea5ad06686a190cec381f0a796fdcaaba/cryptography-46.0.7-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:fc9ab8856ae6cf7c9358430e49b368f3108f050031442eaeb6b9d87e4dcf4e4f", size = 3476879, upload-time = "2026-04-08T01:57:38.664Z" }, - { url = "https://files.pythonhosted.org/packages/3a/ea/075aac6a84b7c271578d81a2f9968acb6e273002408729f2ddff517fed4a/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:d3b99c535a9de0adced13d159c5a9cf65c325601aa30f4be08afd680643e9c15", size = 4219700, upload-time = "2026-04-08T01:57:40.625Z" }, - { url = "https://files.pythonhosted.org/packages/6c/7b/1c55db7242b5e5612b29fc7a630e91ee7a6e3c8e7bf5406d22e206875fbd/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:d02c738dacda7dc2a74d1b2b3177042009d5cab7c7079db74afc19e56ca1b455", size = 4385982, upload-time = "2026-04-08T01:57:42.725Z" }, - { url = "https://files.pythonhosted.org/packages/cb/da/9870eec4b69c63ef5925bf7d8342b7e13bc2ee3d47791461c4e49ca212f4/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:04959522f938493042d595a736e7dbdff6eb6cc2339c11465b3ff89343b65f65", size = 4219115, upload-time = "2026-04-08T01:57:44.939Z" }, - { url = "https://files.pythonhosted.org/packages/f4/72/05aa5832b82dd341969e9a734d1812a6aadb088d9eb6f0430fc337cc5a8f/cryptography-46.0.7-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:3986ac1dee6def53797289999eabe84798ad7817f3e97779b5061a95b0ee4968", size = 4385479, upload-time = "2026-04-08T01:57:46.86Z" }, - { url = "https://files.pythonhosted.org/packages/20/2a/1b016902351a523aa2bd446b50a5bc1175d7a7d1cf90fe2ef904f9b84ebc/cryptography-46.0.7-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:258514877e15963bd43b558917bc9f54cf7cf866c38aa576ebf47a77ddbc43a4", size = 3412829, upload-time = "2026-04-08T01:57:48.874Z" }, -] - -[[package]] -name = "datamodel-code-generator" -version = "0.55.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "argcomplete" }, - { name = "black" }, - { name = "genson" }, - { name = "inflect" }, - { name = "isort" }, - { name = "jinja2" }, - { name = "pydantic" }, - { name = "pyyaml" }, - { name = "tomli", marker = "python_full_version < '3.12'" }, +sdist = { url = "https://files.pythonhosted.org/packages/60/04/ee2a9e8542e4fa2773b81771ff8349ff19cdd56b7258a0cc442639052edb/cryptography-46.0.5.tar.gz", hash = "sha256:abace499247268e3757271b2f1e244b36b06f8515cf27c4d49468fc9eb16e93d", size = 750064, upload-time = "2026-02-10T19:18:38.255Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f7/81/b0bb27f2ba931a65409c6b8a8b358a7f03c0e46eceacddff55f7c84b1f3b/cryptography-46.0.5-cp311-abi3-macosx_10_9_universal2.whl", hash = "sha256:351695ada9ea9618b3500b490ad54c739860883df6c1f555e088eaf25b1bbaad", size = 7176289, upload-time = "2026-02-10T19:17:08.274Z" }, + { url = "https://files.pythonhosted.org/packages/ff/9e/6b4397a3e3d15123de3b1806ef342522393d50736c13b20ec4c9ea6693a6/cryptography-46.0.5-cp311-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:c18ff11e86df2e28854939acde2d003f7984f721eba450b56a200ad90eeb0e6b", size = 4275637, upload-time = "2026-02-10T19:17:10.53Z" }, + { url = "https://files.pythonhosted.org/packages/63/e7/471ab61099a3920b0c77852ea3f0ea611c9702f651600397ac567848b897/cryptography-46.0.5-cp311-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4d7e3d356b8cd4ea5aff04f129d5f66ebdc7b6f8eae802b93739ed520c47c79b", size = 4424742, upload-time = "2026-02-10T19:17:12.388Z" }, + { url = "https://files.pythonhosted.org/packages/37/53/a18500f270342d66bf7e4d9f091114e31e5ee9e7375a5aba2e85a91e0044/cryptography-46.0.5-cp311-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:50bfb6925eff619c9c023b967d5b77a54e04256c4281b0e21336a130cd7fc263", size = 4277528, upload-time = "2026-02-10T19:17:13.853Z" }, + { url = "https://files.pythonhosted.org/packages/22/29/c2e812ebc38c57b40e7c583895e73c8c5adb4d1e4a0cc4c5a4fdab2b1acc/cryptography-46.0.5-cp311-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:803812e111e75d1aa73690d2facc295eaefd4439be1023fefc4995eaea2af90d", size = 4947993, upload-time = "2026-02-10T19:17:15.618Z" }, + { url = "https://files.pythonhosted.org/packages/6b/e7/237155ae19a9023de7e30ec64e5d99a9431a567407ac21170a046d22a5a3/cryptography-46.0.5-cp311-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ee190460e2fbe447175cda91b88b84ae8322a104fc27766ad09428754a618ed", size = 4456855, upload-time = "2026-02-10T19:17:17.221Z" }, + { url = "https://files.pythonhosted.org/packages/2d/87/fc628a7ad85b81206738abbd213b07702bcbdada1dd43f72236ef3cffbb5/cryptography-46.0.5-cp311-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:f145bba11b878005c496e93e257c1e88f154d278d2638e6450d17e0f31e558d2", size = 3984635, upload-time = "2026-02-10T19:17:18.792Z" }, + { url = "https://files.pythonhosted.org/packages/84/29/65b55622bde135aedf4565dc509d99b560ee4095e56989e815f8fd2aa910/cryptography-46.0.5-cp311-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:e9251e3be159d1020c4030bd2e5f84d6a43fe54b6c19c12f51cde9542a2817b2", size = 4277038, upload-time = "2026-02-10T19:17:20.256Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/45e76c68d7311432741faf1fbf7fac8a196a0a735ca21f504c75d37e2558/cryptography-46.0.5-cp311-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:47fb8a66058b80e509c47118ef8a75d14c455e81ac369050f20ba0d23e77fee0", size = 4912181, upload-time = "2026-02-10T19:17:21.825Z" }, + { url = "https://files.pythonhosted.org/packages/6d/1a/c1ba8fead184d6e3d5afcf03d569acac5ad063f3ac9fb7258af158f7e378/cryptography-46.0.5-cp311-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:4c3341037c136030cb46e4b1e17b7418ea4cbd9dd207e4a6f3b2b24e0d4ac731", size = 4456482, upload-time = "2026-02-10T19:17:25.133Z" }, + { url = "https://files.pythonhosted.org/packages/f9/e5/3fb22e37f66827ced3b902cf895e6a6bc1d095b5b26be26bd13c441fdf19/cryptography-46.0.5-cp311-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:890bcb4abd5a2d3f852196437129eb3667d62630333aacc13dfd470fad3aaa82", size = 4405497, upload-time = "2026-02-10T19:17:26.66Z" }, + { url = "https://files.pythonhosted.org/packages/1a/df/9d58bb32b1121a8a2f27383fabae4d63080c7ca60b9b5c88be742be04ee7/cryptography-46.0.5-cp311-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:80a8d7bfdf38f87ca30a5391c0c9ce4ed2926918e017c29ddf643d0ed2778ea1", size = 4667819, upload-time = "2026-02-10T19:17:28.569Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ed/325d2a490c5e94038cdb0117da9397ece1f11201f425c4e9c57fe5b9f08b/cryptography-46.0.5-cp311-abi3-win32.whl", hash = "sha256:60ee7e19e95104d4c03871d7d7dfb3d22ef8a9b9c6778c94e1c8fcc8365afd48", size = 3028230, upload-time = "2026-02-10T19:17:30.518Z" }, + { url = "https://files.pythonhosted.org/packages/e9/5a/ac0f49e48063ab4255d9e3b79f5def51697fce1a95ea1370f03dc9db76f6/cryptography-46.0.5-cp311-abi3-win_amd64.whl", hash = "sha256:38946c54b16c885c72c4f59846be9743d699eee2b69b6988e0a00a01f46a61a4", size = 3480909, upload-time = "2026-02-10T19:17:32.083Z" }, + { url = "https://files.pythonhosted.org/packages/00/13/3d278bfa7a15a96b9dc22db5a12ad1e48a9eb3d40e1827ef66a5df75d0d0/cryptography-46.0.5-cp314-cp314t-macosx_10_9_universal2.whl", hash = "sha256:94a76daa32eb78d61339aff7952ea819b1734b46f73646a07decb40e5b3448e2", size = 7119287, upload-time = "2026-02-10T19:17:33.801Z" }, + { url = "https://files.pythonhosted.org/packages/67/c8/581a6702e14f0898a0848105cbefd20c058099e2c2d22ef4e476dfec75d7/cryptography-46.0.5-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:5be7bf2fb40769e05739dd0046e7b26f9d4670badc7b032d6ce4db64dddc0678", size = 4265728, upload-time = "2026-02-10T19:17:35.569Z" }, + { url = "https://files.pythonhosted.org/packages/dd/4a/ba1a65ce8fc65435e5a849558379896c957870dd64fecea97b1ad5f46a37/cryptography-46.0.5-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:fe346b143ff9685e40192a4960938545c699054ba11d4f9029f94751e3f71d87", size = 4408287, upload-time = "2026-02-10T19:17:36.938Z" }, + { url = "https://files.pythonhosted.org/packages/f8/67/8ffdbf7b65ed1ac224d1c2df3943553766914a8ca718747ee3871da6107e/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_aarch64.whl", hash = "sha256:c69fd885df7d089548a42d5ec05be26050ebcd2283d89b3d30676eb32ff87dee", size = 4270291, upload-time = "2026-02-10T19:17:38.748Z" }, + { url = "https://files.pythonhosted.org/packages/f8/e5/f52377ee93bc2f2bba55a41a886fd208c15276ffbd2569f2ddc89d50e2c5/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_ppc64le.whl", hash = "sha256:8293f3dea7fc929ef7240796ba231413afa7b68ce38fd21da2995549f5961981", size = 4927539, upload-time = "2026-02-10T19:17:40.241Z" }, + { url = "https://files.pythonhosted.org/packages/3b/02/cfe39181b02419bbbbcf3abdd16c1c5c8541f03ca8bda240debc467d5a12/cryptography-46.0.5-cp314-cp314t-manylinux_2_28_x86_64.whl", hash = "sha256:1abfdb89b41c3be0365328a410baa9df3ff8a9110fb75e7b52e66803ddabc9a9", size = 4442199, upload-time = "2026-02-10T19:17:41.789Z" }, + { url = "https://files.pythonhosted.org/packages/c0/96/2fcaeb4873e536cf71421a388a6c11b5bc846e986b2b069c79363dc1648e/cryptography-46.0.5-cp314-cp314t-manylinux_2_31_armv7l.whl", hash = "sha256:d66e421495fdb797610a08f43b05269e0a5ea7f5e652a89bfd5a7d3c1dee3648", size = 3960131, upload-time = "2026-02-10T19:17:43.379Z" }, + { url = "https://files.pythonhosted.org/packages/d8/d2/b27631f401ddd644e94c5cf33c9a4069f72011821cf3dc7309546b0642a0/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_aarch64.whl", hash = "sha256:4e817a8920bfbcff8940ecfd60f23d01836408242b30f1a708d93198393a80b4", size = 4270072, upload-time = "2026-02-10T19:17:45.481Z" }, + { url = "https://files.pythonhosted.org/packages/f4/a7/60d32b0370dae0b4ebe55ffa10e8599a2a59935b5ece1b9f06edb73abdeb/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_ppc64le.whl", hash = "sha256:68f68d13f2e1cb95163fa3b4db4bf9a159a418f5f6e7242564fc75fcae667fd0", size = 4892170, upload-time = "2026-02-10T19:17:46.997Z" }, + { url = "https://files.pythonhosted.org/packages/d2/b9/cf73ddf8ef1164330eb0b199a589103c363afa0cf794218c24d524a58eab/cryptography-46.0.5-cp314-cp314t-manylinux_2_34_x86_64.whl", hash = "sha256:a3d1fae9863299076f05cb8a778c467578262fae09f9dc0ee9b12eb4268ce663", size = 4441741, upload-time = "2026-02-10T19:17:48.661Z" }, + { url = "https://files.pythonhosted.org/packages/5f/eb/eee00b28c84c726fe8fa0158c65afe312d9c3b78d9d01daf700f1f6e37ff/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:c4143987a42a2397f2fc3b4d7e3a7d313fbe684f67ff443999e803dd75a76826", size = 4396728, upload-time = "2026-02-10T19:17:50.058Z" }, + { url = "https://files.pythonhosted.org/packages/65/f4/6bc1a9ed5aef7145045114b75b77c2a8261b4d38717bd8dea111a63c3442/cryptography-46.0.5-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:7d731d4b107030987fd61a7f8ab512b25b53cef8f233a97379ede116f30eb67d", size = 4652001, upload-time = "2026-02-10T19:17:51.54Z" }, + { url = "https://files.pythonhosted.org/packages/86/ef/5d00ef966ddd71ac2e6951d278884a84a40ffbd88948ef0e294b214ae9e4/cryptography-46.0.5-cp314-cp314t-win32.whl", hash = "sha256:c3bcce8521d785d510b2aad26ae2c966092b7daa8f45dd8f44734a104dc0bc1a", size = 3003637, upload-time = "2026-02-10T19:17:52.997Z" }, + { url = "https://files.pythonhosted.org/packages/b7/57/f3f4160123da6d098db78350fdfd9705057aad21de7388eacb2401dceab9/cryptography-46.0.5-cp314-cp314t-win_amd64.whl", hash = "sha256:4d8ae8659ab18c65ced284993c2265910f6c9e650189d4e3f68445ef82a810e4", size = 3469487, upload-time = "2026-02-10T19:17:54.549Z" }, + { url = "https://files.pythonhosted.org/packages/e2/fa/a66aa722105ad6a458bebd64086ca2b72cdd361fed31763d20390f6f1389/cryptography-46.0.5-cp38-abi3-macosx_10_9_universal2.whl", hash = "sha256:4108d4c09fbbf2789d0c926eb4152ae1760d5a2d97612b92d508d96c861e4d31", size = 7170514, upload-time = "2026-02-10T19:17:56.267Z" }, + { url = "https://files.pythonhosted.org/packages/0f/04/c85bdeab78c8bc77b701bf0d9bdcf514c044e18a46dcff330df5448631b0/cryptography-46.0.5-cp38-abi3-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:7d1f30a86d2757199cb2d56e48cce14deddf1f9c95f1ef1b64ee91ea43fe2e18", size = 4275349, upload-time = "2026-02-10T19:17:58.419Z" }, + { url = "https://files.pythonhosted.org/packages/5c/32/9b87132a2f91ee7f5223b091dc963055503e9b442c98fc0b8a5ca765fab0/cryptography-46.0.5-cp38-abi3-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:039917b0dc418bb9f6edce8a906572d69e74bd330b0b3fea4f79dab7f8ddd235", size = 4420667, upload-time = "2026-02-10T19:18:00.619Z" }, + { url = "https://files.pythonhosted.org/packages/a1/a6/a7cb7010bec4b7c5692ca6f024150371b295ee1c108bdc1c400e4c44562b/cryptography-46.0.5-cp38-abi3-manylinux_2_28_aarch64.whl", hash = "sha256:ba2a27ff02f48193fc4daeadf8ad2590516fa3d0adeeb34336b96f7fa64c1e3a", size = 4276980, upload-time = "2026-02-10T19:18:02.379Z" }, + { url = "https://files.pythonhosted.org/packages/8e/7c/c4f45e0eeff9b91e3f12dbd0e165fcf2a38847288fcfd889deea99fb7b6d/cryptography-46.0.5-cp38-abi3-manylinux_2_28_ppc64le.whl", hash = "sha256:61aa400dce22cb001a98014f647dc21cda08f7915ceb95df0c9eaf84b4b6af76", size = 4939143, upload-time = "2026-02-10T19:18:03.964Z" }, + { url = "https://files.pythonhosted.org/packages/37/19/e1b8f964a834eddb44fa1b9a9976f4e414cbb7aa62809b6760c8803d22d1/cryptography-46.0.5-cp38-abi3-manylinux_2_28_x86_64.whl", hash = "sha256:3ce58ba46e1bc2aac4f7d9290223cead56743fa6ab94a5d53292ffaac6a91614", size = 4453674, upload-time = "2026-02-10T19:18:05.588Z" }, + { url = "https://files.pythonhosted.org/packages/db/ed/db15d3956f65264ca204625597c410d420e26530c4e2943e05a0d2f24d51/cryptography-46.0.5-cp38-abi3-manylinux_2_31_armv7l.whl", hash = "sha256:420d0e909050490d04359e7fdb5ed7e667ca5c3c402b809ae2563d7e66a92229", size = 3978801, upload-time = "2026-02-10T19:18:07.167Z" }, + { url = "https://files.pythonhosted.org/packages/41/e2/df40a31d82df0a70a0daf69791f91dbb70e47644c58581d654879b382d11/cryptography-46.0.5-cp38-abi3-manylinux_2_34_aarch64.whl", hash = "sha256:582f5fcd2afa31622f317f80426a027f30dc792e9c80ffee87b993200ea115f1", size = 4276755, upload-time = "2026-02-10T19:18:09.813Z" }, + { url = "https://files.pythonhosted.org/packages/33/45/726809d1176959f4a896b86907b98ff4391a8aa29c0aaaf9450a8a10630e/cryptography-46.0.5-cp38-abi3-manylinux_2_34_ppc64le.whl", hash = "sha256:bfd56bb4b37ed4f330b82402f6f435845a5f5648edf1ad497da51a8452d5d62d", size = 4901539, upload-time = "2026-02-10T19:18:11.263Z" }, + { url = "https://files.pythonhosted.org/packages/99/0f/a3076874e9c88ecb2ecc31382f6e7c21b428ede6f55aafa1aa272613e3cd/cryptography-46.0.5-cp38-abi3-manylinux_2_34_x86_64.whl", hash = "sha256:a3d507bb6a513ca96ba84443226af944b0f7f47dcc9a399d110cd6146481d24c", size = 4452794, upload-time = "2026-02-10T19:18:12.914Z" }, + { url = "https://files.pythonhosted.org/packages/02/ef/ffeb542d3683d24194a38f66ca17c0a4b8bf10631feef44a7ef64e631b1a/cryptography-46.0.5-cp38-abi3-musllinux_1_2_aarch64.whl", hash = "sha256:9f16fbdf4da055efb21c22d81b89f155f02ba420558db21288b3d0035bafd5f4", size = 4404160, upload-time = "2026-02-10T19:18:14.375Z" }, + { url = "https://files.pythonhosted.org/packages/96/93/682d2b43c1d5f1406ed048f377c0fc9fc8f7b0447a478d5c65ab3d3a66eb/cryptography-46.0.5-cp38-abi3-musllinux_1_2_x86_64.whl", hash = "sha256:ced80795227d70549a411a4ab66e8ce307899fad2220ce5ab2f296e687eacde9", size = 4667123, upload-time = "2026-02-10T19:18:15.886Z" }, + { url = "https://files.pythonhosted.org/packages/45/2d/9c5f2926cb5300a8eefc3f4f0b3f3df39db7f7ce40c8365444c49363cbda/cryptography-46.0.5-cp38-abi3-win32.whl", hash = "sha256:02f547fce831f5096c9a567fd41bc12ca8f11df260959ecc7c3202555cc47a72", size = 3010220, upload-time = "2026-02-10T19:18:17.361Z" }, + { url = "https://files.pythonhosted.org/packages/48/ef/0c2f4a8e31018a986949d34a01115dd057bf536905dca38897bacd21fac3/cryptography-46.0.5-cp38-abi3-win_amd64.whl", hash = "sha256:556e106ee01aa13484ce9b0239bca667be5004efb0aabbed28d353df86445595", size = 3467050, upload-time = "2026-02-10T19:18:18.899Z" }, + { url = "https://files.pythonhosted.org/packages/eb/dd/2d9fdb07cebdf3d51179730afb7d5e576153c6744c3ff8fded23030c204e/cryptography-46.0.5-pp311-pypy311_pp73-macosx_11_0_arm64.whl", hash = "sha256:3b4995dc971c9fb83c25aa44cf45f02ba86f71ee600d81091c2f0cbae116b06c", size = 3476964, upload-time = "2026-02-10T19:18:20.687Z" }, + { url = "https://files.pythonhosted.org/packages/e9/6f/6cc6cc9955caa6eaf83660b0da2b077c7fe8ff9950a3c5e45d605038d439/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_aarch64.whl", hash = "sha256:bc84e875994c3b445871ea7181d424588171efec3e185dced958dad9e001950a", size = 4218321, upload-time = "2026-02-10T19:18:22.349Z" }, + { url = "https://files.pythonhosted.org/packages/3e/5d/c4da701939eeee699566a6c1367427ab91a8b7088cc2328c09dbee940415/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_28_x86_64.whl", hash = "sha256:2ae6971afd6246710480e3f15824ed3029a60fc16991db250034efd0b9fb4356", size = 4381786, upload-time = "2026-02-10T19:18:24.529Z" }, + { url = "https://files.pythonhosted.org/packages/ac/97/a538654732974a94ff96c1db621fa464f455c02d4bb7d2652f4edc21d600/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_aarch64.whl", hash = "sha256:d861ee9e76ace6cf36a6a89b959ec08e7bc2493ee39d07ffe5acb23ef46d27da", size = 4217990, upload-time = "2026-02-10T19:18:25.957Z" }, + { url = "https://files.pythonhosted.org/packages/ae/11/7e500d2dd3ba891197b9efd2da5454b74336d64a7cc419aa7327ab74e5f6/cryptography-46.0.5-pp311-pypy311_pp73-manylinux_2_34_x86_64.whl", hash = "sha256:2b7a67c9cd56372f3249b39699f2ad479f6991e62ea15800973b956f4b73e257", size = 4381252, upload-time = "2026-02-10T19:18:27.496Z" }, + { url = "https://files.pythonhosted.org/packages/bc/58/6b3d24e6b9bc474a2dcdee65dfd1f008867015408a271562e4b690561a4d/cryptography-46.0.5-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:8456928655f856c6e1533ff59d5be76578a7157224dbd9ce6872f25055ab9ab7", size = 3407605, upload-time = "2026-02-10T19:18:29.233Z" }, +] + +[[package]] +name = "culsans" +version = "0.11.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "aiologic", marker = "python_full_version < '3.13'" }, + { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/33/36/ec505ce62c143c0f045e82e2bb0360e2ede765c0cfe3a70bf32c5661b8a2/datamodel_code_generator-0.55.0.tar.gz", hash = "sha256:20ae7a4fbbb12be380f0bd02544db4abae96c5b644d4b3f2b9c3fc0bc9ee1184", size = 833828, upload-time = "2026-03-10T20:41:15.796Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d9/e3/49afa1bc180e0d28008ec6bcdf82a4072d1c7a41032b5b759b60814ca4b0/culsans-0.11.0.tar.gz", hash = "sha256:0b43d0d05dce6106293d114c86e3fb4bfc63088cfe8ff08ed3fe36891447fe33", size = 107546, upload-time = "2025-12-31T23:15:38.196Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/be/c6/2abc9d11adbbf689b6b4dfb7a136d57b9ccaa3b3f1ba83504462109e8dbb/datamodel_code_generator-0.55.0-py3-none-any.whl", hash = "sha256:efa5a925288ca2a135fdc3361c7d774ae5b24b4fd632868363e249d55ea2f137", size = 256860, upload-time = "2026-03-10T20:41:13.488Z" }, + { url = "https://files.pythonhosted.org/packages/e0/5d/9fb19fb38f6d6120422064279ea5532e22b84aa2be8831d49607194feda3/culsans-0.11.0-py3-none-any.whl", hash = "sha256:278d118f63fc75b9db11b664b436a1b83cc30d9577127848ba41420e66eb5a47", size = 21811, upload-time = "2025-12-31T23:15:37.189Z" }, ] [[package]] @@ -740,14 +797,14 @@ wheels = [ [[package]] name = "exceptiongroup" -version = "1.3.0" +version = "1.3.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0b/9f/a65090624ecf468cdca03533906e7c69ed7588582240cfe7cc9e770b50eb/exceptiongroup-1.3.0.tar.gz", hash = "sha256:b241f5885f560bc56a59ee63ca4c6a8bfa46ae4ad651af316d4e81817bb9fd88", size = 29749, upload-time = "2025-05-10T17:42:51.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/50/79/66800aadf48771f6b62f7eb014e352e5d06856655206165d775e675a02c9/exceptiongroup-1.3.1.tar.gz", hash = "sha256:8b412432c6055b0b7d14c310000ae93352ed6754f70fa8f7c34141f91c4e3219", size = 30371, upload-time = "2025-11-21T23:01:54.787Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/36/f4/c6e662dade71f56cd2f3735141b265c3c79293c109549c1e6933b0651ffc/exceptiongroup-1.3.0-py3-none-any.whl", hash = "sha256:4d111e6e0c13d0644cad6ddaa7ed0261a0b36971f6d23e7ec9b4b9097da78a10", size = 16674, upload-time = "2025-05-10T17:42:49.33Z" }, + { url = "https://files.pythonhosted.org/packages/8a/0e/97c33bf5009bdbac74fd2beace167cab3f978feb69cc36f1ef79360d6c4e/exceptiongroup-1.3.1-py3-none-any.whl", hash = "sha256:a7a39a3bd276781e98394987d3a5701d0c4edffb633bb7a5144577f82c773598", size = 16740, upload-time = "2025-11-21T23:01:53.443Z" }, ] [[package]] @@ -761,7 +818,7 @@ wheels = [ [[package]] name = "fastapi" -version = "0.135.2" +version = "0.135.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "annotated-doc" }, @@ -770,32 +827,23 @@ dependencies = [ { name = "typing-extensions" }, { name = "typing-inspection" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c4/73/5903c4b13beae98618d64eb9870c3fac4f605523dd0312ca5c80dadbd5b9/fastapi-0.135.2.tar.gz", hash = "sha256:88a832095359755527b7f63bb4c6bc9edb8329a026189eed83d6c1afcf419d56", size = 395833, upload-time = "2026-03-23T14:12:41.697Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f7/e6/7adb4c5fa231e82c35b8f5741a9f2d055f520c29af5546fd70d3e8e1cd2e/fastapi-0.135.3.tar.gz", hash = "sha256:bd6d7caf1a2bdd8d676843cdcd2287729572a1ef524fc4d65c17ae002a1be654", size = 396524, upload-time = "2026-04-01T16:23:58.188Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8f/ea/18f6d0457f9efb2fc6fa594857f92810cadb03024975726db6546b3d6fcf/fastapi-0.135.2-py3-none-any.whl", hash = "sha256:0af0447d541867e8db2a6a25c23a8c4bd80e2394ac5529bd87501bbb9e240ca5", size = 117407, upload-time = "2026-03-23T14:12:43.284Z" }, + { url = "https://files.pythonhosted.org/packages/84/a4/5caa2de7f917a04ada20018eccf60d6cc6145b0199d55ca3711b0fc08312/fastapi-0.135.3-py3-none-any.whl", hash = "sha256:9b0f590c813acd13d0ab43dd8494138eb58e484bfac405db1f3187cfc5810d98", size = 117734, upload-time = "2026-04-01T16:23:59.328Z" }, ] [[package]] name = "filelock" -version = "3.20.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/65/ce7f1b70157833bf3cb851b556a37d4547ceafc158aa9b34b36782f23696/filelock-3.20.3.tar.gz", hash = "sha256:18c57ee915c7ec61cff0ecf7f0f869936c7c30191bb0cf406f1341778d0834e1", size = 19485, upload-time = "2026-01-09T17:55:05.421Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/36/7fb70f04bf00bc646cd5bb45aa9eddb15e19437a28b8fb2b4a5249fac770/filelock-3.20.3-py3-none-any.whl", hash = "sha256:4b0dda527ee31078689fc205ec4f1c1bf7d56cf88b6dc9426c4f230e46c2dce1", size = 16701, upload-time = "2026-01-09T17:55:04.334Z" }, -] - -[[package]] -name = "genson" -version = "1.3.0" +version = "3.25.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c5/cf/2303c8ad276dcf5ee2ad6cf69c4338fd86ef0f471a5207b069adf7a393cf/genson-1.3.0.tar.gz", hash = "sha256:e02db9ac2e3fd29e65b5286f7135762e2cd8a986537c075b06fc5f1517308e37", size = 34919, upload-time = "2024-05-15T22:08:49.123Z" } +sdist = { url = "https://files.pythonhosted.org/packages/94/b8/00651a0f559862f3bb7d6f7477b192afe3f583cc5e26403b44e59a55ab34/filelock-3.25.2.tar.gz", hash = "sha256:b64ece2b38f4ca29dd3e810287aa8c48182bbecd1ae6e9ae126c9b35f1382694", size = 40480, upload-time = "2026-03-11T20:45:38.487Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/5c/e226de133afd8bb267ec27eead9ae3d784b95b39a287ed404caab39a5f50/genson-1.3.0-py3-none-any.whl", hash = "sha256:468feccd00274cc7e4c09e84b08704270ba8d95232aa280f65b986139cec67f7", size = 21470, upload-time = "2024-05-15T22:08:47.056Z" }, + { url = "https://files.pythonhosted.org/packages/a4/a5/842ae8f0c08b61d6484b52f99a03510a3a72d23141942d216ebe81fefbce/filelock-3.25.2-py3-none-any.whl", hash = "sha256:ca8afb0da15f229774c9ad1b455ed96e85a81373065fb10446672f64444ddf70", size = 26759, upload-time = "2026-03-11T20:45:37.437Z" }, ] [[package]] name = "google-api-core" -version = "2.30.1" +version = "2.30.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-auth" }, @@ -804,9 +852,9 @@ dependencies = [ { name = "protobuf" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/0b/b6e296aff70bef900766934cf4e83eaacc3f244adb61936b66d24b204080/google_api_core-2.30.1.tar.gz", hash = "sha256:7304ef3bd7e77fd26320a36eeb75868f9339532bfea21694964f4765b37574ee", size = 176742, upload-time = "2026-03-30T22:50:52.637Z" } +sdist = { url = "https://files.pythonhosted.org/packages/22/98/586ec94553b569080caef635f98a3723db36a38eac0e3d7eb3ea9d2e4b9a/google_api_core-2.30.0.tar.gz", hash = "sha256:02edfa9fab31e17fc0befb5f161b3bf93c9096d99aed584625f38065c511ad9b", size = 176959, upload-time = "2026-02-18T20:28:11.926Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/43/86/a00ea4596780ef3f0721c1f073c0c5ae992da4f35cf12f0d8c92d19267a6/google_api_core-2.30.1-py3-none-any.whl", hash = "sha256:3be893babbb54a89c6807b598383ddf212112130e3d24d06c681b5d18f082e08", size = 173238, upload-time = "2026-03-30T22:48:50.586Z" }, + { url = "https://files.pythonhosted.org/packages/45/27/09c33d67f7e0dcf06d7ac17d196594e66989299374bfb0d4331d1038e76b/google_api_core-2.30.0-py3-none-any.whl", hash = "sha256:80be49ee937ff9aba0fd79a6eddfde35fe658b9953ab9b79c57dd7061afa8df5", size = 173288, upload-time = "2026-02-18T20:28:10.367Z" }, ] [package.optional-dependencies] @@ -817,16 +865,15 @@ grpc = [ [[package]] name = "google-auth" -version = "2.48.0" +version = "2.49.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "cryptography" }, { name = "pyasn1-modules" }, - { name = "rsa" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/0c/41/242044323fbd746615884b1c16639749e73665b718209946ebad7ba8a813/google_auth-2.48.0.tar.gz", hash = "sha256:4f7e706b0cd3208a3d940a19a822c37a476ddba5450156c3e6624a71f7c841ce", size = 326522, upload-time = "2026-01-26T19:22:47.157Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ea/80/6a696a07d3d3b0a92488933532f03dbefa4a24ab80fb231395b9a2a1be77/google_auth-2.49.1.tar.gz", hash = "sha256:16d40da1c3c5a0533f57d268fe72e0ebb0ae1cc3b567024122651c045d879b64", size = 333825, upload-time = "2026-03-12T19:30:58.135Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/83/1d/d6466de3a5249d35e832a52834115ca9d1d0de6abc22065f049707516d47/google_auth-2.48.0-py3-none-any.whl", hash = "sha256:2e2a537873d449434252a9632c28bfc268b0adb1e53f9fb62afc5333a975903f", size = 236499, upload-time = "2026-01-26T19:22:45.099Z" }, + { url = "https://files.pythonhosted.org/packages/e9/eb/c6c2478d8a8d633460be40e2a8a6f8f429171997a35a96f81d3b680dec83/google_auth-2.49.1-py3-none-any.whl", hash = "sha256:195ebe3dca18eddd1b3db5edc5189b76c13e96f29e73043b923ebcf3f1a860f7", size = 240737, upload-time = "2026-03-12T19:30:53.159Z" }, ] [package.optional-dependencies] @@ -836,7 +883,7 @@ requests = [ [[package]] name = "google-cloud-aiplatform" -version = "1.144.0" +version = "1.141.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "docstring-parser" }, @@ -852,9 +899,9 @@ dependencies = [ { name = "pydantic" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/e8/79/8a756b77b33fdc7418c49d3445ebbb9392fb7915e2a3047a64d45d00a20d/google_cloud_aiplatform-1.144.0.tar.gz", hash = "sha256:d1a6f930a9385653b2104ab523751c9a249029b205ccc6b86ee00419c660c943", size = 10217550, upload-time = "2026-04-01T00:46:29.198Z" } +sdist = { url = "https://files.pythonhosted.org/packages/ac/dc/1209c7aab43bd7233cf631165a3b1b4284d22fc7fe7387c66228d07868ab/google_cloud_aiplatform-1.141.0.tar.gz", hash = "sha256:e3b1cdb28865dd862aac9c685dfc5ac076488705aba0a5354016efadcddd59c6", size = 10152688, upload-time = "2026-03-10T22:20:08.692Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/49/9f/70ecfa90f9429dd5f5db464516528b0f5cc7a116af1d021b2f0a5af67ac1/google_cloud_aiplatform-1.144.0-py2.py3-none-any.whl", hash = "sha256:f9801e5ed3d34bd97d9cf61f9ac05a725d20018f6bc567a4632df6817399e96a", size = 8394909, upload-time = "2026-04-01T00:46:25.58Z" }, + { url = "https://files.pythonhosted.org/packages/6a/fc/428af69a69ff2e477e7f5e12d227b31fe5790f1a8234aacd54297f49c836/google_cloud_aiplatform-1.141.0-py2.py3-none-any.whl", hash = "sha256:6bd25b4d514c40b8181ca703e1b313ad6d0454ab8006fc9907fb3e9f672f31d1", size = 8358409, upload-time = "2026-03-10T22:20:04.871Z" }, ] [[package]] @@ -907,7 +954,7 @@ wheels = [ [[package]] name = "google-cloud-storage" -version = "3.9.0" +version = "3.10.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "google-api-core" }, @@ -917,9 +964,9 @@ dependencies = [ { name = "google-resumable-media" }, { name = "requests" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f7/b1/4f0798e88285b50dfc60ed3a7de071def538b358db2da468c2e0deecbb40/google_cloud_storage-3.9.0.tar.gz", hash = "sha256:f2d8ca7db2f652be757e92573b2196e10fbc09649b5c016f8b422ad593c641cc", size = 17298544, upload-time = "2026-02-02T13:36:34.119Z" } +sdist = { url = "https://files.pythonhosted.org/packages/7a/e3/747759eebc72e420c25903d6bc231d0ceb110b66ac7e6ee3f350417152cd/google_cloud_storage-3.10.0.tar.gz", hash = "sha256:1aeebf097c27d718d84077059a28d7e87f136f3700212215f1ceeae1d1c5d504", size = 17309829, upload-time = "2026-03-18T15:54:11.875Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/46/0b/816a6ae3c9fd096937d2e5f9670558908811d57d59ddf69dd4b83b326fd1/google_cloud_storage-3.9.0-py3-none-any.whl", hash = "sha256:2dce75a9e8b3387078cbbdad44757d410ecdb916101f8ba308abf202b6968066", size = 321324, upload-time = "2026-02-02T13:36:32.271Z" }, + { url = "https://files.pythonhosted.org/packages/29/e2/d58442f4daee5babd9255cf492a1f3d114357164072f8339a22a3ad460a2/google_cloud_storage-3.10.0-py3-none-any.whl", hash = "sha256:0072e7783b201e45af78fd9779894cdb6bec2bf922ee932f3fcc16f8bce9b9a3", size = 324382, upload-time = "2026-03-18T15:54:10.091Z" }, ] [[package]] @@ -992,14 +1039,14 @@ wheels = [ [[package]] name = "googleapis-common-protos" -version = "1.70.0" +version = "1.73.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/39/24/33db22342cf4a2ea27c9955e6713140fedd51e8b141b5ce5260897020f1a/googleapis_common_protos-1.70.0.tar.gz", hash = "sha256:0e1b44e0ea153e6594f9f394fef15193a68aaaea2d843f83e2742717ca753257", size = 145903, upload-time = "2025-04-14T10:17:02.924Z" } +sdist = { url = "https://files.pythonhosted.org/packages/99/96/a0205167fa0154f4a542fd6925bdc63d039d88dab3588b875078107e6f06/googleapis_common_protos-1.73.0.tar.gz", hash = "sha256:778d07cd4fbeff84c6f7c72102f0daf98fa2bfd3fa8bea426edc545588da0b5a", size = 147323, upload-time = "2026-03-06T21:53:09.727Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/86/f1/62a193f0227cf15a920390abe675f386dec35f7ae3ffe6da582d3ade42c7/googleapis_common_protos-1.70.0-py3-none-any.whl", hash = "sha256:b8bfcca8c25a2bb253e0e0b0adaf8c00773e5e6af6fd92397576680b807e0fd8", size = 294530, upload-time = "2025-04-14T10:17:01.271Z" }, + { url = "https://files.pythonhosted.org/packages/69/28/23eea8acd65972bbfe295ce3666b28ac510dfcb115fac089d3edb0feb00a/googleapis_common_protos-1.73.0-py3-none-any.whl", hash = "sha256:dfdaaa2e860f242046be561e6d6cb5c5f1541ae02cfbcb034371aadb2942b4e8", size = 297578, upload-time = "2026-03-06T21:52:33.933Z" }, ] [package.optional-dependencies] @@ -1009,53 +1056,62 @@ grpc = [ [[package]] name = "greenlet" -version = "3.2.3" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/c9/92/bb85bd6e80148a4d2e0c59f7c0c2891029f8fd510183afc7d8d2feeed9b6/greenlet-3.2.3.tar.gz", hash = "sha256:8b0dd8ae4c0d6f5e54ee55ba935eeb3d735a9b58a8a1e5b5cbab64e01a39f365", size = 185752, upload-time = "2025-06-05T16:16:09.955Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/92/db/b4c12cff13ebac2786f4f217f06588bccd8b53d260453404ef22b121fc3a/greenlet-3.2.3-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:1afd685acd5597349ee6d7a88a8bec83ce13c106ac78c196ee9dde7c04fe87be", size = 268977, upload-time = "2025-06-05T16:10:24.001Z" }, - { url = "https://files.pythonhosted.org/packages/52/61/75b4abd8147f13f70986df2801bf93735c1bd87ea780d70e3b3ecda8c165/greenlet-3.2.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:761917cac215c61e9dc7324b2606107b3b292a8349bdebb31503ab4de3f559ac", size = 627351, upload-time = "2025-06-05T16:38:50.685Z" }, - { url = "https://files.pythonhosted.org/packages/35/aa/6894ae299d059d26254779a5088632874b80ee8cf89a88bca00b0709d22f/greenlet-3.2.3-cp310-cp310-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:a433dbc54e4a37e4fff90ef34f25a8c00aed99b06856f0119dcf09fbafa16392", size = 638599, upload-time = "2025-06-05T16:41:34.057Z" }, - { url = "https://files.pythonhosted.org/packages/30/64/e01a8261d13c47f3c082519a5e9dbf9e143cc0498ed20c911d04e54d526c/greenlet-3.2.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:72e77ed69312bab0434d7292316d5afd6896192ac4327d44f3d613ecb85b037c", size = 634482, upload-time = "2025-06-05T16:48:16.26Z" }, - { url = "https://files.pythonhosted.org/packages/47/48/ff9ca8ba9772d083a4f5221f7b4f0ebe8978131a9ae0909cf202f94cd879/greenlet-3.2.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:68671180e3849b963649254a882cd544a3c75bfcd2c527346ad8bb53494444db", size = 633284, upload-time = "2025-06-05T16:13:01.599Z" }, - { url = "https://files.pythonhosted.org/packages/e9/45/626e974948713bc15775b696adb3eb0bd708bec267d6d2d5c47bb47a6119/greenlet-3.2.3-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:49c8cfb18fb419b3d08e011228ef8a25882397f3a859b9fe1436946140b6756b", size = 582206, upload-time = "2025-06-05T16:12:48.51Z" }, - { url = "https://files.pythonhosted.org/packages/b1/8e/8b6f42c67d5df7db35b8c55c9a850ea045219741bb14416255616808c690/greenlet-3.2.3-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:efc6dc8a792243c31f2f5674b670b3a95d46fa1c6a912b8e310d6f542e7b0712", size = 1111412, upload-time = "2025-06-05T16:36:45.479Z" }, - { url = "https://files.pythonhosted.org/packages/05/46/ab58828217349500a7ebb81159d52ca357da747ff1797c29c6023d79d798/greenlet-3.2.3-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:731e154aba8e757aedd0781d4b240f1225b075b4409f1bb83b05ff410582cf00", size = 1135054, upload-time = "2025-06-05T16:12:36.478Z" }, - { url = "https://files.pythonhosted.org/packages/68/7f/d1b537be5080721c0f0089a8447d4ef72839039cdb743bdd8ffd23046e9a/greenlet-3.2.3-cp310-cp310-win_amd64.whl", hash = "sha256:96c20252c2f792defe9a115d3287e14811036d51e78b3aaddbee23b69b216302", size = 296573, upload-time = "2025-06-05T16:34:26.521Z" }, - { url = "https://files.pythonhosted.org/packages/fc/2e/d4fcb2978f826358b673f779f78fa8a32ee37df11920dc2bb5589cbeecef/greenlet-3.2.3-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:784ae58bba89fa1fa5733d170d42486580cab9decda3484779f4759345b29822", size = 270219, upload-time = "2025-06-05T16:10:10.414Z" }, - { url = "https://files.pythonhosted.org/packages/16/24/929f853e0202130e4fe163bc1d05a671ce8dcd604f790e14896adac43a52/greenlet-3.2.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:0921ac4ea42a5315d3446120ad48f90c3a6b9bb93dd9b3cf4e4d84a66e42de83", size = 630383, upload-time = "2025-06-05T16:38:51.785Z" }, - { url = "https://files.pythonhosted.org/packages/d1/b2/0320715eb61ae70c25ceca2f1d5ae620477d246692d9cc284c13242ec31c/greenlet-3.2.3-cp311-cp311-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:d2971d93bb99e05f8c2c0c2f4aa9484a18d98c4c3bd3c62b65b7e6ae33dfcfaf", size = 642422, upload-time = "2025-06-05T16:41:35.259Z" }, - { url = "https://files.pythonhosted.org/packages/bd/49/445fd1a210f4747fedf77615d941444349c6a3a4a1135bba9701337cd966/greenlet-3.2.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:c667c0bf9d406b77a15c924ef3285e1e05250948001220368e039b6aa5b5034b", size = 638375, upload-time = "2025-06-05T16:48:18.235Z" }, - { url = "https://files.pythonhosted.org/packages/7e/c8/ca19760cf6eae75fa8dc32b487e963d863b3ee04a7637da77b616703bc37/greenlet-3.2.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:592c12fb1165be74592f5de0d70f82bc5ba552ac44800d632214b76089945147", size = 637627, upload-time = "2025-06-05T16:13:02.858Z" }, - { url = "https://files.pythonhosted.org/packages/65/89/77acf9e3da38e9bcfca881e43b02ed467c1dedc387021fc4d9bd9928afb8/greenlet-3.2.3-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29e184536ba333003540790ba29829ac14bb645514fbd7e32af331e8202a62a5", size = 585502, upload-time = "2025-06-05T16:12:49.642Z" }, - { url = "https://files.pythonhosted.org/packages/97/c6/ae244d7c95b23b7130136e07a9cc5aadd60d59b5951180dc7dc7e8edaba7/greenlet-3.2.3-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:93c0bb79844a367782ec4f429d07589417052e621aa39a5ac1fb99c5aa308edc", size = 1114498, upload-time = "2025-06-05T16:36:46.598Z" }, - { url = "https://files.pythonhosted.org/packages/89/5f/b16dec0cbfd3070658e0d744487919740c6d45eb90946f6787689a7efbce/greenlet-3.2.3-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:751261fc5ad7b6705f5f76726567375bb2104a059454e0226e1eef6c756748ba", size = 1139977, upload-time = "2025-06-05T16:12:38.262Z" }, - { url = "https://files.pythonhosted.org/packages/66/77/d48fb441b5a71125bcac042fc5b1494c806ccb9a1432ecaa421e72157f77/greenlet-3.2.3-cp311-cp311-win_amd64.whl", hash = "sha256:83a8761c75312361aa2b5b903b79da97f13f556164a7dd2d5448655425bd4c34", size = 297017, upload-time = "2025-06-05T16:25:05.225Z" }, - { url = "https://files.pythonhosted.org/packages/f3/94/ad0d435f7c48debe960c53b8f60fb41c2026b1d0fa4a99a1cb17c3461e09/greenlet-3.2.3-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:25ad29caed5783d4bd7a85c9251c651696164622494c00802a139c00d639242d", size = 271992, upload-time = "2025-06-05T16:11:23.467Z" }, - { url = "https://files.pythonhosted.org/packages/93/5d/7c27cf4d003d6e77749d299c7c8f5fd50b4f251647b5c2e97e1f20da0ab5/greenlet-3.2.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:88cd97bf37fe24a6710ec6a3a7799f3f81d9cd33317dcf565ff9950c83f55e0b", size = 638820, upload-time = "2025-06-05T16:38:52.882Z" }, - { url = "https://files.pythonhosted.org/packages/c6/7e/807e1e9be07a125bb4c169144937910bf59b9d2f6d931578e57f0bce0ae2/greenlet-3.2.3-cp312-cp312-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:baeedccca94880d2f5666b4fa16fc20ef50ba1ee353ee2d7092b383a243b0b0d", size = 653046, upload-time = "2025-06-05T16:41:36.343Z" }, - { url = "https://files.pythonhosted.org/packages/9d/ab/158c1a4ea1068bdbc78dba5a3de57e4c7aeb4e7fa034320ea94c688bfb61/greenlet-3.2.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:be52af4b6292baecfa0f397f3edb3c6092ce071b499dd6fe292c9ac9f2c8f264", size = 647701, upload-time = "2025-06-05T16:48:19.604Z" }, - { url = "https://files.pythonhosted.org/packages/cc/0d/93729068259b550d6a0288da4ff72b86ed05626eaf1eb7c0d3466a2571de/greenlet-3.2.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:0cc73378150b8b78b0c9fe2ce56e166695e67478550769536a6742dca3651688", size = 649747, upload-time = "2025-06-05T16:13:04.628Z" }, - { url = "https://files.pythonhosted.org/packages/f6/f6/c82ac1851c60851302d8581680573245c8fc300253fc1ff741ae74a6c24d/greenlet-3.2.3-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:706d016a03e78df129f68c4c9b4c4f963f7d73534e48a24f5f5a7101ed13dbbb", size = 605461, upload-time = "2025-06-05T16:12:50.792Z" }, - { url = "https://files.pythonhosted.org/packages/98/82/d022cf25ca39cf1200650fc58c52af32c90f80479c25d1cbf57980ec3065/greenlet-3.2.3-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:419e60f80709510c343c57b4bb5a339d8767bf9aef9b8ce43f4f143240f88b7c", size = 1121190, upload-time = "2025-06-05T16:36:48.59Z" }, - { url = "https://files.pythonhosted.org/packages/f5/e1/25297f70717abe8104c20ecf7af0a5b82d2f5a980eb1ac79f65654799f9f/greenlet-3.2.3-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:93d48533fade144203816783373f27a97e4193177ebaaf0fc396db19e5d61163", size = 1149055, upload-time = "2025-06-05T16:12:40.457Z" }, - { url = "https://files.pythonhosted.org/packages/1f/8f/8f9e56c5e82eb2c26e8cde787962e66494312dc8cb261c460e1f3a9c88bc/greenlet-3.2.3-cp312-cp312-win_amd64.whl", hash = "sha256:7454d37c740bb27bdeddfc3f358f26956a07d5220818ceb467a483197d84f849", size = 297817, upload-time = "2025-06-05T16:29:49.244Z" }, - { url = "https://files.pythonhosted.org/packages/b1/cf/f5c0b23309070ae93de75c90d29300751a5aacefc0a3ed1b1d8edb28f08b/greenlet-3.2.3-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:500b8689aa9dd1ab26872a34084503aeddefcb438e2e7317b89b11eaea1901ad", size = 270732, upload-time = "2025-06-05T16:10:08.26Z" }, - { url = "https://files.pythonhosted.org/packages/48/ae/91a957ba60482d3fecf9be49bc3948f341d706b52ddb9d83a70d42abd498/greenlet-3.2.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a07d3472c2a93117af3b0136f246b2833fdc0b542d4a9799ae5f41c28323faef", size = 639033, upload-time = "2025-06-05T16:38:53.983Z" }, - { url = "https://files.pythonhosted.org/packages/6f/df/20ffa66dd5a7a7beffa6451bdb7400d66251374ab40b99981478c69a67a8/greenlet-3.2.3-cp313-cp313-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:8704b3768d2f51150626962f4b9a9e4a17d2e37c8a8d9867bbd9fa4eb938d3b3", size = 652999, upload-time = "2025-06-05T16:41:37.89Z" }, - { url = "https://files.pythonhosted.org/packages/51/b4/ebb2c8cb41e521f1d72bf0465f2f9a2fd803f674a88db228887e6847077e/greenlet-3.2.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:5035d77a27b7c62db6cf41cf786cfe2242644a7a337a0e155c80960598baab95", size = 647368, upload-time = "2025-06-05T16:48:21.467Z" }, - { url = "https://files.pythonhosted.org/packages/8e/6a/1e1b5aa10dced4ae876a322155705257748108b7fd2e4fae3f2a091fe81a/greenlet-3.2.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2d8aa5423cd4a396792f6d4580f88bdc6efcb9205891c9d40d20f6e670992efb", size = 650037, upload-time = "2025-06-05T16:13:06.402Z" }, - { url = "https://files.pythonhosted.org/packages/26/f2/ad51331a157c7015c675702e2d5230c243695c788f8f75feba1af32b3617/greenlet-3.2.3-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:2c724620a101f8170065d7dded3f962a2aea7a7dae133a009cada42847e04a7b", size = 608402, upload-time = "2025-06-05T16:12:51.91Z" }, - { url = "https://files.pythonhosted.org/packages/26/bc/862bd2083e6b3aff23300900a956f4ea9a4059de337f5c8734346b9b34fc/greenlet-3.2.3-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:873abe55f134c48e1f2a6f53f7d1419192a3d1a4e873bace00499a4e45ea6af0", size = 1119577, upload-time = "2025-06-05T16:36:49.787Z" }, - { url = "https://files.pythonhosted.org/packages/86/94/1fc0cc068cfde885170e01de40a619b00eaa8f2916bf3541744730ffb4c3/greenlet-3.2.3-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:024571bbce5f2c1cfff08bf3fbaa43bbc7444f580ae13b0099e95d0e6e67ed36", size = 1147121, upload-time = "2025-06-05T16:12:42.527Z" }, - { url = "https://files.pythonhosted.org/packages/27/1a/199f9587e8cb08a0658f9c30f3799244307614148ffe8b1e3aa22f324dea/greenlet-3.2.3-cp313-cp313-win_amd64.whl", hash = "sha256:5195fb1e75e592dd04ce79881c8a22becdfa3e6f500e7feb059b1e6fdd54d3e3", size = 297603, upload-time = "2025-06-05T16:20:12.651Z" }, - { url = "https://files.pythonhosted.org/packages/d8/ca/accd7aa5280eb92b70ed9e8f7fd79dc50a2c21d8c73b9a0856f5b564e222/greenlet-3.2.3-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:3d04332dddb10b4a211b68111dabaee2e1a073663d117dc10247b5b1642bac86", size = 271479, upload-time = "2025-06-05T16:10:47.525Z" }, - { url = "https://files.pythonhosted.org/packages/55/71/01ed9895d9eb49223280ecc98a557585edfa56b3d0e965b9fa9f7f06b6d9/greenlet-3.2.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8186162dffde068a465deab08fc72c767196895c39db26ab1c17c0b77a6d8b97", size = 683952, upload-time = "2025-06-05T16:38:55.125Z" }, - { url = "https://files.pythonhosted.org/packages/ea/61/638c4bdf460c3c678a0a1ef4c200f347dff80719597e53b5edb2fb27ab54/greenlet-3.2.3-cp314-cp314-manylinux2014_ppc64le.manylinux_2_17_ppc64le.whl", hash = "sha256:f4bfbaa6096b1b7a200024784217defedf46a07c2eee1a498e94a1b5f8ec5728", size = 696917, upload-time = "2025-06-05T16:41:38.959Z" }, - { url = "https://files.pythonhosted.org/packages/22/cc/0bd1a7eb759d1f3e3cc2d1bc0f0b487ad3cc9f34d74da4b80f226fde4ec3/greenlet-3.2.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.whl", hash = "sha256:ed6cfa9200484d234d8394c70f5492f144b20d4533f69262d530a1a082f6ee9a", size = 692443, upload-time = "2025-06-05T16:48:23.113Z" }, - { url = "https://files.pythonhosted.org/packages/67/10/b2a4b63d3f08362662e89c103f7fe28894a51ae0bc890fabf37d1d780e52/greenlet-3.2.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:02b0df6f63cd15012bed5401b47829cfd2e97052dc89da3cfaf2c779124eb892", size = 692995, upload-time = "2025-06-05T16:13:07.972Z" }, - { url = "https://files.pythonhosted.org/packages/5a/c6/ad82f148a4e3ce9564056453a71529732baf5448ad53fc323e37efe34f66/greenlet-3.2.3-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:86c2d68e87107c1792e2e8d5399acec2487a4e993ab76c792408e59394d52141", size = 655320, upload-time = "2025-06-05T16:12:53.453Z" }, - { url = "https://files.pythonhosted.org/packages/5c/4f/aab73ecaa6b3086a4c89863d94cf26fa84cbff63f52ce9bc4342b3087a06/greenlet-3.2.3-cp314-cp314-win_amd64.whl", hash = "sha256:8c47aae8fbbfcf82cc13327ae802ba13c9c36753b67e760023fd116bc124a62a", size = 301236, upload-time = "2025-06-05T16:15:20.111Z" }, +version = "3.3.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a3/51/1664f6b78fc6ebbd98019a1fd730e83fa78f2db7058f72b1463d3612b8db/greenlet-3.3.2.tar.gz", hash = "sha256:2eaf067fc6d886931c7962e8c6bede15d2f01965560f3359b27c80bde2d151f2", size = 188267, upload-time = "2026-02-20T20:54:15.531Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/38/3f/9859f655d11901e7b2996c6e3d33e0caa9a1d4572c3bc61ed0faa64b2f4c/greenlet-3.3.2-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:9bc885b89709d901859cf95179ec9f6bb67a3d2bb1f0e88456461bd4b7f8fd0d", size = 277747, upload-time = "2026-02-20T20:16:21.325Z" }, + { url = "https://files.pythonhosted.org/packages/fb/07/cb284a8b5c6498dbd7cba35d31380bb123d7dceaa7907f606c8ff5993cbf/greenlet-3.3.2-cp310-cp310-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b568183cf65b94919be4438dc28416b234b678c608cafac8874dfeeb2a9bbe13", size = 579202, upload-time = "2026-02-20T20:47:28.955Z" }, + { url = "https://files.pythonhosted.org/packages/ed/45/67922992b3a152f726163b19f890a85129a992f39607a2a53155de3448b8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:527fec58dc9f90efd594b9b700662ed3fb2493c2122067ac9c740d98080a620e", size = 590620, upload-time = "2026-02-20T20:55:55.581Z" }, + { url = "https://files.pythonhosted.org/packages/03/5f/6e2a7d80c353587751ef3d44bb947f0565ec008a2e0927821c007e96d3a7/greenlet-3.3.2-cp310-cp310-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:508c7f01f1791fbc8e011bd508f6794cb95397fdb198a46cb6635eb5b78d85a7", size = 602132, upload-time = "2026-02-20T21:02:43.261Z" }, + { url = "https://files.pythonhosted.org/packages/ad/55/9f1ebb5a825215fadcc0f7d5073f6e79e3007e3282b14b22d6aba7ca6cb8/greenlet-3.3.2-cp310-cp310-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ad0c8917dd42a819fe77e6bdfcb84e3379c0de956469301d9fd36427a1ca501f", size = 591729, upload-time = "2026-02-20T20:20:58.395Z" }, + { url = "https://files.pythonhosted.org/packages/24/b4/21f5455773d37f94b866eb3cf5caed88d6cea6dd2c6e1f9c34f463cba3ec/greenlet-3.3.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:97245cc10e5515dbc8c3104b2928f7f02b6813002770cfaffaf9a6e0fc2b94ef", size = 1551946, upload-time = "2026-02-20T20:49:31.102Z" }, + { url = "https://files.pythonhosted.org/packages/00/68/91f061a926abead128fe1a87f0b453ccf07368666bd59ffa46016627a930/greenlet-3.3.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:8c1fdd7d1b309ff0da81d60a9688a8bd044ac4e18b250320a96fc68d31c209ca", size = 1618494, upload-time = "2026-02-20T20:21:06.541Z" }, + { url = "https://files.pythonhosted.org/packages/ac/78/f93e840cbaef8becaf6adafbaf1319682a6c2d8c1c20224267a5c6c8c891/greenlet-3.3.2-cp310-cp310-win_amd64.whl", hash = "sha256:5d0e35379f93a6d0222de929a25ab47b5eb35b5ef4721c2b9cbcc4036129ff1f", size = 230092, upload-time = "2026-02-20T20:17:09.379Z" }, + { url = "https://files.pythonhosted.org/packages/f3/47/16400cb42d18d7a6bb46f0626852c1718612e35dcb0dffa16bbaffdf5dd2/greenlet-3.3.2-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:c56692189a7d1c7606cb794be0a8381470d95c57ce5be03fb3d0ef57c7853b86", size = 278890, upload-time = "2026-02-20T20:19:39.263Z" }, + { url = "https://files.pythonhosted.org/packages/a3/90/42762b77a5b6aa96cd8c0e80612663d39211e8ae8a6cd47c7f1249a66262/greenlet-3.3.2-cp311-cp311-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ebd458fa8285960f382841da585e02201b53a5ec2bac6b156fc623b5ce4499f", size = 581120, upload-time = "2026-02-20T20:47:30.161Z" }, + { url = "https://files.pythonhosted.org/packages/bf/6f/f3d64f4fa0a9c7b5c5b3c810ff1df614540d5aa7d519261b53fba55d4df9/greenlet-3.3.2-cp311-cp311-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a443358b33c4ec7b05b79a7c8b466f5d275025e750298be7340f8fc63dff2a55", size = 594363, upload-time = "2026-02-20T20:55:56.965Z" }, + { url = "https://files.pythonhosted.org/packages/9c/8b/1430a04657735a3f23116c2e0d5eb10220928846e4537a938a41b350bed6/greenlet-3.3.2-cp311-cp311-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:4375a58e49522698d3e70cc0b801c19433021b5c37686f7ce9c65b0d5c8677d2", size = 605046, upload-time = "2026-02-20T21:02:45.234Z" }, + { url = "https://files.pythonhosted.org/packages/72/83/3e06a52aca8128bdd4dcd67e932b809e76a96ab8c232a8b025b2850264c5/greenlet-3.3.2-cp311-cp311-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8e2cd90d413acbf5e77ae41e5d3c9b3ac1d011a756d7284d7f3f2b806bbd6358", size = 594156, upload-time = "2026-02-20T20:20:59.955Z" }, + { url = "https://files.pythonhosted.org/packages/70/79/0de5e62b873e08fe3cef7dbe84e5c4bc0e8ed0c7ff131bccb8405cd107c8/greenlet-3.3.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:442b6057453c8cb29b4fb36a2ac689382fc71112273726e2423f7f17dc73bf99", size = 1554649, upload-time = "2026-02-20T20:49:32.293Z" }, + { url = "https://files.pythonhosted.org/packages/5a/00/32d30dee8389dc36d42170a9c66217757289e2afb0de59a3565260f38373/greenlet-3.3.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:45abe8eb6339518180d5a7fa47fa01945414d7cca5ecb745346fc6a87d2750be", size = 1619472, upload-time = "2026-02-20T20:21:07.966Z" }, + { url = "https://files.pythonhosted.org/packages/f1/3a/efb2cf697fbccdf75b24e2c18025e7dfa54c4f31fab75c51d0fe79942cef/greenlet-3.3.2-cp311-cp311-win_amd64.whl", hash = "sha256:1e692b2dae4cc7077cbb11b47d258533b48c8fde69a33d0d8a82e2fe8d8531d5", size = 230389, upload-time = "2026-02-20T20:17:18.772Z" }, + { url = "https://files.pythonhosted.org/packages/e1/a1/65bbc059a43a7e2143ec4fc1f9e3f673e04f9c7b371a494a101422ac4fd5/greenlet-3.3.2-cp311-cp311-win_arm64.whl", hash = "sha256:02b0a8682aecd4d3c6c18edf52bc8e51eacdd75c8eac52a790a210b06aa295fd", size = 229645, upload-time = "2026-02-20T20:18:18.695Z" }, + { url = "https://files.pythonhosted.org/packages/ea/ab/1608e5a7578e62113506740b88066bf09888322a311cff602105e619bd87/greenlet-3.3.2-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:ac8d61d4343b799d1e526db579833d72f23759c71e07181c2d2944e429eb09cd", size = 280358, upload-time = "2026-02-20T20:17:43.971Z" }, + { url = "https://files.pythonhosted.org/packages/a5/23/0eae412a4ade4e6623ff7626e38998cb9b11e9ff1ebacaa021e4e108ec15/greenlet-3.3.2-cp312-cp312-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3ceec72030dae6ac0c8ed7591b96b70410a8be370b6a477b1dbc072856ad02bd", size = 601217, upload-time = "2026-02-20T20:47:31.462Z" }, + { url = "https://files.pythonhosted.org/packages/f8/16/5b1678a9c07098ecb9ab2dd159fafaf12e963293e61ee8d10ecb55273e5e/greenlet-3.3.2-cp312-cp312-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:a2a5be83a45ce6188c045bcc44b0ee037d6a518978de9a5d97438548b953a1ac", size = 611792, upload-time = "2026-02-20T20:55:58.423Z" }, + { url = "https://files.pythonhosted.org/packages/5c/c5/cc09412a29e43406eba18d61c70baa936e299bc27e074e2be3806ed29098/greenlet-3.3.2-cp312-cp312-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ae9e21c84035c490506c17002f5c8ab25f980205c3e61ddb3a2a2a2e6c411fcb", size = 626250, upload-time = "2026-02-20T21:02:46.596Z" }, + { url = "https://files.pythonhosted.org/packages/50/1f/5155f55bd71cabd03765a4aac9ac446be129895271f73872c36ebd4b04b6/greenlet-3.3.2-cp312-cp312-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:43e99d1749147ac21dde49b99c9abffcbc1e2d55c67501465ef0930d6e78e070", size = 613875, upload-time = "2026-02-20T20:21:01.102Z" }, + { url = "https://files.pythonhosted.org/packages/fc/dd/845f249c3fcd69e32df80cdab059b4be8b766ef5830a3d0aa9d6cad55beb/greenlet-3.3.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:4c956a19350e2c37f2c48b336a3afb4bff120b36076d9d7fb68cb44e05d95b79", size = 1571467, upload-time = "2026-02-20T20:49:33.495Z" }, + { url = "https://files.pythonhosted.org/packages/2a/50/2649fe21fcc2b56659a452868e695634722a6655ba245d9f77f5656010bf/greenlet-3.3.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:6c6f8ba97d17a1e7d664151284cb3315fc5f8353e75221ed4324f84eb162b395", size = 1640001, upload-time = "2026-02-20T20:21:09.154Z" }, + { url = "https://files.pythonhosted.org/packages/9b/40/cc802e067d02af8b60b6771cea7d57e21ef5e6659912814babb42b864713/greenlet-3.3.2-cp312-cp312-win_amd64.whl", hash = "sha256:34308836d8370bddadb41f5a7ce96879b72e2fdfb4e87729330c6ab52376409f", size = 231081, upload-time = "2026-02-20T20:17:28.121Z" }, + { url = "https://files.pythonhosted.org/packages/58/2e/fe7f36ff1982d6b10a60d5e0740c759259a7d6d2e1dc41da6d96de32fff6/greenlet-3.3.2-cp312-cp312-win_arm64.whl", hash = "sha256:d3a62fa76a32b462a97198e4c9e99afb9ab375115e74e9a83ce180e7a496f643", size = 230331, upload-time = "2026-02-20T20:17:23.34Z" }, + { url = "https://files.pythonhosted.org/packages/ac/48/f8b875fa7dea7dd9b33245e37f065af59df6a25af2f9561efa8d822fde51/greenlet-3.3.2-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:aa6ac98bdfd716a749b84d4034486863fd81c3abde9aa3cf8eff9127981a4ae4", size = 279120, upload-time = "2026-02-20T20:19:01.9Z" }, + { url = "https://files.pythonhosted.org/packages/49/8d/9771d03e7a8b1ee456511961e1b97a6d77ae1dea4a34a5b98eee706689d3/greenlet-3.3.2-cp313-cp313-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ab0c7e7901a00bc0a7284907273dc165b32e0d109a6713babd04471327ff7986", size = 603238, upload-time = "2026-02-20T20:47:32.873Z" }, + { url = "https://files.pythonhosted.org/packages/59/0e/4223c2bbb63cd5c97f28ffb2a8aee71bdfb30b323c35d409450f51b91e3e/greenlet-3.3.2-cp313-cp313-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:d248d8c23c67d2291ffd47af766e2a3aa9fa1c6703155c099feb11f526c63a92", size = 614219, upload-time = "2026-02-20T20:55:59.817Z" }, + { url = "https://files.pythonhosted.org/packages/94/2b/4d012a69759ac9d77210b8bfb128bc621125f5b20fc398bce3940d036b1c/greenlet-3.3.2-cp313-cp313-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:ccd21bb86944ca9be6d967cf7691e658e43417782bce90b5d2faeda0ff78a7dd", size = 628268, upload-time = "2026-02-20T21:02:48.024Z" }, + { url = "https://files.pythonhosted.org/packages/7a/34/259b28ea7a2a0c904b11cd36c79b8cef8019b26ee5dbe24e73b469dea347/greenlet-3.3.2-cp313-cp313-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b6997d360a4e6a4e936c0f9625b1c20416b8a0ea18a8e19cabbefc712e7397ab", size = 616774, upload-time = "2026-02-20T20:21:02.454Z" }, + { url = "https://files.pythonhosted.org/packages/0a/03/996c2d1689d486a6e199cb0f1cf9e4aa940c500e01bdf201299d7d61fa69/greenlet-3.3.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:64970c33a50551c7c50491671265d8954046cb6e8e2999aacdd60e439b70418a", size = 1571277, upload-time = "2026-02-20T20:49:34.795Z" }, + { url = "https://files.pythonhosted.org/packages/d9/c4/2570fc07f34a39f2caf0bf9f24b0a1a0a47bc2e8e465b2c2424821389dfc/greenlet-3.3.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:1a9172f5bf6bd88e6ba5a84e0a68afeac9dc7b6b412b245dd64f52d83c81e55b", size = 1640455, upload-time = "2026-02-20T20:21:10.261Z" }, + { url = "https://files.pythonhosted.org/packages/91/39/5ef5aa23bc545aa0d31e1b9b55822b32c8da93ba657295840b6b34124009/greenlet-3.3.2-cp313-cp313-win_amd64.whl", hash = "sha256:a7945dd0eab63ded0a48e4dcade82939783c172290a7903ebde9e184333ca124", size = 230961, upload-time = "2026-02-20T20:16:58.461Z" }, + { url = "https://files.pythonhosted.org/packages/62/6b/a89f8456dcb06becff288f563618e9f20deed8dd29beea14f9a168aef64b/greenlet-3.3.2-cp313-cp313-win_arm64.whl", hash = "sha256:394ead29063ee3515b4e775216cb756b2e3b4a7e55ae8fd884f17fa579e6b327", size = 230221, upload-time = "2026-02-20T20:17:37.152Z" }, + { url = "https://files.pythonhosted.org/packages/3f/ae/8bffcbd373b57a5992cd077cbe8858fff39110480a9d50697091faea6f39/greenlet-3.3.2-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8d1658d7291f9859beed69a776c10822a0a799bc4bfe1bd4272bb60e62507dab", size = 279650, upload-time = "2026-02-20T20:18:00.783Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c0/45f93f348fa49abf32ac8439938726c480bd96b2a3c6f4d949ec0124b69f/greenlet-3.3.2-cp314-cp314-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:18cb1b7337bca281915b3c5d5ae19f4e76d35e1df80f4ad3c1a7be91fadf1082", size = 650295, upload-time = "2026-02-20T20:47:34.036Z" }, + { url = "https://files.pythonhosted.org/packages/b3/de/dd7589b3f2b8372069ab3e4763ea5329940fc7ad9dcd3e272a37516d7c9b/greenlet-3.3.2-cp314-cp314-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:c2e47408e8ce1c6f1ceea0dffcdf6ebb85cc09e55c7af407c99f1112016e45e9", size = 662163, upload-time = "2026-02-20T20:56:01.295Z" }, + { url = "https://files.pythonhosted.org/packages/cd/ac/85804f74f1ccea31ba518dcc8ee6f14c79f73fe36fa1beba38930806df09/greenlet-3.3.2-cp314-cp314-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:e3cb43ce200f59483eb82949bf1835a99cf43d7571e900d7c8d5c62cdf25d2f9", size = 675371, upload-time = "2026-02-20T21:02:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d8/09bfa816572a4d83bccd6750df1926f79158b1c36c5f73786e26dbe4ee38/greenlet-3.3.2-cp314-cp314-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:63d10328839d1973e5ba35e98cccbca71b232b14051fd957b6f8b6e8e80d0506", size = 664160, upload-time = "2026-02-20T20:21:04.015Z" }, + { url = "https://files.pythonhosted.org/packages/48/cf/56832f0c8255d27f6c35d41b5ec91168d74ec721d85f01a12131eec6b93c/greenlet-3.3.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:8e4ab3cfb02993c8cc248ea73d7dae6cec0253e9afa311c9b37e603ca9fad2ce", size = 1619181, upload-time = "2026-02-20T20:49:36.052Z" }, + { url = "https://files.pythonhosted.org/packages/0a/23/b90b60a4aabb4cec0796e55f25ffbfb579a907c3898cd2905c8918acaa16/greenlet-3.3.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:94ad81f0fd3c0c0681a018a976e5c2bd2ca2d9d94895f23e7bb1af4e8af4e2d5", size = 1687713, upload-time = "2026-02-20T20:21:11.684Z" }, + { url = "https://files.pythonhosted.org/packages/f3/ca/2101ca3d9223a1dc125140dbc063644dca76df6ff356531eb27bc267b446/greenlet-3.3.2-cp314-cp314-win_amd64.whl", hash = "sha256:8c4dd0f3997cf2512f7601563cc90dfb8957c0cff1e3a1b23991d4ea1776c492", size = 232034, upload-time = "2026-02-20T20:20:08.186Z" }, + { url = "https://files.pythonhosted.org/packages/f6/4a/ecf894e962a59dea60f04877eea0fd5724618da89f1867b28ee8b91e811f/greenlet-3.3.2-cp314-cp314-win_arm64.whl", hash = "sha256:cd6f9e2bbd46321ba3bbb4c8a15794d32960e3b0ae2cc4d49a1a53d314805d71", size = 231437, upload-time = "2026-02-20T20:18:59.722Z" }, + { url = "https://files.pythonhosted.org/packages/98/6d/8f2ef704e614bcf58ed43cfb8d87afa1c285e98194ab2cfad351bf04f81e/greenlet-3.3.2-cp314-cp314t-macosx_11_0_universal2.whl", hash = "sha256:e26e72bec7ab387ac80caa7496e0f908ff954f31065b0ffc1f8ecb1338b11b54", size = 286617, upload-time = "2026-02-20T20:19:29.856Z" }, + { url = "https://files.pythonhosted.org/packages/5e/0d/93894161d307c6ea237a43988f27eba0947b360b99ac5239ad3fe09f0b47/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:8b466dff7a4ffda6ca975979bab80bdadde979e29fc947ac3be4451428d8b0e4", size = 655189, upload-time = "2026-02-20T20:47:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/f5/2c/d2d506ebd8abcb57386ec4f7ba20f4030cbe56eae541bc6fd6ef399c0b41/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_ppc64le.manylinux_2_28_ppc64le.whl", hash = "sha256:b8bddc5b73c9720bea487b3bffdb1840fe4e3656fba3bd40aa1489e9f37877ff", size = 658225, upload-time = "2026-02-20T20:56:02.527Z" }, + { url = "https://files.pythonhosted.org/packages/d1/67/8197b7e7e602150938049d8e7f30de1660cfb87e4c8ee349b42b67bdb2e1/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_s390x.manylinux_2_28_s390x.whl", hash = "sha256:59b3e2c40f6706b05a9cd299c836c6aa2378cabe25d021acd80f13abf81181cf", size = 666581, upload-time = "2026-02-20T21:02:51.526Z" }, + { url = "https://files.pythonhosted.org/packages/8e/30/3a09155fbf728673a1dea713572d2d31159f824a37c22da82127056c44e4/greenlet-3.3.2-cp314-cp314t-manylinux_2_24_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b26b0f4428b871a751968285a1ac9648944cea09807177ac639b030bddebcea4", size = 657907, upload-time = "2026-02-20T20:21:05.259Z" }, + { url = "https://files.pythonhosted.org/packages/f3/fd/d05a4b7acd0154ed758797f0a43b4c0962a843bedfe980115e842c5b2d08/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:1fb39a11ee2e4d94be9a76671482be9398560955c9e568550de0224e41104727", size = 1618857, upload-time = "2026-02-20T20:49:37.309Z" }, + { url = "https://files.pythonhosted.org/packages/6f/e1/50ee92a5db521de8f35075b5eff060dd43d39ebd46c2181a2042f7070385/greenlet-3.3.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:20154044d9085151bc309e7689d6f7ba10027f8f5a8c0676ad398b951913d89e", size = 1680010, upload-time = "2026-02-20T20:21:13.427Z" }, + { url = "https://files.pythonhosted.org/packages/29/4b/45d90626aef8e65336bed690106d1382f7a43665e2249017e9527df8823b/greenlet-3.3.2-cp314-cp314t-win_amd64.whl", hash = "sha256:c04c5e06ec3e022cbfe2cd4a846e1d4e50087444f875ff6d2c2ad8445495cf1a", size = 237086, upload-time = "2026-02-20T20:20:45.786Z" }, ] [[package]] @@ -1074,76 +1130,76 @@ wheels = [ [[package]] name = "grpcio" -version = "1.80.0" +version = "1.78.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b7/48/af6173dbca4454f4637a4678b67f52ca7e0c1ed7d5894d89d434fecede05/grpcio-1.80.0.tar.gz", hash = "sha256:29aca15edd0688c22ba01d7cc01cb000d72b2033f4a3c72a81a19b56fd143257", size = 12978905, upload-time = "2026-03-30T08:49:10.502Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/cd/bb7b7e54084a344c03d68144450da7ddd5564e51a298ae1662de65f48e2d/grpcio-1.80.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:886457a7768e408cdce226ad1ca67d2958917d306523a0e21e1a2fdaa75c9c9c", size = 6050363, upload-time = "2026-03-30T08:46:20.894Z" }, - { url = "https://files.pythonhosted.org/packages/16/02/1417f5c3460dea65f7a2e3c14e8b31e77f7ffb730e9bfadd89eda7a9f477/grpcio-1.80.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:7b641fc3f1dc647bfd80bd713addc68f6d145956f64677e56d9ebafc0bd72388", size = 12026037, upload-time = "2026-03-30T08:46:25.144Z" }, - { url = "https://files.pythonhosted.org/packages/43/98/c910254eedf2cae368d78336a2de0678e66a7317d27c02522392f949b5c6/grpcio-1.80.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:33eb763f18f006dc7fee1e69831d38d23f5eccd15b2e0f92a13ee1d9242e5e02", size = 6602306, upload-time = "2026-03-30T08:46:27.593Z" }, - { url = "https://files.pythonhosted.org/packages/7c/f8/88ca4e78c077b2b2113d95da1e1ab43efd43d723c9a0397d26529c2c1a56/grpcio-1.80.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:52d143637e3872633fc7dd7c3c6a1c84e396b359f3a72e215f8bf69fd82084fc", size = 7301535, upload-time = "2026-03-30T08:46:29.556Z" }, - { url = "https://files.pythonhosted.org/packages/f9/96/f28660fe2fe0f153288bf4a04e4910b7309d442395135c88ed4f5b3b8b40/grpcio-1.80.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:c51bf8ac4575af2e0678bccfb07e47321fc7acb5049b4482832c5c195e04e13a", size = 6808669, upload-time = "2026-03-30T08:46:31.984Z" }, - { url = "https://files.pythonhosted.org/packages/47/eb/3f68a5e955779c00aeef23850e019c1c1d0e032d90633ba49c01ad5a96e0/grpcio-1.80.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:50a9871536d71c4fba24ee856abc03a87764570f0c457dd8db0b4018f379fed9", size = 7409489, upload-time = "2026-03-30T08:46:34.684Z" }, - { url = "https://files.pythonhosted.org/packages/5b/a7/d2f681a4bfb881be40659a309771f3bdfbfdb1190619442816c3f0ffc079/grpcio-1.80.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:a72d84ad0514db063e21887fbacd1fd7acb4d494a564cae22227cd45c7fbf199", size = 8423167, upload-time = "2026-03-30T08:46:36.833Z" }, - { url = "https://files.pythonhosted.org/packages/97/8a/29b4589c204959aa35ce5708400a05bba72181807c45c47b3ec000c39333/grpcio-1.80.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f7691a6788ad9196872f95716df5bc643ebba13c97140b7a5ee5c8e75d1dea81", size = 7846761, upload-time = "2026-03-30T08:46:40.091Z" }, - { url = "https://files.pythonhosted.org/packages/6b/d2/ed143e097230ee121ac5848f6ff14372dba91289b10b536d54fb1b7cbae7/grpcio-1.80.0-cp310-cp310-win32.whl", hash = "sha256:46c2390b59d67f84e882694d489f5b45707c657832d7934859ceb8c33f467069", size = 4156534, upload-time = "2026-03-30T08:46:42.026Z" }, - { url = "https://files.pythonhosted.org/packages/d5/c9/df8279bb49b29409995e95efa85b72973d62f8aeff89abee58c91f393710/grpcio-1.80.0-cp310-cp310-win_amd64.whl", hash = "sha256:dc053420fc75749c961e2a4c906398d7c15725d36ccc04ae6d16093167223b58", size = 4889869, upload-time = "2026-03-30T08:46:44.219Z" }, - { url = "https://files.pythonhosted.org/packages/5d/db/1d56e5f5823257b291962d6c0ce106146c6447f405b60b234c4f222a7cde/grpcio-1.80.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:dfab85db094068ff42e2a3563f60ab3dddcc9d6488a35abf0132daec13209c8a", size = 6055009, upload-time = "2026-03-30T08:46:46.265Z" }, - { url = "https://files.pythonhosted.org/packages/6e/18/c83f3cad64c5ca63bca7e91e5e46b0d026afc5af9d0a9972472ceba294b3/grpcio-1.80.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:5c07e82e822e1161354e32da2662f741a4944ea955f9f580ec8fb409dd6f6060", size = 12035295, upload-time = "2026-03-30T08:46:49.099Z" }, - { url = "https://files.pythonhosted.org/packages/0f/8e/e14966b435be2dda99fbe89db9525ea436edc79780431a1c2875a3582644/grpcio-1.80.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:ba0915d51fd4ced2db5ff719f84e270afe0e2d4c45a7bdb1e8d036e4502928c2", size = 6610297, upload-time = "2026-03-30T08:46:52.123Z" }, - { url = "https://files.pythonhosted.org/packages/cc/26/d5eb38f42ce0e3fdc8174ea4d52036ef8d58cc4426cb800f2610f625dd75/grpcio-1.80.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3cb8130ba457d2aa09fa6b7c3ed6b6e4e6a2685fce63cb803d479576c4d80e21", size = 7300208, upload-time = "2026-03-30T08:46:54.859Z" }, - { url = "https://files.pythonhosted.org/packages/25/51/bd267c989f85a17a5b3eea65a6feb4ff672af41ca614e5a0279cc0ea381c/grpcio-1.80.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:09e5e478b3d14afd23f12e49e8b44c8684ac3c5f08561c43a5b9691c54d136ab", size = 6813442, upload-time = "2026-03-30T08:46:57.056Z" }, - { url = "https://files.pythonhosted.org/packages/9e/d9/d80eef735b19e9169e30164bbf889b46f9df9127598a83d174eb13a48b26/grpcio-1.80.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:00168469238b022500e486c1c33916acf2f2a9b2c022202cf8a1885d2e3073c1", size = 7414743, upload-time = "2026-03-30T08:46:59.682Z" }, - { url = "https://files.pythonhosted.org/packages/de/f2/567f5bd5054398ed6b0509b9a30900376dcf2786bd936812098808b49d8d/grpcio-1.80.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:8502122a3cc1714038e39a0b071acb1207ca7844208d5ea0d091317555ee7106", size = 8426046, upload-time = "2026-03-30T08:47:02.474Z" }, - { url = "https://files.pythonhosted.org/packages/62/29/73ef0141b4732ff5eacd68430ff2512a65c004696997f70476a83e548e7e/grpcio-1.80.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:ce1794f4ea6cc3ca29463f42d665c32ba1b964b48958a66497917fe9069f26e6", size = 7851641, upload-time = "2026-03-30T08:47:05.462Z" }, - { url = "https://files.pythonhosted.org/packages/46/69/abbfa360eb229a8623bab5f5a4f8105e445bd38ce81a89514ba55d281ad0/grpcio-1.80.0-cp311-cp311-win32.whl", hash = "sha256:51b4a7189b0bef2aa30adce3c78f09c83526cf3dddb24c6a96555e3b97340440", size = 4154368, upload-time = "2026-03-30T08:47:08.027Z" }, - { url = "https://files.pythonhosted.org/packages/6f/d4/ae92206d01183b08613e846076115f5ac5991bae358d2a749fa864da5699/grpcio-1.80.0-cp311-cp311-win_amd64.whl", hash = "sha256:02e64bb0bb2da14d947a49e6f120a75e947250aebe65f9629b62bb1f5c14e6e9", size = 4894235, upload-time = "2026-03-30T08:47:10.839Z" }, - { url = "https://files.pythonhosted.org/packages/5c/e8/a2b749265eb3415abc94f2e619bbd9e9707bebdda787e61c593004ec927a/grpcio-1.80.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:c624cc9f1008361014378c9d776de7182b11fe8b2e5a81bc69f23a295f2a1ad0", size = 6015616, upload-time = "2026-03-30T08:47:13.428Z" }, - { url = "https://files.pythonhosted.org/packages/3e/97/b1282161a15d699d1e90c360df18d19165a045ce1c343c7f313f5e8a0b77/grpcio-1.80.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:f49eddcac43c3bf350c0385366a58f36bed8cc2c0ec35ef7b74b49e56552c0c2", size = 12014204, upload-time = "2026-03-30T08:47:15.873Z" }, - { url = "https://files.pythonhosted.org/packages/6e/5e/d319c6e997b50c155ac5a8cb12f5173d5b42677510e886d250d50264949d/grpcio-1.80.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:d334591df610ab94714048e0d5b4f3dd5ad1bee74dfec11eee344220077a79de", size = 6563866, upload-time = "2026-03-30T08:47:18.588Z" }, - { url = "https://files.pythonhosted.org/packages/ae/f6/fdd975a2cb4d78eb67769a7b3b3830970bfa2e919f1decf724ae4445f42c/grpcio-1.80.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:0cb517eb1d0d0aaf1d87af7cc5b801d686557c1d88b2619f5e31fab3c2315921", size = 7273060, upload-time = "2026-03-30T08:47:21.113Z" }, - { url = "https://files.pythonhosted.org/packages/db/f0/a3deb5feba60d9538a962913e37bd2e69a195f1c3376a3dd44fe0427e996/grpcio-1.80.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4e78c4ac0d97dc2e569b2f4bcbbb447491167cb358d1a389fc4af71ab6f70411", size = 6782121, upload-time = "2026-03-30T08:47:23.827Z" }, - { url = "https://files.pythonhosted.org/packages/ca/84/36c6dcfddc093e108141f757c407902a05085e0c328007cb090d56646cdf/grpcio-1.80.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2ed770b4c06984f3b47eb0517b1c69ad0b84ef3f40128f51448433be904634cd", size = 7383811, upload-time = "2026-03-30T08:47:26.517Z" }, - { url = "https://files.pythonhosted.org/packages/7c/ef/f3a77e3dc5b471a0ec86c564c98d6adfa3510d38f8ee99010410858d591e/grpcio-1.80.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:256507e2f524092f1473071a05e65a5b10d84b82e3ff24c5b571513cfaa61e2f", size = 8393860, upload-time = "2026-03-30T08:47:29.439Z" }, - { url = "https://files.pythonhosted.org/packages/9b/8d/9d4d27ed7f33d109c50d6b5ce578a9914aa68edab75d65869a17e630a8d1/grpcio-1.80.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9a6284a5d907c37db53350645567c522be314bac859a64a7a5ca63b77bb7958f", size = 7830132, upload-time = "2026-03-30T08:47:33.254Z" }, - { url = "https://files.pythonhosted.org/packages/14/e4/9990b41c6d7a44e1e9dee8ac11d7a9802ba1378b40d77468a7761d1ad288/grpcio-1.80.0-cp312-cp312-win32.whl", hash = "sha256:c71309cfce2f22be26aa4a847357c502db6c621f1a49825ae98aa0907595b193", size = 4140904, upload-time = "2026-03-30T08:47:35.319Z" }, - { url = "https://files.pythonhosted.org/packages/2f/2c/296f6138caca1f4b92a31ace4ae1b87dab692fc16a7a3417af3bb3c805bf/grpcio-1.80.0-cp312-cp312-win_amd64.whl", hash = "sha256:9fe648599c0e37594c4809d81a9e77bd138cc82eb8baa71b6a86af65426723ff", size = 4880944, upload-time = "2026-03-30T08:47:37.831Z" }, - { url = "https://files.pythonhosted.org/packages/2f/3a/7c3c25789e3f069e581dc342e03613c5b1cb012c4e8c7d9d5cf960a75856/grpcio-1.80.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e9e408fc016dffd20661f0126c53d8a31c2821b5c13c5d67a0f5ed5de93319ad", size = 6017243, upload-time = "2026-03-30T08:47:40.075Z" }, - { url = "https://files.pythonhosted.org/packages/04/19/21a9806eb8240e174fd1ab0cd5b9aa948bb0e05c2f2f55f9d5d7405e6d08/grpcio-1.80.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:92d787312e613754d4d8b9ca6d3297e69994a7912a32fa38c4c4e01c272974b0", size = 12010840, upload-time = "2026-03-30T08:47:43.11Z" }, - { url = "https://files.pythonhosted.org/packages/18/3a/23347d35f76f639e807fb7a36fad3068aed100996849a33809591f26eca6/grpcio-1.80.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8ac393b58aa16991a2f1144ec578084d544038c12242da3a215966b512904d0f", size = 6567644, upload-time = "2026-03-30T08:47:46.806Z" }, - { url = "https://files.pythonhosted.org/packages/ff/40/96e07ecb604a6a67ae6ab151e3e35b132875d98bc68ec65f3e5ab3e781d7/grpcio-1.80.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:68e5851ac4b9afe07e7f84483803ad167852570d65326b34d54ca560bfa53fb6", size = 7277830, upload-time = "2026-03-30T08:47:49.643Z" }, - { url = "https://files.pythonhosted.org/packages/9b/e2/da1506ecea1f34a5e365964644b35edef53803052b763ca214ba3870c856/grpcio-1.80.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:873ff5d17d68992ef6605330127425d2fc4e77e612fa3c3e0ed4e668685e3140", size = 6783216, upload-time = "2026-03-30T08:47:52.817Z" }, - { url = "https://files.pythonhosted.org/packages/44/83/3b20ff58d0c3b7f6caaa3af9a4174d4023701df40a3f39f7f1c8e7c48f9d/grpcio-1.80.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2bea16af2750fd0a899bf1abd9022244418b55d1f37da2202249ba4ba673838d", size = 7385866, upload-time = "2026-03-30T08:47:55.687Z" }, - { url = "https://files.pythonhosted.org/packages/47/45/55c507599c5520416de5eefecc927d6a0d7af55e91cfffb2e410607e5744/grpcio-1.80.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:ba0db34f7e1d803a878284cd70e4c63cb6ae2510ba51937bf8f45ba997cefcf7", size = 8391602, upload-time = "2026-03-30T08:47:58.303Z" }, - { url = "https://files.pythonhosted.org/packages/10/bb/dd06f4c24c01db9cf11341b547d0a016b2c90ed7dbbb086a5710df7dd1d7/grpcio-1.80.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8eb613f02d34721f1acf3626dfdb3545bd3c8505b0e52bf8b5710a28d02e8aa7", size = 7826752, upload-time = "2026-03-30T08:48:01.311Z" }, - { url = "https://files.pythonhosted.org/packages/f9/1e/9d67992ba23371fd63d4527096eb8c6b76d74d52b500df992a3343fd7251/grpcio-1.80.0-cp313-cp313-win32.whl", hash = "sha256:93b6f823810720912fd131f561f91f5fed0fda372b6b7028a2681b8194d5d294", size = 4142310, upload-time = "2026-03-30T08:48:04.594Z" }, - { url = "https://files.pythonhosted.org/packages/cf/e6/283326a27da9e2c3038bc93eeea36fb118ce0b2d03922a9cda6688f53c5b/grpcio-1.80.0-cp313-cp313-win_amd64.whl", hash = "sha256:e172cf795a3ba5246d3529e4d34c53db70e888fa582a8ffebd2e6e48bc0cba50", size = 4882833, upload-time = "2026-03-30T08:48:07.363Z" }, - { url = "https://files.pythonhosted.org/packages/c5/6d/e65307ce20f5a09244ba9e9d8476e99fb039de7154f37fb85f26978b59c3/grpcio-1.80.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:3d4147a97c8344d065d01bbf8b6acec2cf86fb0400d40696c8bdad34a64ffc0e", size = 6017376, upload-time = "2026-03-30T08:48:10.005Z" }, - { url = "https://files.pythonhosted.org/packages/69/10/9cef5d9650c72625a699c549940f0abb3c4bfdb5ed45a5ce431f92f31806/grpcio-1.80.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d8e11f167935b3eb089ac9038e1a063e6d7dbe995c0bb4a661e614583352e76f", size = 12018133, upload-time = "2026-03-30T08:48:12.927Z" }, - { url = "https://files.pythonhosted.org/packages/04/82/983aabaad82ba26113caceeb9091706a0696b25da004fe3defb5b346e15b/grpcio-1.80.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:f14b618fc30de822681ee986cfdcc2d9327229dc4c98aed16896761cacd468b9", size = 6574748, upload-time = "2026-03-30T08:48:16.386Z" }, - { url = "https://files.pythonhosted.org/packages/07/d7/031666ef155aa0bf399ed7e19439656c38bbd143779ae0861b038ce82abd/grpcio-1.80.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4ed39fbdcf9b87370f6e8df4e39ca7b38b3e5e9d1b0013c7b6be9639d6578d14", size = 7277711, upload-time = "2026-03-30T08:48:19.627Z" }, - { url = "https://files.pythonhosted.org/packages/e8/43/f437a78f7f4f1d311804189e8f11fb311a01049b2e08557c1068d470cb2e/grpcio-1.80.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:2dcc70e9f0ba987526e8e8603a610fb4f460e42899e74e7a518bf3c68fe1bf05", size = 6785372, upload-time = "2026-03-30T08:48:22.373Z" }, - { url = "https://files.pythonhosted.org/packages/93/3d/f6558e9c6296cb4227faa5c43c54a34c68d32654b829f53288313d16a86e/grpcio-1.80.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:448c884b668b868562b1bda833c5fce6272d26e1926ec46747cda05741d302c1", size = 7395268, upload-time = "2026-03-30T08:48:25.638Z" }, - { url = "https://files.pythonhosted.org/packages/06/21/0fdd77e84720b08843c371a2efa6f2e19dbebf56adc72df73d891f5506f0/grpcio-1.80.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a1dc80fe55685b4a543555e6eef975303b36c8db1023b1599b094b92aa77965f", size = 8392000, upload-time = "2026-03-30T08:48:28.974Z" }, - { url = "https://files.pythonhosted.org/packages/f5/68/67f4947ed55d2e69f2cc199ab9fd85e0a0034d813bbeef84df6d2ba4d4b7/grpcio-1.80.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:31b9ac4ad1aa28ffee5503821fafd09e4da0a261ce1c1281c6c8da0423c83b6e", size = 7828477, upload-time = "2026-03-30T08:48:32.054Z" }, - { url = "https://files.pythonhosted.org/packages/44/b6/8d4096691b2e385e8271911a0de4f35f0a6c7d05aff7098e296c3de86939/grpcio-1.80.0-cp314-cp314-win32.whl", hash = "sha256:367ce30ba67d05e0592470428f0ec1c31714cab9ef19b8f2e37be1f4c7d32fae", size = 4218563, upload-time = "2026-03-30T08:48:34.538Z" }, - { url = "https://files.pythonhosted.org/packages/e5/8c/bbe6baf2557262834f2070cf668515fa308b2d38a4bbf771f8f7872a7036/grpcio-1.80.0-cp314-cp314-win_amd64.whl", hash = "sha256:3b01e1f5464c583d2f567b2e46ff0d516ef979978f72091fd81f5ab7fa6e2e7f", size = 5019457, upload-time = "2026-03-30T08:48:37.308Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/06/8a/3d098f35c143a89520e568e6539cc098fcd294495910e359889ce8741c84/grpcio-1.78.0.tar.gz", hash = "sha256:7382b95189546f375c174f53a5fa873cef91c4b8005faa05cc5b3beea9c4f1c5", size = 12852416, upload-time = "2026-02-06T09:57:18.093Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5a/a8/690a085b4d1fe066130de97a87de32c45062cf2ecd218df9675add895550/grpcio-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:7cc47943d524ee0096f973e1081cb8f4f17a4615f2116882a5f1416e4cfe92b5", size = 5946986, upload-time = "2026-02-06T09:54:34.043Z" }, + { url = "https://files.pythonhosted.org/packages/c7/1b/e5213c5c0ced9d2d92778d30529ad5bb2dcfb6c48c4e2d01b1f302d33d64/grpcio-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:c3f293fdc675ccba4db5a561048cca627b5e7bd1c8a6973ffedabe7d116e22e2", size = 11816533, upload-time = "2026-02-06T09:54:37.04Z" }, + { url = "https://files.pythonhosted.org/packages/18/37/1ba32dccf0a324cc5ace744c44331e300b000a924bf14840f948c559ede7/grpcio-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:10a9a644b5dd5aec3b82b5b0b90d41c0fa94c85ef42cb42cf78a23291ddb5e7d", size = 6519964, upload-time = "2026-02-06T09:54:40.268Z" }, + { url = "https://files.pythonhosted.org/packages/ed/f5/c0e178721b818072f2e8b6fde13faaba942406c634009caf065121ce246b/grpcio-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:4c5533d03a6cbd7f56acfc9cfb44ea64f63d29091e40e44010d34178d392d7eb", size = 7198058, upload-time = "2026-02-06T09:54:42.389Z" }, + { url = "https://files.pythonhosted.org/packages/5b/b2/40d43c91ae9cd667edc960135f9f08e58faa1576dc95af29f66ec912985f/grpcio-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:ff870aebe9a93a85283837801d35cd5f8814fe2ad01e606861a7fb47c762a2b7", size = 6727212, upload-time = "2026-02-06T09:54:44.91Z" }, + { url = "https://files.pythonhosted.org/packages/ed/88/9da42eed498f0efcfcd9156e48ae63c0cde3bea398a16c99fb5198c885b6/grpcio-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:391e93548644e6b2726f1bb84ed60048d4bcc424ce5e4af0843d28ca0b754fec", size = 7300845, upload-time = "2026-02-06T09:54:47.562Z" }, + { url = "https://files.pythonhosted.org/packages/23/3f/1c66b7b1b19a8828890e37868411a6e6925df5a9030bfa87ab318f34095d/grpcio-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:df2c8f3141f7cbd112a6ebbd760290b5849cda01884554f7c67acc14e7b1758a", size = 8284605, upload-time = "2026-02-06T09:54:50.475Z" }, + { url = "https://files.pythonhosted.org/packages/94/c4/ca1bd87394f7b033e88525384b4d1e269e8424ab441ea2fba1a0c5b50986/grpcio-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bd8cb8026e5f5b50498a3c4f196f57f9db344dad829ffae16b82e4fdbaea2813", size = 7726672, upload-time = "2026-02-06T09:54:53.11Z" }, + { url = "https://files.pythonhosted.org/packages/41/09/f16e487d4cc65ccaf670f6ebdd1a17566b965c74fc3d93999d3b2821e052/grpcio-1.78.0-cp310-cp310-win32.whl", hash = "sha256:f8dff3d9777e5d2703a962ee5c286c239bf0ba173877cc68dc02c17d042e29de", size = 4076715, upload-time = "2026-02-06T09:54:55.549Z" }, + { url = "https://files.pythonhosted.org/packages/2a/32/4ce60d94e242725fd3bcc5673c04502c82a8e87b21ea411a63992dc39f8f/grpcio-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:94f95cf5d532d0e717eed4fc1810e8e6eded04621342ec54c89a7c2f14b581bf", size = 4799157, upload-time = "2026-02-06T09:54:59.838Z" }, + { url = "https://files.pythonhosted.org/packages/86/c7/d0b780a29b0837bf4ca9580904dfb275c1fc321ded7897d620af7047ec57/grpcio-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:2777b783f6c13b92bd7b716667452c329eefd646bfb3f2e9dabea2e05dbd34f6", size = 5951525, upload-time = "2026-02-06T09:55:01.989Z" }, + { url = "https://files.pythonhosted.org/packages/c5/b1/96920bf2ee61df85a9503cb6f733fe711c0ff321a5a697d791b075673281/grpcio-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:9dca934f24c732750389ce49d638069c3892ad065df86cb465b3fa3012b70c9e", size = 11830418, upload-time = "2026-02-06T09:55:04.462Z" }, + { url = "https://files.pythonhosted.org/packages/83/0c/7c1528f098aeb75a97de2bae18c530f56959fb7ad6c882db45d9884d6edc/grpcio-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:459ab414b35f4496138d0ecd735fed26f1318af5e52cb1efbc82a09f0d5aa911", size = 6524477, upload-time = "2026-02-06T09:55:07.111Z" }, + { url = "https://files.pythonhosted.org/packages/8d/52/e7c1f3688f949058e19a011c4e0dec973da3d0ae5e033909677f967ae1f4/grpcio-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:082653eecbdf290e6e3e2c276ab2c54b9e7c299e07f4221872380312d8cf395e", size = 7198266, upload-time = "2026-02-06T09:55:10.016Z" }, + { url = "https://files.pythonhosted.org/packages/e5/61/8ac32517c1e856677282c34f2e7812d6c328fa02b8f4067ab80e77fdc9c9/grpcio-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:85f93781028ec63f383f6bc90db785a016319c561cc11151fbb7b34e0d012303", size = 6730552, upload-time = "2026-02-06T09:55:12.207Z" }, + { url = "https://files.pythonhosted.org/packages/bd/98/b8ee0158199250220734f620b12e4a345955ac7329cfd908d0bf0fda77f0/grpcio-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:f12857d24d98441af6a1d5c87442d624411db486f7ba12550b07788f74b67b04", size = 7304296, upload-time = "2026-02-06T09:55:15.044Z" }, + { url = "https://files.pythonhosted.org/packages/bd/0f/7b72762e0d8840b58032a56fdbd02b78fc645b9fa993d71abf04edbc54f4/grpcio-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5397fff416b79e4b284959642a4e95ac4b0f1ece82c9993658e0e477d40551ec", size = 8288298, upload-time = "2026-02-06T09:55:17.276Z" }, + { url = "https://files.pythonhosted.org/packages/24/ae/ae4ce56bc5bb5caa3a486d60f5f6083ac3469228faa734362487176c15c5/grpcio-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:fbe6e89c7ffb48518384068321621b2a69cab509f58e40e4399fdd378fa6d074", size = 7730953, upload-time = "2026-02-06T09:55:19.545Z" }, + { url = "https://files.pythonhosted.org/packages/b5/6e/8052e3a28eb6a820c372b2eb4b5e32d195c661e137d3eca94d534a4cfd8a/grpcio-1.78.0-cp311-cp311-win32.whl", hash = "sha256:6092beabe1966a3229f599d7088b38dfc8ffa1608b5b5cdda31e591e6500f856", size = 4076503, upload-time = "2026-02-06T09:55:21.521Z" }, + { url = "https://files.pythonhosted.org/packages/08/62/f22c98c5265dfad327251fa2f840b591b1df5f5e15d88b19c18c86965b27/grpcio-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:1afa62af6e23f88629f2b29ec9e52ec7c65a7176c1e0a83292b93c76ca882558", size = 4799767, upload-time = "2026-02-06T09:55:24.107Z" }, + { url = "https://files.pythonhosted.org/packages/4e/f4/7384ed0178203d6074446b3c4f46c90a22ddf7ae0b3aee521627f54cfc2a/grpcio-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:f9ab915a267fc47c7e88c387a3a28325b58c898e23d4995f765728f4e3dedb97", size = 5913985, upload-time = "2026-02-06T09:55:26.832Z" }, + { url = "https://files.pythonhosted.org/packages/81/ed/be1caa25f06594463f685b3790b320f18aea49b33166f4141bfdc2bfb236/grpcio-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:3f8904a8165ab21e07e58bf3e30a73f4dffc7a1e0dbc32d51c61b5360d26f43e", size = 11811853, upload-time = "2026-02-06T09:55:29.224Z" }, + { url = "https://files.pythonhosted.org/packages/24/a7/f06d151afc4e64b7e3cc3e872d331d011c279aaab02831e40a81c691fb65/grpcio-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:859b13906ce098c0b493af92142ad051bf64c7870fa58a123911c88606714996", size = 6475766, upload-time = "2026-02-06T09:55:31.825Z" }, + { url = "https://files.pythonhosted.org/packages/8a/a8/4482922da832ec0082d0f2cc3a10976d84a7424707f25780b82814aafc0a/grpcio-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b2342d87af32790f934a79c3112641e7b27d63c261b8b4395350dad43eff1dc7", size = 7170027, upload-time = "2026-02-06T09:55:34.7Z" }, + { url = "https://files.pythonhosted.org/packages/54/bf/f4a3b9693e35d25b24b0b39fa46d7d8a3c439e0a3036c3451764678fec20/grpcio-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:12a771591ae40bc65ba67048fa52ef4f0e6db8279e595fd349f9dfddeef571f9", size = 6690766, upload-time = "2026-02-06T09:55:36.902Z" }, + { url = "https://files.pythonhosted.org/packages/c7/b9/521875265cc99fe5ad4c5a17010018085cae2810a928bf15ebe7d8bcd9cc/grpcio-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:185dea0d5260cbb2d224c507bf2a5444d5abbb1fa3594c1ed7e4c709d5eb8383", size = 7266161, upload-time = "2026-02-06T09:55:39.824Z" }, + { url = "https://files.pythonhosted.org/packages/05/86/296a82844fd40a4ad4a95f100b55044b4f817dece732bf686aea1a284147/grpcio-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:51b13f9aed9d59ee389ad666b8c2214cc87b5de258fa712f9ab05f922e3896c6", size = 8253303, upload-time = "2026-02-06T09:55:42.353Z" }, + { url = "https://files.pythonhosted.org/packages/f3/e4/ea3c0caf5468537f27ad5aab92b681ed7cc0ef5f8c9196d3fd42c8c2286b/grpcio-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:fd5f135b1bd58ab088930b3c613455796dfa0393626a6972663ccdda5b4ac6ce", size = 7698222, upload-time = "2026-02-06T09:55:44.629Z" }, + { url = "https://files.pythonhosted.org/packages/d7/47/7f05f81e4bb6b831e93271fb12fd52ba7b319b5402cbc101d588f435df00/grpcio-1.78.0-cp312-cp312-win32.whl", hash = "sha256:94309f498bcc07e5a7d16089ab984d42ad96af1d94b5a4eb966a266d9fcabf68", size = 4066123, upload-time = "2026-02-06T09:55:47.644Z" }, + { url = "https://files.pythonhosted.org/packages/ad/e7/d6914822c88aa2974dbbd10903d801a28a19ce9cd8bad7e694cbbcf61528/grpcio-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:9566fe4ababbb2610c39190791e5b829869351d14369603702e890ef3ad2d06e", size = 4797657, upload-time = "2026-02-06T09:55:49.86Z" }, + { url = "https://files.pythonhosted.org/packages/05/a9/8f75894993895f361ed8636cd9237f4ab39ef87fd30db17467235ed1c045/grpcio-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:ce3a90455492bf8bfa38e56fbbe1dbd4f872a3d8eeaf7337dc3b1c8aa28c271b", size = 5920143, upload-time = "2026-02-06T09:55:52.035Z" }, + { url = "https://files.pythonhosted.org/packages/55/06/0b78408e938ac424100100fd081189451b472236e8a3a1f6500390dc4954/grpcio-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2bf5e2e163b356978b23652c4818ce4759d40f4712ee9ec5a83c4be6f8c23a3a", size = 11803926, upload-time = "2026-02-06T09:55:55.494Z" }, + { url = "https://files.pythonhosted.org/packages/88/93/b59fe7832ff6ae3c78b813ea43dac60e295fa03606d14d89d2e0ec29f4f3/grpcio-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8f2ac84905d12918e4e55a16da17939eb63e433dc11b677267c35568aa63fc84", size = 6478628, upload-time = "2026-02-06T09:55:58.533Z" }, + { url = "https://files.pythonhosted.org/packages/ed/df/e67e3734527f9926b7d9c0dde6cd998d1d26850c3ed8eeec81297967ac67/grpcio-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b58f37edab4a3881bc6c9bca52670610e0c9ca14e2ea3cf9debf185b870457fb", size = 7173574, upload-time = "2026-02-06T09:56:01.786Z" }, + { url = "https://files.pythonhosted.org/packages/a6/62/cc03fffb07bfba982a9ec097b164e8835546980aec25ecfa5f9c1a47e022/grpcio-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:735e38e176a88ce41840c21bb49098ab66177c64c82426e24e0082500cc68af5", size = 6692639, upload-time = "2026-02-06T09:56:04.529Z" }, + { url = "https://files.pythonhosted.org/packages/bf/9a/289c32e301b85bdb67d7ec68b752155e674ee3ba2173a1858f118e399ef3/grpcio-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:2045397e63a7a0ee7957c25f7dbb36ddc110e0cfb418403d110c0a7a68a844e9", size = 7268838, upload-time = "2026-02-06T09:56:08.397Z" }, + { url = "https://files.pythonhosted.org/packages/0e/79/1be93f32add280461fa4773880196572563e9c8510861ac2da0ea0f892b6/grpcio-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:a9f136fbafe7ccf4ac7e8e0c28b31066e810be52d6e344ef954a3a70234e1702", size = 8251878, upload-time = "2026-02-06T09:56:10.914Z" }, + { url = "https://files.pythonhosted.org/packages/65/65/793f8e95296ab92e4164593674ae6291b204bb5f67f9d4a711489cd30ffa/grpcio-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:748b6138585379c737adc08aeffd21222abbda1a86a0dca2a39682feb9196c20", size = 7695412, upload-time = "2026-02-06T09:56:13.593Z" }, + { url = "https://files.pythonhosted.org/packages/1c/9f/1e233fe697ecc82845942c2822ed06bb522e70d6771c28d5528e4c50f6a4/grpcio-1.78.0-cp313-cp313-win32.whl", hash = "sha256:271c73e6e5676afe4fc52907686670c7cea22ab2310b76a59b678403ed40d670", size = 4064899, upload-time = "2026-02-06T09:56:15.601Z" }, + { url = "https://files.pythonhosted.org/packages/4d/27/d86b89e36de8a951501fb06a0f38df19853210f341d0b28f83f4aa0ffa08/grpcio-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:f2d4e43ee362adfc05994ed479334d5a451ab7bc3f3fee1b796b8ca66895acb4", size = 4797393, upload-time = "2026-02-06T09:56:17.882Z" }, + { url = "https://files.pythonhosted.org/packages/29/f2/b56e43e3c968bfe822fa6ce5bca10d5c723aa40875b48791ce1029bb78c7/grpcio-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:e87cbc002b6f440482b3519e36e1313eb5443e9e9e73d6a52d43bd2004fcfd8e", size = 5920591, upload-time = "2026-02-06T09:56:20.758Z" }, + { url = "https://files.pythonhosted.org/packages/5d/81/1f3b65bd30c334167bfa8b0d23300a44e2725ce39bba5b76a2460d85f745/grpcio-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:c41bc64626db62e72afec66b0c8a0da76491510015417c127bfc53b2fe6d7f7f", size = 11813685, upload-time = "2026-02-06T09:56:24.315Z" }, + { url = "https://files.pythonhosted.org/packages/0e/1c/bbe2f8216a5bd3036119c544d63c2e592bdf4a8ec6e4a1867592f4586b26/grpcio-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:8dfffba826efcf366b1e3ccc37e67afe676f290e13a3b48d31a46739f80a8724", size = 6487803, upload-time = "2026-02-06T09:56:27.367Z" }, + { url = "https://files.pythonhosted.org/packages/16/5c/a6b2419723ea7ddce6308259a55e8e7593d88464ce8db9f4aa857aba96fa/grpcio-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:74be1268d1439eaaf552c698cdb11cd594f0c49295ae6bb72c34ee31abbe611b", size = 7173206, upload-time = "2026-02-06T09:56:29.876Z" }, + { url = "https://files.pythonhosted.org/packages/df/1e/b8801345629a415ea7e26c83d75eb5dbe91b07ffe5210cc517348a8d4218/grpcio-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:be63c88b32e6c0f1429f1398ca5c09bc64b0d80950c8bb7807d7d7fb36fb84c7", size = 6693826, upload-time = "2026-02-06T09:56:32.305Z" }, + { url = "https://files.pythonhosted.org/packages/34/84/0de28eac0377742679a510784f049738a80424b17287739fc47d63c2439e/grpcio-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:3c586ac70e855c721bda8f548d38c3ca66ac791dc49b66a8281a1f99db85e452", size = 7277897, upload-time = "2026-02-06T09:56:34.915Z" }, + { url = "https://files.pythonhosted.org/packages/ca/9c/ad8685cfe20559a9edb66f735afdcb2b7d3de69b13666fdfc542e1916ebd/grpcio-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:35eb275bf1751d2ffbd8f57cdbc46058e857cf3971041521b78b7db94bdaf127", size = 8252404, upload-time = "2026-02-06T09:56:37.553Z" }, + { url = "https://files.pythonhosted.org/packages/3c/05/33a7a4985586f27e1de4803887c417ec7ced145ebd069bc38a9607059e2b/grpcio-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:207db540302c884b8848036b80db352a832b99dfdf41db1eb554c2c2c7800f65", size = 7696837, upload-time = "2026-02-06T09:56:40.173Z" }, + { url = "https://files.pythonhosted.org/packages/73/77/7382241caf88729b106e49e7d18e3116216c778e6a7e833826eb96de22f7/grpcio-1.78.0-cp314-cp314-win32.whl", hash = "sha256:57bab6deef2f4f1ca76cc04565df38dc5713ae6c17de690721bdf30cb1e0545c", size = 4142439, upload-time = "2026-02-06T09:56:43.258Z" }, + { url = "https://files.pythonhosted.org/packages/48/b2/b096ccce418882fbfda4f7496f9357aaa9a5af1896a9a7f60d9f2b275a06/grpcio-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:dce09d6116df20a96acfdbf85e4866258c3758180e8c49845d6ba8248b6d0bbb", size = 4929852, upload-time = "2026-02-06T09:56:45.885Z" }, ] [[package]] name = "grpcio-reflection" -version = "1.80.0" +version = "1.78.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/c3/eb/b84590a0794ae2509cdc9896f66ae2949ac8d85a2078fe4412bb6ca1211f/grpcio_reflection-1.80.0.tar.gz", hash = "sha256:e9c76aabc4324279945b70bc76a3d41bc4f9396bffcf1cfc1011a571c2c56221", size = 19211, upload-time = "2026-03-30T08:54:36.73Z" } +sdist = { url = "https://files.pythonhosted.org/packages/31/06/337546aae558675f79cae2a8c1ce0c9b1952cbc5c28b01878f68d040f5bb/grpcio_reflection-1.78.0.tar.gz", hash = "sha256:e6e60c0b85dbcdf963b4d4d150c0f1d238ba891d805b575c52c0365d07fc0c40", size = 19098, upload-time = "2026-02-06T10:01:52.225Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/af/29/49fbd2593a29dab9cd5837f67668157ef7a24c16eac232852379e8e43266/grpcio_reflection-1.80.0-py3-none-any.whl", hash = "sha256:a7d0b77961b1c722400b1509968f1ad3a64e9d78280d4cf5b88b6cfe5b41eb61", size = 22917, upload-time = "2026-03-30T08:54:00.008Z" }, + { url = "https://files.pythonhosted.org/packages/df/6d/4d095d27ccd049865ecdafc467754e9e47ad0f677a30dda969c3590f6582/grpcio_reflection-1.78.0-py3-none-any.whl", hash = "sha256:06fcfde9e6888cdd12e9dd1cf6dc7c440c2e9acf420f696ccbe008672ed05b60", size = 22800, upload-time = "2026-02-06T10:01:33.822Z" }, ] [[package]] @@ -1162,65 +1218,65 @@ wheels = [ [[package]] name = "grpcio-tools" -version = "1.80.0" +version = "1.78.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "grpcio" }, { name = "protobuf" }, { name = "setuptools" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/94/c8/1223f29c84a143ae9a56c084fc96894de0ba84b6e8d60a26241abd81d278/grpcio_tools-1.80.0.tar.gz", hash = "sha256:26052b19c6ce0dcf52d1024496aea3e2bdfa864159f06dc7b97b22d041a94b26", size = 6133212, upload-time = "2026-03-30T08:52:39.077Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/54/1de67f5080da305a258758a8deb33f85666fa759f56785042a80b114a53f/grpcio_tools-1.80.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:727477b9afa4b53f5ec70cafb41c3965d893835e0d4ea9b542fe3d0d005602bf", size = 2549601, upload-time = "2026-03-30T08:50:09.498Z" }, - { url = "https://files.pythonhosted.org/packages/9a/b4/6d57ea199c5b880d182a2234aafa9a686f9c54c708ea7be75bd19d5aa825/grpcio_tools-1.80.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:85fe8d15f146c62cb76f38d963e256392d287442b9232717d30ae9e3bbda9bc3", size = 5712717, upload-time = "2026-03-30T08:50:15.028Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1a/5505ee2277d368b409c796c78f22ea34a2a517b7d16755247efd663dc7af/grpcio_tools-1.80.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:95f0fffb5ca00519f3b602f938169b4dfa04b165e03258323965a9dfe8cc4d80", size = 2595941, upload-time = "2026-03-30T08:50:17.299Z" }, - { url = "https://files.pythonhosted.org/packages/4e/39/7fc1d16d8b767805079d76365d73e82c88dfaf179034473dbc9fbccedb77/grpcio_tools-1.80.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7a0106af212748823a6ebd8ffbd9043414216f47cae3835f3187de0a62c415d3", size = 2909304, upload-time = "2026-03-30T08:50:19.485Z" }, - { url = "https://files.pythonhosted.org/packages/97/d8/276ee759755d8f34f2ca5e9d2debd1a59f29f66059fb790bc369f2236c26/grpcio_tools-1.80.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:31fd01a4038b5dfc4ec79504a17061344f670f851833411717fef66920f13cd7", size = 2660269, upload-time = "2026-03-30T08:50:21.266Z" }, - { url = "https://files.pythonhosted.org/packages/51/04/a6bb47942ad52901d777a649324d3203cf19d487f1d446263637f7a5bf12/grpcio_tools-1.80.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:57da9e19607fac4a01c48ead333c0dd15d91ed38794dce1194eda308f73e2038", size = 3109798, upload-time = "2026-03-30T08:50:23.267Z" }, - { url = "https://files.pythonhosted.org/packages/be/50/7ee69b2919916739787d725f205b878e8d1619dd30422b8278e324664669/grpcio_tools-1.80.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:90968f751851abb8b145593609800fa70c837e1c93ba0792c480b1c8d8bc29ef", size = 3658930, upload-time = "2026-03-30T08:50:25.458Z" }, - { url = "https://files.pythonhosted.org/packages/92/61/6d50783092b0e8bbcb04152d5388bf50ecf3ea2f783d95288ff6c3bb00fa/grpcio_tools-1.80.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b69dc5d6376ab43406304d1e2fc61ccf960b287d4325d77c3d45448c37a9d2da", size = 3326562, upload-time = "2026-03-30T08:50:27.809Z" }, - { url = "https://files.pythonhosted.org/packages/ea/58/d272ba549f6b1f0d8504f5fc4cd0a296f2c495a64d6e987fe871c4151557/grpcio_tools-1.80.0-cp310-cp310-win32.whl", hash = "sha256:3e8dcfebe34cb54df095de3d5871a4562a85a29f26d0f8bb41ee2c3dcfb11c3c", size = 997620, upload-time = "2026-03-30T08:50:29.959Z" }, - { url = "https://files.pythonhosted.org/packages/70/5f/9f45a9946a0298711c72ca48b2c1f46a7d0c207a44cd3e4bb59d04556ba3/grpcio_tools-1.80.0-cp310-cp310-win_amd64.whl", hash = "sha256:fc622ed4ca400695f41c9eae3266276c6ba007e4c28164ce53b44e7ccc5e492b", size = 1162466, upload-time = "2026-03-30T08:50:32.242Z" }, - { url = "https://files.pythonhosted.org/packages/8a/d7/225dc91e6cb4f8d4830f16a478a468e9c6f342dcdf8cacc3772cc1d1f607/grpcio_tools-1.80.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:1c43e5c768578fe0c6de3dbfaabe64af642951e1aa05c487cacedda63fa6c6c4", size = 2549937, upload-time = "2026-03-30T08:50:34.651Z" }, - { url = "https://files.pythonhosted.org/packages/97/3d/a3684cb7677f3bea8db434eae02a9ce30135d7a268cd473b1bc8041c4722/grpcio_tools-1.80.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:a225348456575f3ac7851d8e23163195e76d2a905ee340cf73f33da62fba08aa", size = 5713099, upload-time = "2026-03-30T08:50:37.158Z" }, - { url = "https://files.pythonhosted.org/packages/b1/81/5665c697173ec346076358bfbfed0f7386825852494593ca14386478dfee/grpcio_tools-1.80.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:a9396f02820d3f51c368c2c9dee15c55c77636c91be48a4d5c702e98d6fe0fdc", size = 2595776, upload-time = "2026-03-30T08:50:39.087Z" }, - { url = "https://files.pythonhosted.org/packages/03/4f/fb81384f08a8226fa079972ba88272ac6277581fc72e8ab234d74c7e065b/grpcio_tools-1.80.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:797c08460cae16b402326eac329aec720dccf45c9f9279b95a352792eb53cf0f", size = 2909144, upload-time = "2026-03-30T08:50:40.922Z" }, - { url = "https://files.pythonhosted.org/packages/4d/9c/c957618f1c2a3195ecf5e83b03edcb364c2c1391f74183cb76e5763fa536/grpcio_tools-1.80.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:1872a867eb6217de19edb70a4ce4a374ced9d94293533dfd42fa649713f55bf4", size = 2660477, upload-time = "2026-03-30T08:50:42.766Z" }, - { url = "https://files.pythonhosted.org/packages/42/c7/23913da184febfd4eaf04de256a26bc5ff0411a5feb753e2adcff10fa86a/grpcio_tools-1.80.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:db122ba5ee357e3bb14e8944d69bbebcbdae91d5eace29ed4df3edc53cbc6528", size = 3110164, upload-time = "2026-03-30T08:50:44.761Z" }, - { url = "https://files.pythonhosted.org/packages/af/fa/b25ed85ebdb0396910eaa250b1346d75527d22fca586265416bd4330dcd5/grpcio_tools-1.80.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:ddefd48c227e6f4d640fe576fac5fb2c4a8898196f513604c8ec7671b3b3d421", size = 3658988, upload-time = "2026-03-30T08:50:47.546Z" }, - { url = "https://files.pythonhosted.org/packages/60/85/2a55147cc9645e2ed777d1afcd2dc68cb34ba6f6c726bd4378ddb001a5ea/grpcio_tools-1.80.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:970ec058fa469dd6dae6ebc687501c5da670d95dead75f62f5b0933dce2c9794", size = 3326662, upload-time = "2026-03-30T08:50:49.59Z" }, - { url = "https://files.pythonhosted.org/packages/68/ed/b05bee2a992e6f9bda81909692ea920d0896cfa05c5c9dd77ba03f2d22fb/grpcio_tools-1.80.0-cp311-cp311-win32.whl", hash = "sha256:526b4402d47a0e9b31cd6087e42b7674784617916cc73c764e0bc35ed41b4ee5", size = 997969, upload-time = "2026-03-30T08:50:51.539Z" }, - { url = "https://files.pythonhosted.org/packages/b6/9a/cb50c8270e2f6285ff2761130ae257ac4e51789ded4b9d9710ce0381814d/grpcio_tools-1.80.0-cp311-cp311-win_amd64.whl", hash = "sha256:ee101ecda7231770f6a5da1024a9a6ed587a7785f8fe23ab8283f4a1acb3ffe6", size = 1162742, upload-time = "2026-03-30T08:50:54.232Z" }, - { url = "https://files.pythonhosted.org/packages/0c/b9/65929df8c9614792db900a8e45d4997fadbd1734c827da3f0eb1f2fe4866/grpcio_tools-1.80.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:d19d5a8244311947b96f749c417b32d144641c6953f1164824579e1f0a51d040", size = 2550856, upload-time = "2026-03-30T08:50:57.3Z" }, - { url = "https://files.pythonhosted.org/packages/28/17/af1557544d68d1aeca9d9ea53ed16524022d521fec6ba334ab3530e9c1a6/grpcio_tools-1.80.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:fb599a3dc89ed1bb24489a2724b2f6dd4cddbbf0f7bdd69c073477bab0dc7554", size = 5710883, upload-time = "2026-03-30T08:51:00.077Z" }, - { url = "https://files.pythonhosted.org/packages/cc/48/aa9b4f7519ca972bc40d315d5c28f05ca28fa08de13d4e8b69f551b798ab/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:623ee31fc2ff7df9a987b4f3d139c30af17ce46a861ae0e25fb8c112daa32dd8", size = 2598004, upload-time = "2026-03-30T08:51:02.102Z" }, - { url = "https://files.pythonhosted.org/packages/b4/b8/b01371c119924b3beca1fe3f047b1bc2cdc66b3d37f0f3acc9d10c567a43/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:b46570a68378539ee2b75a5a43202561f8d753c832798b1047099e3c551cf5d6", size = 2909568, upload-time = "2026-03-30T08:51:04.159Z" }, - { url = "https://files.pythonhosted.org/packages/4f/7c/1108f7bdb58475a7e701ec89b55eb494538b6e76acd211ba0d4cc5fd28e8/grpcio_tools-1.80.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:51caf99c28999e7e0f97e9cea190c1405b7681a57bb2e0631205accd92b43fa4", size = 2660938, upload-time = "2026-03-30T08:51:06.126Z" }, - { url = "https://files.pythonhosted.org/packages/67/59/d1c0063d4cd3b85363c7044ff3e5159d6d5df96e2692a9a5312d9c8cb290/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:cdaa1c9aa8d3a87891a96700cadd29beec214711d6522818d207277f6452567c", size = 3113814, upload-time = "2026-03-30T08:51:08.834Z" }, - { url = "https://files.pythonhosted.org/packages/76/21/18d34a4efe524c903cf66b0cfa5260d81f277b6ae668b647edf795df9ce5/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:3399b5fd7b59bcffd59c6b9975a969d9f37a3c87f3e3d63c3a09c147907acb0d", size = 3662793, upload-time = "2026-03-30T08:51:11.094Z" }, - { url = "https://files.pythonhosted.org/packages/f3/40/cf2d9295a6bd593244ea703858f8fc2efd315046ca3ef7c6f9ebc5b810fa/grpcio_tools-1.80.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:9c6abc08d3485b2aac99bb58afcd31dc6cd4316ce36cf263ff09cb6df15f287f", size = 3329149, upload-time = "2026-03-30T08:51:13.066Z" }, - { url = "https://files.pythonhosted.org/packages/0d/1d/fc34b32167966df20d69429b71dfca83c48434b047a5ac4fd6cd91ca4eed/grpcio_tools-1.80.0-cp312-cp312-win32.whl", hash = "sha256:18c51e07652ac7386fcdbd11866f8d55a795de073337c12447b5805575339f74", size = 997519, upload-time = "2026-03-30T08:51:14.87Z" }, - { url = "https://files.pythonhosted.org/packages/91/98/6d6563cdf51085b75f8ec24605c6f2ce84197571878ca8ab4af949c6be2d/grpcio_tools-1.80.0-cp312-cp312-win_amd64.whl", hash = "sha256:ac6fdd42d5bb18f0d903a067e2825be172deff70cf197164b6f65676cb506c9b", size = 1162407, upload-time = "2026-03-30T08:51:16.793Z" }, - { url = "https://files.pythonhosted.org/packages/44/d9/f7887a4805939e9a85d03744b66fc02575dc1df3c3e8b4d9ec000ee7a33d/grpcio_tools-1.80.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:e7046837859bbfd10b01786056145480155c16b222c9e209215b68d3be13060e", size = 2550319, upload-time = "2026-03-30T08:51:19.117Z" }, - { url = "https://files.pythonhosted.org/packages/57/5a/c8a05b32bd7203f1b9f4c0151090a2d6179d6c97692d32f2066dc29c67a6/grpcio_tools-1.80.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:a447f28958a8fe84ff0d9d3d9473868feb27ee4a9c9c805e66f5b670121cec59", size = 5709681, upload-time = "2026-03-30T08:51:21.991Z" }, - { url = "https://files.pythonhosted.org/packages/82/6b/794350ed645c12c310008f97068f6a6fd927150b0d0d08aad1d909e880b1/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:75f00450e08fe648ad8a1eeb25bc52219679d54cdd02f04dfdddc747309d83f6", size = 2596820, upload-time = "2026-03-30T08:51:24.323Z" }, - { url = "https://files.pythonhosted.org/packages/f9/b2/b39e7b79f7c878135e0784a53cd7260ee77260c8c7f2c9e46bca8e05d017/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:3db830eaff1f2c2797328f2fa86c9dcdbd7d81af573a68db81e27afa2182a611", size = 2909193, upload-time = "2026-03-30T08:51:27.025Z" }, - { url = "https://files.pythonhosted.org/packages/10/f3/abe089b058f87f9910c9a458409505cbeb0b3e1c2d993a79721d02ee6a32/grpcio_tools-1.80.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:7982b5fe42f012686b667dda12916884de95c4b1c65ff64371fb7232a1474b23", size = 2660197, upload-time = "2026-03-30T08:51:29.392Z" }, - { url = "https://files.pythonhosted.org/packages/09/c3/3f7806ad8b731d8a89fe3c6ed496473abd1ef4c9c42c9e9a8836ce96e377/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:6451b3f4eb52d12c7f32d04bf8e0185f80521f3f088ad04b8d222b3a4819c71e", size = 3113144, upload-time = "2026-03-30T08:51:31.671Z" }, - { url = "https://files.pythonhosted.org/packages/fe/f5/415ef205e0b7e75d2a2005df6120145c4f02fda28d7b3715b55d924fe1a4/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:258bc30654a9a2236be4ca8e2ad443e2ac6db7c8cc20454d34cce60265922726", size = 3661897, upload-time = "2026-03-30T08:51:34.849Z" }, - { url = "https://files.pythonhosted.org/packages/e3/d3/2ad54764c2a9547080dd8518f4a4dc7899c7e6e747a1b1de542ce6a12066/grpcio_tools-1.80.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:865a2b8e6334c838976ab02a322cbd55c863d2eaf3c1e1a0255883c63996772a", size = 3328786, upload-time = "2026-03-30T08:51:37.265Z" }, - { url = "https://files.pythonhosted.org/packages/eb/63/23ab7db01f9630ab4f3742a2fc9fbff38b0cfc30c976114f913950664a75/grpcio_tools-1.80.0-cp313-cp313-win32.whl", hash = "sha256:f760ac1722f33e774814c37b6aa0444143f612e85088ead7447a0e9cd306a1f1", size = 997087, upload-time = "2026-03-30T08:51:39.137Z" }, - { url = "https://files.pythonhosted.org/packages/9b/af/b1c1c4423fb49cb7c8e9d2c02196b038c44160b7028b425466743c6c81fa/grpcio_tools-1.80.0-cp313-cp313-win_amd64.whl", hash = "sha256:7843b9ac6ff8ca508424d0dd968bd9a1a4559967e4a290f26be5bd6f04af2234", size = 1162167, upload-time = "2026-03-30T08:51:41.498Z" }, - { url = "https://files.pythonhosted.org/packages/0e/44/7beeee2348f9f412804f5bf80b7d13b81d522bf926a338ae3da46b2213b7/grpcio_tools-1.80.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:12f950470449dbeec78317dbc090add7a00eb6ca812af7b0538ab7441e0a42c3", size = 2550303, upload-time = "2026-03-30T08:51:44.373Z" }, - { url = "https://files.pythonhosted.org/packages/2d/aa/f77dd85409a1855f8c6319ffc69d81e8c3ffe122ee3a7136653e1991d8b6/grpcio_tools-1.80.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:d3f9a376a29c9adf62bb56f7ff5bc81eb4abeaf53d1e7dde5015564832901a51", size = 5709778, upload-time = "2026-03-30T08:51:47.112Z" }, - { url = "https://files.pythonhosted.org/packages/9c/7c/ab7af4883ebdfdc228b853de89fed409703955e8d47285b321a5794856bd/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:1ba1ffbf2cff71533615e2c5a138ed5569611eec9ae7f9c67b8898e127b54ac0", size = 2597928, upload-time = "2026-03-30T08:51:49.494Z" }, - { url = "https://files.pythonhosted.org/packages/22/e8/4381a963d472e3ab6690ba067ed2b1f1abf8518b10f402678bd2dcb79a54/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:13f60f8d9397c514c6745a967d22b5c8c698347e88deebca1ff2e1b94555e450", size = 2909333, upload-time = "2026-03-30T08:51:52.124Z" }, - { url = "https://files.pythonhosted.org/packages/94/cb/356b5fdf79dd99455b425fb16302fe60995554ceb721afbf3cf770a19208/grpcio_tools-1.80.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:88d77bad5dd3cd5e6f952c4ecdd0ee33e0c02ecfc2e4b0cbee3391ac19e0a431", size = 2660217, upload-time = "2026-03-30T08:51:55.066Z" }, - { url = "https://files.pythonhosted.org/packages/2b/d7/1752018cc2c36b2c5612051379e2e5f59f2dbe612de23e817d2f066a9487/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:017945c3e98a4ed1c4e21399781b4137fc08dfc1f802c8ace2e64ef52d32b142", size = 3113896, upload-time = "2026-03-30T08:51:57.3Z" }, - { url = "https://files.pythonhosted.org/packages/cc/17/695bbe454f70df35c03e22b48c5314683b913d3e6ed35ec90d065418c1ab/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:a33e265d4db803495007a6c623eafb0f6b9bb123ff4a0af89e44567dad809b88", size = 3661950, upload-time = "2026-03-30T08:51:59.867Z" }, - { url = "https://files.pythonhosted.org/packages/9c/d0/533d87629ec823c02c9169ee20228f734c264b209dcdf55268b5a14cde0a/grpcio_tools-1.80.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:6c129da370c5f85f569be2e545317dda786a60dd51d7deea29b03b0c05f6aac3", size = 3328755, upload-time = "2026-03-30T08:52:02.942Z" }, - { url = "https://files.pythonhosted.org/packages/08/a1/504d7838770c73a9761e8a8ff4869dba1146b44f297ff0ac6641481942d3/grpcio_tools-1.80.0-cp314-cp314-win32.whl", hash = "sha256:25742de5958ae4325249a37e724e7c0e5120f8e302a24a977ebd1737b48a5e97", size = 1019620, upload-time = "2026-03-30T08:52:05.342Z" }, - { url = "https://files.pythonhosted.org/packages/f3/75/8b7cd281c5cdfb4ca2c308f7e9b2799bab2be6e7a9e9212ea5a82e2aecd4/grpcio_tools-1.80.0-cp314-cp314-win_amd64.whl", hash = "sha256:bbf8eeef78fda1966f732f79c1c802fadd5cfd203d845d2af4d314d18569069c", size = 1194210, upload-time = "2026-03-30T08:52:08.105Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/8b/d1/cbefe328653f746fd319c4377836a25ba64226e41c6a1d7d5cdbc87a459f/grpcio_tools-1.78.0.tar.gz", hash = "sha256:4b0dd86560274316e155d925158276f8564508193088bc43e20d3f5dff956b2b", size = 5393026, upload-time = "2026-02-06T09:59:59.53Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/5e/70/2118a814a62ab205c905d221064bc09021db83fceeb84764d35c00f0f633/grpcio_tools-1.78.0-cp310-cp310-linux_armv7l.whl", hash = "sha256:ea64e38d1caa2b8468b08cb193f5a091d169b6dbfe1c7dac37d746651ab9d84e", size = 2545568, upload-time = "2026-02-06T09:57:30.308Z" }, + { url = "https://files.pythonhosted.org/packages/2b/a9/68134839dd1a00f964185ead103646d6dd6a396b92ed264eaf521431b793/grpcio_tools-1.78.0-cp310-cp310-macosx_11_0_universal2.whl", hash = "sha256:4003fcd5cbb5d578b06176fd45883a72a8f9203152149b7c680ce28653ad9e3a", size = 5708704, upload-time = "2026-02-06T09:57:33.512Z" }, + { url = "https://files.pythonhosted.org/packages/36/1b/b6135aa9534e22051c53e5b9c0853d18024a41c50aaff464b7b47c1ed379/grpcio_tools-1.78.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fe6b0081775394c61ec633c9ff5dbc18337100eabb2e946b5c83967fe43b2748", size = 2591905, upload-time = "2026-02-06T09:57:35.338Z" }, + { url = "https://files.pythonhosted.org/packages/41/2b/6380df1390d62b1d18ae18d4d790115abf4997fa29498aa50ba644ecb9d8/grpcio_tools-1.78.0-cp310-cp310-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:7e989ad2cd93db52d7f1a643ecaa156ac55bf0484f1007b485979ce8aef62022", size = 2905271, upload-time = "2026-02-06T09:57:37.932Z" }, + { url = "https://files.pythonhosted.org/packages/3a/07/9b369f37c8f4956b68778c044d57390a8f0f3b1cca590018809e75a4fce2/grpcio_tools-1.78.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:b874991797e96c41a37e563236c3317ed41b915eff25b292b202d6277d30da85", size = 2656234, upload-time = "2026-02-06T09:57:41.157Z" }, + { url = "https://files.pythonhosted.org/packages/51/61/40eee40e7a54f775a0d4117536532713606b6b177fff5e327f33ad18746e/grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:daa8c288b728228377aaf758925692fc6068939d9fa32f92ca13dedcbeb41f33", size = 3105770, upload-time = "2026-02-06T09:57:43.373Z" }, + { url = "https://files.pythonhosted.org/packages/b6/ac/81ee4b728e70e8ba66a589f86469925ead02ed6f8973434e4a52e3576148/grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:87e648759b06133199f4bc0c0053e3819f4ec3b900dc399e1097b6065db998b5", size = 3654896, upload-time = "2026-02-06T09:57:45.402Z" }, + { url = "https://files.pythonhosted.org/packages/be/b9/facb3430ee427c800bb1e39588c85685677ea649491d6e0874bd9f3a1c0e/grpcio_tools-1.78.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:f3d3ced52bfe39eba3d24f5a8fab4e12d071959384861b41f0c52ca5399d6920", size = 3322529, upload-time = "2026-02-06T09:57:47.292Z" }, + { url = "https://files.pythonhosted.org/packages/c7/de/d7a011df9abfed8c30f0d2077b0562a6e3edc57cb3e5514718e2a81f370a/grpcio_tools-1.78.0-cp310-cp310-win32.whl", hash = "sha256:4bb6ed690d417b821808796221bde079377dff98fdc850ac157ad2f26cda7a36", size = 993518, upload-time = "2026-02-06T09:57:48.836Z" }, + { url = "https://files.pythonhosted.org/packages/c8/5e/f7f60c3ae2281c6b438c3a8455f4a5d5d2e677cf20207864cbee3763da22/grpcio_tools-1.78.0-cp310-cp310-win_amd64.whl", hash = "sha256:0c676d8342fd53bd85a5d5f0d070cd785f93bc040510014708ede6fcb32fada1", size = 1158505, upload-time = "2026-02-06T09:57:50.633Z" }, + { url = "https://files.pythonhosted.org/packages/75/78/280184d19242ed6762bf453c47a70b869b3c5c72a24dc5bf2bf43909faa3/grpcio_tools-1.78.0-cp311-cp311-linux_armv7l.whl", hash = "sha256:6a8b8b7b49f319d29dbcf507f62984fa382d1d10437d75c3f26db5f09c4ac0af", size = 2545904, upload-time = "2026-02-06T09:57:52.769Z" }, + { url = "https://files.pythonhosted.org/packages/5b/51/3c46dea5113f68fe879961cae62d34bb7a3c308a774301b45d614952ee98/grpcio_tools-1.78.0-cp311-cp311-macosx_11_0_universal2.whl", hash = "sha256:d62cf3b68372b0c6d722a6165db41b976869811abeabc19c8522182978d8db10", size = 5709078, upload-time = "2026-02-06T09:57:56.389Z" }, + { url = "https://files.pythonhosted.org/packages/e0/2c/dc1ae9ec53182c96d56dfcbf3bcd3e55a8952ad508b188c75bf5fc8993d4/grpcio_tools-1.78.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:fa9056742efeaf89d5fe14198af71e5cbc4fbf155d547b89507e19d6025906c6", size = 2591744, upload-time = "2026-02-06T09:57:58.341Z" }, + { url = "https://files.pythonhosted.org/packages/04/63/9b53fc9a9151dd24386785171a4191ee7cb5afb4d983b6a6a87408f41b28/grpcio_tools-1.78.0-cp311-cp311-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:e3191af125dcb705aa6bc3856ba81ba99b94121c1b6ebee152e66ea084672831", size = 2905113, upload-time = "2026-02-06T09:58:00.38Z" }, + { url = "https://files.pythonhosted.org/packages/96/b2/0ad8d789f3a2a00893131c140865605fa91671a6e6fcf9da659e1fabba10/grpcio_tools-1.78.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:283239ddbb67ae83fac111c61b25d8527a1dbd355b377cbc8383b79f1329944d", size = 2656436, upload-time = "2026-02-06T09:58:03.038Z" }, + { url = "https://files.pythonhosted.org/packages/09/4d/580f47ce2fc61b093ade747b378595f51b4f59972dd39949f7444b464a03/grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:ac977508c0db15301ef36d6c79769ec1a6cc4e3bc75735afca7fe7e360cead3a", size = 3106128, upload-time = "2026-02-06T09:58:05.064Z" }, + { url = "https://files.pythonhosted.org/packages/c9/29/d83b2d89f8d10e438bad36b1eb29356510fb97e81e6a608b22ae1890e8e6/grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:4ff605e25652a0bd13aa8a73a09bc48669c68170902f5d2bf1468a57d5e78771", size = 3654953, upload-time = "2026-02-06T09:58:07.15Z" }, + { url = "https://files.pythonhosted.org/packages/08/71/917ce85633311e54fefd7e6eb1224fb780ef317a4d092766f5630c3fc419/grpcio_tools-1.78.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0197d7b561c79be78ab93d0fe2836c8def470683df594bae3ac89dd8e5c821b2", size = 3322630, upload-time = "2026-02-06T09:58:10.305Z" }, + { url = "https://files.pythonhosted.org/packages/b2/55/3fbf6b26ab46fc79e1e6f7f4e0993cf540263dad639290299fad374a0829/grpcio_tools-1.78.0-cp311-cp311-win32.whl", hash = "sha256:28f71f591f7f39555863ced84fcc209cbf4454e85ef957232f43271ee99af577", size = 993804, upload-time = "2026-02-06T09:58:13.698Z" }, + { url = "https://files.pythonhosted.org/packages/73/86/4affe006d9e1e9e1c6653d6aafe2f8b9188acb2b563cd8ed3a2c7c0e8aec/grpcio_tools-1.78.0-cp311-cp311-win_amd64.whl", hash = "sha256:5a6de495dabf86a3b40b9a7492994e1232b077af9d63080811838b781abbe4e8", size = 1158566, upload-time = "2026-02-06T09:58:15.721Z" }, + { url = "https://files.pythonhosted.org/packages/0c/ae/5b1fa5dd8d560a6925aa52de0de8731d319f121c276e35b9b2af7cc220a2/grpcio_tools-1.78.0-cp312-cp312-linux_armv7l.whl", hash = "sha256:9eb122da57d4cad7d339fc75483116f0113af99e8d2c67f3ef9cae7501d806e4", size = 2546823, upload-time = "2026-02-06T09:58:17.944Z" }, + { url = "https://files.pythonhosted.org/packages/a7/ed/d33ccf7fa701512efea7e7e23333b748848a123e9d3bbafde4e126784546/grpcio_tools-1.78.0-cp312-cp312-macosx_11_0_universal2.whl", hash = "sha256:d0c501b8249940b886420e6935045c44cb818fa6f265f4c2b97d5cff9cb5e796", size = 5706776, upload-time = "2026-02-06T09:58:20.944Z" }, + { url = "https://files.pythonhosted.org/packages/c6/69/4285583f40b37af28277fc6b867d636e3b10e1b6a7ebd29391a856e1279b/grpcio_tools-1.78.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:77e5aa2d2a7268d55b1b113f958264681ef1994c970f69d48db7d4683d040f57", size = 2593972, upload-time = "2026-02-06T09:58:23.29Z" }, + { url = "https://files.pythonhosted.org/packages/d7/eb/ecc1885bd6b3147f0a1b7dff5565cab72f01c8f8aa458f682a1c77a9fb08/grpcio_tools-1.78.0-cp312-cp312-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:8e3c0b0e6ba5275322ba29a97bf890565a55f129f99a21b121145e9e93a22525", size = 2905531, upload-time = "2026-02-06T09:58:25.406Z" }, + { url = "https://files.pythonhosted.org/packages/ae/a9/511d0040ced66960ca10ba0f082d6b2d2ee6dd61837b1709636fdd8e23b4/grpcio_tools-1.78.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:975d4cb48694e20ebd78e1643e5f1cd94cdb6a3d38e677a8e84ae43665aa4790", size = 2656909, upload-time = "2026-02-06T09:58:28.022Z" }, + { url = "https://files.pythonhosted.org/packages/06/a3/3d2c707e7dee8df842c96fbb24feb2747e506e39f4a81b661def7fed107c/grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:553ff18c5d52807dedecf25045ae70bad7a3dbba0b27a9a3cdd9bcf0a1b7baec", size = 3109778, upload-time = "2026-02-06T09:58:30.091Z" }, + { url = "https://files.pythonhosted.org/packages/1f/4b/646811ba241bf05da1f0dc6f25764f1c837f78f75b4485a4210c84b79eae/grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:8c7f5e4af5a84d2e96c862b1a65e958a538237e268d5f8203a3a784340975b51", size = 3658763, upload-time = "2026-02-06T09:58:32.875Z" }, + { url = "https://files.pythonhosted.org/packages/45/de/0a5ef3b3e79d1011375f5580dfee3a9c1ccb96c5f5d1c74c8cee777a2483/grpcio_tools-1.78.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:96183e2b44afc3f9a761e9d0f985c3b44e03e8bb98e626241a6cbfb3b6f7e88f", size = 3325116, upload-time = "2026-02-06T09:58:34.894Z" }, + { url = "https://files.pythonhosted.org/packages/95/d2/6391b241ad571bc3e71d63f957c0b1860f0c47932d03c7f300028880f9b8/grpcio_tools-1.78.0-cp312-cp312-win32.whl", hash = "sha256:2250e8424c565a88573f7dc10659a0b92802e68c2a1d57e41872c9b88ccea7a6", size = 993493, upload-time = "2026-02-06T09:58:37.242Z" }, + { url = "https://files.pythonhosted.org/packages/7c/8f/7d0d3a39ecad76ccc136be28274daa660569b244fa7d7d0bbb24d68e5ece/grpcio_tools-1.78.0-cp312-cp312-win_amd64.whl", hash = "sha256:217d1fa29de14d9c567d616ead7cb0fef33cde36010edff5a9390b00d52e5094", size = 1158423, upload-time = "2026-02-06T09:58:40.072Z" }, + { url = "https://files.pythonhosted.org/packages/53/ce/17311fb77530420e2f441e916b347515133e83d21cd6cc77be04ce093d5b/grpcio_tools-1.78.0-cp313-cp313-linux_armv7l.whl", hash = "sha256:2d6de1cc23bdc1baafc23e201b1e48c617b8c1418b4d8e34cebf72141676e5fb", size = 2546284, upload-time = "2026-02-06T09:58:43.073Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d3/79e101483115f0e78223397daef71751b75eba7e92a32060c10aae11ca64/grpcio_tools-1.78.0-cp313-cp313-macosx_11_0_universal2.whl", hash = "sha256:2afeaad88040894c76656202ff832cb151bceb05c0e6907e539d129188b1e456", size = 5705653, upload-time = "2026-02-06T09:58:45.533Z" }, + { url = "https://files.pythonhosted.org/packages/8b/a7/52fa3ccb39ceeee6adc010056eadfbca8198651c113e418dafebbdf2b306/grpcio_tools-1.78.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:33cc593735c93c03d63efe7a8ba25f3c66f16c52f0651910712490244facad72", size = 2592788, upload-time = "2026-02-06T09:58:48.918Z" }, + { url = "https://files.pythonhosted.org/packages/68/08/682ff6bb548225513d73dc9403742d8975439d7469c673bc534b9bbc83a7/grpcio_tools-1.78.0-cp313-cp313-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:2921d7989c4d83b71f03130ab415fa4d66e6693b8b8a1fcbb7a1c67cff19b812", size = 2905157, upload-time = "2026-02-06T09:58:51.478Z" }, + { url = "https://files.pythonhosted.org/packages/b2/66/264f3836a96423b7018e5ada79d62576a6401f6da4e1f4975b18b2be1265/grpcio_tools-1.78.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:e6a0df438e82c804c7b95e3f311c97c2f876dcc36376488d5b736b7bcf5a9b45", size = 2656166, upload-time = "2026-02-06T09:58:54.117Z" }, + { url = "https://files.pythonhosted.org/packages/f3/6b/f108276611522e03e98386b668cc7e575eff6952f2db9caa15b2a3b3e883/grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:e9c6070a9500798225191ef25d0055a15d2c01c9c8f2ee7b681fffa99c98c822", size = 3109110, upload-time = "2026-02-06T09:58:56.891Z" }, + { url = "https://files.pythonhosted.org/packages/6f/c7/cf048dbcd64b3396b3c860a2ffbcc67a8f8c87e736aaa74c2e505a7eee4c/grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:394e8b57d85370a62e5b0a4d64c96fcf7568345c345d8590c821814d227ecf1d", size = 3657863, upload-time = "2026-02-06T09:58:59.176Z" }, + { url = "https://files.pythonhosted.org/packages/b6/37/e2736912c8fda57e2e57a66ea5e0bc8eb9a5fb7ded00e866ad22d50afb08/grpcio_tools-1.78.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a3ef700293ab375e111a2909d87434ed0a0b086adf0ce67a8d9cf12ea7765e63", size = 3324748, upload-time = "2026-02-06T09:59:01.242Z" }, + { url = "https://files.pythonhosted.org/packages/1c/5d/726abc75bb5bfc2841e88ea05896e42f51ca7c30cb56da5c5b63058b3867/grpcio_tools-1.78.0-cp313-cp313-win32.whl", hash = "sha256:6993b960fec43a8d840ee5dc20247ef206c1a19587ea49fe5e6cc3d2a09c1585", size = 993074, upload-time = "2026-02-06T09:59:03.085Z" }, + { url = "https://files.pythonhosted.org/packages/c5/68/91b400bb360faf9b177ffb5540ec1c4d06ca923691ddf0f79e2c9683f4da/grpcio_tools-1.78.0-cp313-cp313-win_amd64.whl", hash = "sha256:275ce3c2978842a8cf9dd88dce954e836e590cf7029649ad5d1145b779039ed5", size = 1158185, upload-time = "2026-02-06T09:59:05.036Z" }, + { url = "https://files.pythonhosted.org/packages/cf/5e/278f3831c8d56bae02e3acc570465648eccf0a6bbedcb1733789ac966803/grpcio_tools-1.78.0-cp314-cp314-linux_armv7l.whl", hash = "sha256:8b080d0d072e6032708a3a91731b808074d7ab02ca8fb9847b6a011fdce64cd9", size = 2546270, upload-time = "2026-02-06T09:59:07.426Z" }, + { url = "https://files.pythonhosted.org/packages/a3/d9/68582f2952b914b60dddc18a2e3f9c6f09af9372b6f6120d6cf3ec7f8b4e/grpcio_tools-1.78.0-cp314-cp314-macosx_11_0_universal2.whl", hash = "sha256:8c0ad8f8f133145cd7008b49cb611a5c6a9d89ab276c28afa17050516e801f79", size = 5705731, upload-time = "2026-02-06T09:59:09.856Z" }, + { url = "https://files.pythonhosted.org/packages/70/68/feb0f9a48818ee1df1e8b644069379a1e6ef5447b9b347c24e96fd258e5d/grpcio_tools-1.78.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.whl", hash = "sha256:2f8ea092a7de74c6359335d36f0674d939a3c7e1a550f4c2c9e80e0226de8fe4", size = 2593896, upload-time = "2026-02-06T09:59:12.23Z" }, + { url = "https://files.pythonhosted.org/packages/1f/08/a430d8d06e1b8d33f3e48d3f0cc28236723af2f35e37bd5c8db05df6c3aa/grpcio_tools-1.78.0-cp314-cp314-manylinux2014_i686.manylinux_2_17_i686.whl", hash = "sha256:da422985e0cac822b41822f43429c19ecb27c81ffe3126d0b74e77edec452608", size = 2905298, upload-time = "2026-02-06T09:59:14.458Z" }, + { url = "https://files.pythonhosted.org/packages/71/0a/348c36a3eae101ca0c090c9c3bc96f2179adf59ee0c9262d11cdc7bfe7db/grpcio_tools-1.78.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.whl", hash = "sha256:4fab1faa3fbcb246263e68da7a8177d73772283f9db063fb8008517480888d26", size = 2656186, upload-time = "2026-02-06T09:59:16.949Z" }, + { url = "https://files.pythonhosted.org/packages/1d/3f/18219f331536fad4af6207ade04142292faa77b5cb4f4463787988963df8/grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:dd9c094f73f734becae3f20f27d4944d3cd8fb68db7338ee6c58e62fc5c3d99f", size = 3109859, upload-time = "2026-02-06T09:59:19.202Z" }, + { url = "https://files.pythonhosted.org/packages/5b/d9/341ea20a44c8e5a3a18acc820b65014c2e3ea5b4f32a53d14864bcd236bc/grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_i686.whl", hash = "sha256:2ed51ce6b833068f6c580b73193fc2ec16468e6bc18354bc2f83a58721195a58", size = 3657915, upload-time = "2026-02-06T09:59:21.839Z" }, + { url = "https://files.pythonhosted.org/packages/fb/f4/5978b0f91611a64371424c109dd0027b247e5b39260abad2eaee66b6aa37/grpcio_tools-1.78.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:05803a5cdafe77c8bdf36aa660ad7a6a1d9e49bc59ce45c1bade2a4698826599", size = 3324724, upload-time = "2026-02-06T09:59:24.402Z" }, + { url = "https://files.pythonhosted.org/packages/b2/80/96a324dba99cfbd20e291baf0b0ae719dbb62b76178c5ce6c788e7331cb1/grpcio_tools-1.78.0-cp314-cp314-win32.whl", hash = "sha256:f7c722e9ce6f11149ac5bddd5056e70aaccfd8168e74e9d34d8b8b588c3f5c7c", size = 1015505, upload-time = "2026-02-06T09:59:26.3Z" }, + { url = "https://files.pythonhosted.org/packages/3b/d1/909e6a05bfd44d46327dc4b8a78beb2bae4fb245ffab2772e350081aaf7e/grpcio_tools-1.78.0-cp314-cp314-win_amd64.whl", hash = "sha256:7d58ade518b546120ec8f0a8e006fc8076ae5df151250ebd7e82e9b5e152c229", size = 1190196, upload-time = "2026-02-06T09:59:28.359Z" }, ] [[package]] @@ -1234,7 +1290,7 @@ wheels = [ [[package]] name = "hatchling" -version = "1.27.0" +version = "1.29.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "packaging" }, @@ -1243,9 +1299,9 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "trove-classifiers" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/8f/8a/cc1debe3514da292094f1c3a700e4ca25442489731ef7c0814358816bb03/hatchling-1.27.0.tar.gz", hash = "sha256:971c296d9819abb3811112fc52c7a9751c8d381898f36533bb16f9791e941fd6", size = 54983, upload-time = "2024-12-15T17:08:11.894Z" } +sdist = { url = "https://files.pythonhosted.org/packages/cf/9c/b4cfe330cd4f49cff17fd771154730555fa4123beb7f292cf0098b4e6c20/hatchling-1.29.0.tar.gz", hash = "sha256:793c31816d952cee405b83488ce001c719f325d9cda69f1fc4cd750527640ea6", size = 55656, upload-time = "2026-02-23T19:42:06.539Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/08/e7/ae38d7a6dfba0533684e0b2136817d667588ae3ec984c1a4e5df5eb88482/hatchling-1.27.0-py3-none-any.whl", hash = "sha256:d3a2f3567c4f926ea39849cdf924c7e99e6686c9c8e288ae1037c8fa2a5d937b", size = 75794, upload-time = "2024-12-15T17:08:10.364Z" }, + { url = "https://files.pythonhosted.org/packages/d3/8a/44032265776062a89171285ede55a0bdaadc8ac00f27f0512a71a9e3e1c8/hatchling-1.29.0-py3-none-any.whl", hash = "sha256:50af9343281f34785fab12da82e445ed987a6efb34fd8c2fc0f6e6630dbcc1b0", size = 76356, upload-time = "2026-02-23T19:42:05.197Z" }, ] [[package]] @@ -1287,63 +1343,41 @@ wheels = [ [[package]] name = "identify" -version = "2.6.12" +version = "2.6.18" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a2/88/d193a27416618628a5eea64e3223acd800b40749a96ffb322a9b55a49ed1/identify-2.6.12.tar.gz", hash = "sha256:d8de45749f1efb108badef65ee8386f0f7bb19a7f26185f74de6367bffbaf0e6", size = 99254, upload-time = "2025-05-23T20:37:53.3Z" } +sdist = { url = "https://files.pythonhosted.org/packages/46/c4/7fb4db12296cdb11893d61c92048fe617ee853f8523b9b296ac03b43757e/identify-2.6.18.tar.gz", hash = "sha256:873ac56a5e3fd63e7438a7ecbc4d91aca692eb3fefa4534db2b7913f3fc352fd", size = 99580, upload-time = "2026-03-15T18:39:50.319Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/7a/cd/18f8da995b658420625f7ef13f037be53ae04ec5ad33f9b718240dcfd48c/identify-2.6.12-py2.py3-none-any.whl", hash = "sha256:ad9672d5a72e0d2ff7c5c8809b62dfa60458626352fb0eb7b55e69bdc45334a2", size = 99145, upload-time = "2025-05-23T20:37:51.495Z" }, + { url = "https://files.pythonhosted.org/packages/46/33/92ef41c6fad0233e41d3d84ba8e8ad18d1780f1e5d99b3c683e6d7f98b63/identify-2.6.18-py2.py3-none-any.whl", hash = "sha256:8db9d3c8ea9079db92cafb0ebf97abdc09d52e97f4dcf773a2e694048b7cd737", size = 99394, upload-time = "2026-03-15T18:39:48.915Z" }, ] [[package]] name = "idna" -version = "3.10" +version = "3.11" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f1/70/7703c29685631f5a7590aa73f1f1d3fa9a380e654b86af429e0934a32f7d/idna-3.10.tar.gz", hash = "sha256:12f65c9b470abda6dc35cf8e63cc574b1c52b11df2c86030af0ac09b01b13ea9", size = 190490, upload-time = "2024-09-15T18:07:39.745Z" } +sdist = { url = "https://files.pythonhosted.org/packages/6f/6d/0703ccc57f3a7233505399edb88de3cbd678da106337b9fcde432b65ed60/idna-3.11.tar.gz", hash = "sha256:795dafcc9c04ed0c1fb032c2aa73654d8e8c5023a7df64a53f39190ada629902", size = 194582, upload-time = "2025-10-12T14:55:20.501Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/76/c6/c88e154df9c4e1a2a66ccf0005a88dfb2650c1dffb6f5ce603dfbd452ce3/idna-3.10-py3-none-any.whl", hash = "sha256:946d195a0d259cbba61165e88e65941f16e9b36ea6ddb97f00452bae8b1287d3", size = 70442, upload-time = "2024-09-15T18:07:37.964Z" }, + { url = "https://files.pythonhosted.org/packages/0e/61/66938bbb5fc52dbdf84594873d5b51fb1f7c7794e9c0f5bd885f30bc507b/idna-3.11-py3-none-any.whl", hash = "sha256:771a87f49d9defaf64091e6e6fe9c18d4833f140bd19464795bc32d966ca37ea", size = 71008, upload-time = "2025-10-12T14:55:18.883Z" }, ] [[package]] name = "importlib-metadata" -version = "8.7.0" +version = "8.7.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "zipp" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/76/66/650a33bd90f786193e4de4b3ad86ea60b53c89b669a5c7be931fac31cdb0/importlib_metadata-8.7.0.tar.gz", hash = "sha256:d13b81ad223b890aa16c5471f2ac3056cf76c5f10f82d6f9292f0b415f389000", size = 56641, upload-time = "2025-04-27T15:29:01.736Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f3/49/3b30cad09e7771a4982d9975a8cbf64f00d4a1ececb53297f1d9a7be1b10/importlib_metadata-8.7.1.tar.gz", hash = "sha256:49fef1ae6440c182052f407c8d34a68f72efc36db9ca90dc0113398f2fdde8bb", size = 57107, upload-time = "2025-12-21T10:00:19.278Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/b0/36bd937216ec521246249be3bf9855081de4c5e06a0c9b4219dbeda50373/importlib_metadata-8.7.0-py3-none-any.whl", hash = "sha256:e5dd1551894c77868a30651cef00984d50e1002d06942a7101d34870c5f02afd", size = 27656, upload-time = "2025-04-27T15:29:00.214Z" }, -] - -[[package]] -name = "inflect" -version = "7.5.0" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "more-itertools" }, - { name = "typeguard" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/78/c6/943357d44a21fd995723d07ccaddd78023eace03c1846049a2645d4324a3/inflect-7.5.0.tar.gz", hash = "sha256:faf19801c3742ed5a05a8ce388e0d8fe1a07f8d095c82201eb904f5d27ad571f", size = 73751, upload-time = "2024-12-28T17:11:18.897Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/8a/eb/427ed2b20a38a4ee29f24dbe4ae2dafab198674fe9a85e3d6adf9e5f5f41/inflect-7.5.0-py3-none-any.whl", hash = "sha256:2aea70e5e70c35d8350b8097396ec155ffd68def678c7ff97f51aa69c1d92344", size = 35197, upload-time = "2024-12-28T17:11:15.931Z" }, + { url = "https://files.pythonhosted.org/packages/fa/5e/f8e9a1d23b9c20a551a8a02ea3637b4642e22c2626e3a13a9a29cdea99eb/importlib_metadata-8.7.1-py3-none-any.whl", hash = "sha256:5a1f80bf1daa489495071efbb095d75a634cf28a8bc299581244063b53176151", size = 27865, upload-time = "2025-12-21T10:00:18.329Z" }, ] [[package]] name = "iniconfig" -version = "2.1.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/f2/97/ebf4da567aa6827c909642694d71c9fcf53e5b504f2d96afea02718862f3/iniconfig-2.1.0.tar.gz", hash = "sha256:3abbd2e30b36733fee78f9c7f7308f2d0050e88f0087fd25c2645f63c773e1c7", size = 4793, upload-time = "2025-03-19T20:09:59.721Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2c/e1/e6716421ea10d38022b952c159d5161ca1193197fb744506875fbb87ea7b/iniconfig-2.1.0-py3-none-any.whl", hash = "sha256:9deba5723312380e77435581c6bf4935c94cbfab9b1ed33ef8d238ea168eb760", size = 6050, upload-time = "2025-03-19T20:10:01.071Z" }, -] - -[[package]] -name = "isort" -version = "6.0.1" +version = "2.3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b8/21/1e2a441f74a653a144224d7d21afe8f4169e6c7c20bb13aec3a2dc3815e0/isort-6.0.1.tar.gz", hash = "sha256:1cb5df28dfbc742e490c5e41bad6da41b805b0a8be7bc93cd0fb2a8a890ac450", size = 821955, upload-time = "2025-02-26T21:13:16.955Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/34/14ca021ce8e5dfedc35312d08ba8bf51fdd999c576889fc2c24cb97f4f10/iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730", size = 20503, upload-time = "2025-10-18T21:55:43.219Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/c1/11/114d0a5f4dabbdcedc1125dee0888514c3c3b16d3e9facad87ed96fad97c/isort-6.0.1-py3-none-any.whl", hash = "sha256:2dc5d7f65c9678d94c88dfc29161a320eec67328bc97aad576874cb4be1e9615", size = 94186, upload-time = "2025-02-26T21:13:14.911Z" }, + { url = "https://files.pythonhosted.org/packages/cb/b1/3846dd7f199d53cb17f49cba7e651e9ce294d8497c8c150530ed11865bb8/iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12", size = 7484, upload-time = "2025-10-18T21:55:41.639Z" }, ] [[package]] @@ -1358,6 +1392,15 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/62/a1/3d680cbfd5f4b8f15abc1d571870c5fc3e594bb582bc3b64ea099db13e56/jinja2-3.1.6-py3-none-any.whl", hash = "sha256:85ece4451f492d0c13c5dd7c13a64681a86afae63a5f347908daf103ce6d2f67", size = 134899, upload-time = "2025-03-05T20:05:00.369Z" }, ] +[[package]] +name = "json-rpc" +version = "1.15.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/6d/9e/59f4a5b7855ced7346ebf40a2e9a8942863f644378d956f68bcef2c88b90/json-rpc-1.15.0.tar.gz", hash = "sha256:e6441d56c1dcd54241c937d0a2dcd193bdf0bdc539b5316524713f554b7f85b9", size = 28854, upload-time = "2023-06-11T09:45:49.078Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/94/9e/820c4b086ad01ba7d77369fb8b11470a01fac9b4977f02e18659cf378b6b/json_rpc-1.15.0-py2.py3-none-any.whl", hash = "sha256:4a4668bbbe7116feb4abbd0f54e64a4adcf4b8f648f19ffa0848ad0f6606a9bf", size = 39450, upload-time = "2023-06-11T09:45:47.136Z" }, +] + [[package]] name = "librt" version = "0.8.1" @@ -1443,76 +1486,106 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/b2/c8/d148e041732d631fc76036f8b30fae4e77b027a1e95b7a84bb522481a940/librt-0.8.1-cp314-cp314t-win_arm64.whl", hash = "sha256:bf512a71a23504ed08103a13c941f763db13fb11177beb3d9244c98c29fb4a61", size = 48755, upload-time = "2026-02-17T16:12:47.943Z" }, ] +[[package]] +name = "mako" +version = "1.3.10" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "markupsafe" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9e/38/bd5b78a920a64d708fe6bc8e0a2c075e1389d53bef8413725c63ba041535/mako-1.3.10.tar.gz", hash = "sha256:99579a6f39583fa7e5630a28c3c1f440e4e97a414b80372649c0ce338da2ea28", size = 392474, upload-time = "2025-04-10T12:44:31.16Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/87/fb/99f81ac72ae23375f22b7afdb7642aba97c00a713c217124420147681a2f/mako-1.3.10-py3-none-any.whl", hash = "sha256:baef24a52fc4fc514a0887ac600f9f1cff3d82c61d4d700a1fa84d597b88db59", size = 78509, upload-time = "2025-04-10T12:50:53.297Z" }, +] + [[package]] name = "markupsafe" -version = "3.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b2/97/5d42485e71dfc078108a86d6de8fa46db44a1a9295e89c5d6d4a06e23a62/markupsafe-3.0.2.tar.gz", hash = "sha256:ee55d3edf80167e48ea11a923c7386f4669df67d7994554387f84e7d8b0a2bf0", size = 20537, upload-time = "2024-10-18T15:21:54.129Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/04/90/d08277ce111dd22f77149fd1a5d4653eeb3b3eaacbdfcbae5afb2600eebd/MarkupSafe-3.0.2-cp310-cp310-macosx_10_9_universal2.whl", hash = "sha256:7e94c425039cde14257288fd61dcfb01963e658efbc0ff54f5306b06054700f8", size = 14357, upload-time = "2024-10-18T15:20:51.44Z" }, - { url = "https://files.pythonhosted.org/packages/04/e1/6e2194baeae0bca1fae6629dc0cbbb968d4d941469cbab11a3872edff374/MarkupSafe-3.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:9e2d922824181480953426608b81967de705c3cef4d1af983af849d7bd619158", size = 12393, upload-time = "2024-10-18T15:20:52.426Z" }, - { url = "https://files.pythonhosted.org/packages/1d/69/35fa85a8ece0a437493dc61ce0bb6d459dcba482c34197e3efc829aa357f/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:38a9ef736c01fccdd6600705b09dc574584b89bea478200c5fbf112a6b0d5579", size = 21732, upload-time = "2024-10-18T15:20:53.578Z" }, - { url = "https://files.pythonhosted.org/packages/22/35/137da042dfb4720b638d2937c38a9c2df83fe32d20e8c8f3185dbfef05f7/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:bbcb445fa71794da8f178f0f6d66789a28d7319071af7a496d4d507ed566270d", size = 20866, upload-time = "2024-10-18T15:20:55.06Z" }, - { url = "https://files.pythonhosted.org/packages/29/28/6d029a903727a1b62edb51863232152fd335d602def598dade38996887f0/MarkupSafe-3.0.2-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:57cb5a3cf367aeb1d316576250f65edec5bb3be939e9247ae594b4bcbc317dfb", size = 20964, upload-time = "2024-10-18T15:20:55.906Z" }, - { url = "https://files.pythonhosted.org/packages/cc/cd/07438f95f83e8bc028279909d9c9bd39e24149b0d60053a97b2bc4f8aa51/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:3809ede931876f5b2ec92eef964286840ed3540dadf803dd570c3b7e13141a3b", size = 21977, upload-time = "2024-10-18T15:20:57.189Z" }, - { url = "https://files.pythonhosted.org/packages/29/01/84b57395b4cc062f9c4c55ce0df7d3108ca32397299d9df00fedd9117d3d/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_i686.whl", hash = "sha256:e07c3764494e3776c602c1e78e298937c3315ccc9043ead7e685b7f2b8d47b3c", size = 21366, upload-time = "2024-10-18T15:20:58.235Z" }, - { url = "https://files.pythonhosted.org/packages/bd/6e/61ebf08d8940553afff20d1fb1ba7294b6f8d279df9fd0c0db911b4bbcfd/MarkupSafe-3.0.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b424c77b206d63d500bcb69fa55ed8d0e6a3774056bdc4839fc9298a7edca171", size = 21091, upload-time = "2024-10-18T15:20:59.235Z" }, - { url = "https://files.pythonhosted.org/packages/11/23/ffbf53694e8c94ebd1e7e491de185124277964344733c45481f32ede2499/MarkupSafe-3.0.2-cp310-cp310-win32.whl", hash = "sha256:fcabf5ff6eea076f859677f5f0b6b5c1a51e70a376b0579e0eadef8db48c6b50", size = 15065, upload-time = "2024-10-18T15:21:00.307Z" }, - { url = "https://files.pythonhosted.org/packages/44/06/e7175d06dd6e9172d4a69a72592cb3f7a996a9c396eee29082826449bbc3/MarkupSafe-3.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:6af100e168aa82a50e186c82875a5893c5597a0c1ccdb0d8b40240b1f28b969a", size = 15514, upload-time = "2024-10-18T15:21:01.122Z" }, - { url = "https://files.pythonhosted.org/packages/6b/28/bbf83e3f76936960b850435576dd5e67034e200469571be53f69174a2dfd/MarkupSafe-3.0.2-cp311-cp311-macosx_10_9_universal2.whl", hash = "sha256:9025b4018f3a1314059769c7bf15441064b2207cb3f065e6ea1e7359cb46db9d", size = 14353, upload-time = "2024-10-18T15:21:02.187Z" }, - { url = "https://files.pythonhosted.org/packages/6c/30/316d194b093cde57d448a4c3209f22e3046c5bb2fb0820b118292b334be7/MarkupSafe-3.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:93335ca3812df2f366e80509ae119189886b0f3c2b81325d39efdb84a1e2ae93", size = 12392, upload-time = "2024-10-18T15:21:02.941Z" }, - { url = "https://files.pythonhosted.org/packages/f2/96/9cdafba8445d3a53cae530aaf83c38ec64c4d5427d975c974084af5bc5d2/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:2cb8438c3cbb25e220c2ab33bb226559e7afb3baec11c4f218ffa7308603c832", size = 23984, upload-time = "2024-10-18T15:21:03.953Z" }, - { url = "https://files.pythonhosted.org/packages/f1/a4/aefb044a2cd8d7334c8a47d3fb2c9f328ac48cb349468cc31c20b539305f/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:a123e330ef0853c6e822384873bef7507557d8e4a082961e1defa947aa59ba84", size = 23120, upload-time = "2024-10-18T15:21:06.495Z" }, - { url = "https://files.pythonhosted.org/packages/8d/21/5e4851379f88f3fad1de30361db501300d4f07bcad047d3cb0449fc51f8c/MarkupSafe-3.0.2-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:1e084f686b92e5b83186b07e8a17fc09e38fff551f3602b249881fec658d3eca", size = 23032, upload-time = "2024-10-18T15:21:07.295Z" }, - { url = "https://files.pythonhosted.org/packages/00/7b/e92c64e079b2d0d7ddf69899c98842f3f9a60a1ae72657c89ce2655c999d/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:d8213e09c917a951de9d09ecee036d5c7d36cb6cb7dbaece4c71a60d79fb9798", size = 24057, upload-time = "2024-10-18T15:21:08.073Z" }, - { url = "https://files.pythonhosted.org/packages/f9/ac/46f960ca323037caa0a10662ef97d0a4728e890334fc156b9f9e52bcc4ca/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:5b02fb34468b6aaa40dfc198d813a641e3a63b98c2b05a16b9f80b7ec314185e", size = 23359, upload-time = "2024-10-18T15:21:09.318Z" }, - { url = "https://files.pythonhosted.org/packages/69/84/83439e16197337b8b14b6a5b9c2105fff81d42c2a7c5b58ac7b62ee2c3b1/MarkupSafe-3.0.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:0bff5e0ae4ef2e1ae4fdf2dfd5b76c75e5c2fa4132d05fc1b0dabcd20c7e28c4", size = 23306, upload-time = "2024-10-18T15:21:10.185Z" }, - { url = "https://files.pythonhosted.org/packages/9a/34/a15aa69f01e2181ed8d2b685c0d2f6655d5cca2c4db0ddea775e631918cd/MarkupSafe-3.0.2-cp311-cp311-win32.whl", hash = "sha256:6c89876f41da747c8d3677a2b540fb32ef5715f97b66eeb0c6b66f5e3ef6f59d", size = 15094, upload-time = "2024-10-18T15:21:11.005Z" }, - { url = "https://files.pythonhosted.org/packages/da/b8/3a3bd761922d416f3dc5d00bfbed11f66b1ab89a0c2b6e887240a30b0f6b/MarkupSafe-3.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:70a87b411535ccad5ef2f1df5136506a10775d267e197e4cf531ced10537bd6b", size = 15521, upload-time = "2024-10-18T15:21:12.911Z" }, - { url = "https://files.pythonhosted.org/packages/22/09/d1f21434c97fc42f09d290cbb6350d44eb12f09cc62c9476effdb33a18aa/MarkupSafe-3.0.2-cp312-cp312-macosx_10_13_universal2.whl", hash = "sha256:9778bd8ab0a994ebf6f84c2b949e65736d5575320a17ae8984a77fab08db94cf", size = 14274, upload-time = "2024-10-18T15:21:13.777Z" }, - { url = "https://files.pythonhosted.org/packages/6b/b0/18f76bba336fa5aecf79d45dcd6c806c280ec44538b3c13671d49099fdd0/MarkupSafe-3.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:846ade7b71e3536c4e56b386c2a47adf5741d2d8b94ec9dc3e92e5e1ee1e2225", size = 12348, upload-time = "2024-10-18T15:21:14.822Z" }, - { url = "https://files.pythonhosted.org/packages/e0/25/dd5c0f6ac1311e9b40f4af06c78efde0f3b5cbf02502f8ef9501294c425b/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1c99d261bd2d5f6b59325c92c73df481e05e57f19837bdca8413b9eac4bd8028", size = 24149, upload-time = "2024-10-18T15:21:15.642Z" }, - { url = "https://files.pythonhosted.org/packages/f3/f0/89e7aadfb3749d0f52234a0c8c7867877876e0a20b60e2188e9850794c17/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:e17c96c14e19278594aa4841ec148115f9c7615a47382ecb6b82bd8fea3ab0c8", size = 23118, upload-time = "2024-10-18T15:21:17.133Z" }, - { url = "https://files.pythonhosted.org/packages/d5/da/f2eeb64c723f5e3777bc081da884b414671982008c47dcc1873d81f625b6/MarkupSafe-3.0.2-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:88416bd1e65dcea10bc7569faacb2c20ce071dd1f87539ca2ab364bf6231393c", size = 22993, upload-time = "2024-10-18T15:21:18.064Z" }, - { url = "https://files.pythonhosted.org/packages/da/0e/1f32af846df486dce7c227fe0f2398dc7e2e51d4a370508281f3c1c5cddc/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:2181e67807fc2fa785d0592dc2d6206c019b9502410671cc905d132a92866557", size = 24178, upload-time = "2024-10-18T15:21:18.859Z" }, - { url = "https://files.pythonhosted.org/packages/c4/f6/bb3ca0532de8086cbff5f06d137064c8410d10779c4c127e0e47d17c0b71/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:52305740fe773d09cffb16f8ed0427942901f00adedac82ec8b67752f58a1b22", size = 23319, upload-time = "2024-10-18T15:21:19.671Z" }, - { url = "https://files.pythonhosted.org/packages/a2/82/8be4c96ffee03c5b4a034e60a31294daf481e12c7c43ab8e34a1453ee48b/MarkupSafe-3.0.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:ad10d3ded218f1039f11a75f8091880239651b52e9bb592ca27de44eed242a48", size = 23352, upload-time = "2024-10-18T15:21:20.971Z" }, - { url = "https://files.pythonhosted.org/packages/51/ae/97827349d3fcffee7e184bdf7f41cd6b88d9919c80f0263ba7acd1bbcb18/MarkupSafe-3.0.2-cp312-cp312-win32.whl", hash = "sha256:0f4ca02bea9a23221c0182836703cbf8930c5e9454bacce27e767509fa286a30", size = 15097, upload-time = "2024-10-18T15:21:22.646Z" }, - { url = "https://files.pythonhosted.org/packages/c1/80/a61f99dc3a936413c3ee4e1eecac96c0da5ed07ad56fd975f1a9da5bc630/MarkupSafe-3.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:8e06879fc22a25ca47312fbe7c8264eb0b662f6db27cb2d3bbbc74b1df4b9b87", size = 15601, upload-time = "2024-10-18T15:21:23.499Z" }, - { url = "https://files.pythonhosted.org/packages/83/0e/67eb10a7ecc77a0c2bbe2b0235765b98d164d81600746914bebada795e97/MarkupSafe-3.0.2-cp313-cp313-macosx_10_13_universal2.whl", hash = "sha256:ba9527cdd4c926ed0760bc301f6728ef34d841f405abf9d4f959c478421e4efd", size = 14274, upload-time = "2024-10-18T15:21:24.577Z" }, - { url = "https://files.pythonhosted.org/packages/2b/6d/9409f3684d3335375d04e5f05744dfe7e9f120062c9857df4ab490a1031a/MarkupSafe-3.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:f8b3d067f2e40fe93e1ccdd6b2e1d16c43140e76f02fb1319a05cf2b79d99430", size = 12352, upload-time = "2024-10-18T15:21:25.382Z" }, - { url = "https://files.pythonhosted.org/packages/d2/f5/6eadfcd3885ea85fe2a7c128315cc1bb7241e1987443d78c8fe712d03091/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:569511d3b58c8791ab4c2e1285575265991e6d8f8700c7be0e88f86cb0672094", size = 24122, upload-time = "2024-10-18T15:21:26.199Z" }, - { url = "https://files.pythonhosted.org/packages/0c/91/96cf928db8236f1bfab6ce15ad070dfdd02ed88261c2afafd4b43575e9e9/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:15ab75ef81add55874e7ab7055e9c397312385bd9ced94920f2802310c930396", size = 23085, upload-time = "2024-10-18T15:21:27.029Z" }, - { url = "https://files.pythonhosted.org/packages/c2/cf/c9d56af24d56ea04daae7ac0940232d31d5a8354f2b457c6d856b2057d69/MarkupSafe-3.0.2-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:f3818cb119498c0678015754eba762e0d61e5b52d34c8b13d770f0719f7b1d79", size = 22978, upload-time = "2024-10-18T15:21:27.846Z" }, - { url = "https://files.pythonhosted.org/packages/2a/9f/8619835cd6a711d6272d62abb78c033bda638fdc54c4e7f4272cf1c0962b/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:cdb82a876c47801bb54a690c5ae105a46b392ac6099881cdfb9f6e95e4014c6a", size = 24208, upload-time = "2024-10-18T15:21:28.744Z" }, - { url = "https://files.pythonhosted.org/packages/f9/bf/176950a1792b2cd2102b8ffeb5133e1ed984547b75db47c25a67d3359f77/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:cabc348d87e913db6ab4aa100f01b08f481097838bdddf7c7a84b7575b7309ca", size = 23357, upload-time = "2024-10-18T15:21:29.545Z" }, - { url = "https://files.pythonhosted.org/packages/ce/4f/9a02c1d335caabe5c4efb90e1b6e8ee944aa245c1aaaab8e8a618987d816/MarkupSafe-3.0.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:444dcda765c8a838eaae23112db52f1efaf750daddb2d9ca300bcae1039adc5c", size = 23344, upload-time = "2024-10-18T15:21:30.366Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/c271b57db36f748f0e04a759ace9f8f759ccf22b4960c270c78a394f58be/MarkupSafe-3.0.2-cp313-cp313-win32.whl", hash = "sha256:bcf3e58998965654fdaff38e58584d8937aa3096ab5354d493c77d1fdd66d7a1", size = 15101, upload-time = "2024-10-18T15:21:31.207Z" }, - { url = "https://files.pythonhosted.org/packages/29/88/07df22d2dd4df40aba9f3e402e6dc1b8ee86297dddbad4872bd5e7b0094f/MarkupSafe-3.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:e6a2a455bd412959b57a172ce6328d2dd1f01cb2135efda2e4576e8a23fa3b0f", size = 15603, upload-time = "2024-10-18T15:21:32.032Z" }, - { url = "https://files.pythonhosted.org/packages/62/6a/8b89d24db2d32d433dffcd6a8779159da109842434f1dd2f6e71f32f738c/MarkupSafe-3.0.2-cp313-cp313t-macosx_10_13_universal2.whl", hash = "sha256:b5a6b3ada725cea8a5e634536b1b01c30bcdcd7f9c6fff4151548d5bf6b3a36c", size = 14510, upload-time = "2024-10-18T15:21:33.625Z" }, - { url = "https://files.pythonhosted.org/packages/7a/06/a10f955f70a2e5a9bf78d11a161029d278eeacbd35ef806c3fd17b13060d/MarkupSafe-3.0.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a904af0a6162c73e3edcb969eeeb53a63ceeb5d8cf642fade7d39e7963a22ddb", size = 12486, upload-time = "2024-10-18T15:21:34.611Z" }, - { url = "https://files.pythonhosted.org/packages/34/cf/65d4a571869a1a9078198ca28f39fba5fbb910f952f9dbc5220afff9f5e6/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4aa4e5faecf353ed117801a068ebab7b7e09ffb6e1d5e412dc852e0da018126c", size = 25480, upload-time = "2024-10-18T15:21:35.398Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e3/90e9651924c430b885468b56b3d597cabf6d72be4b24a0acd1fa0e12af67/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:c0ef13eaeee5b615fb07c9a7dadb38eac06a0608b41570d8ade51c56539e509d", size = 23914, upload-time = "2024-10-18T15:21:36.231Z" }, - { url = "https://files.pythonhosted.org/packages/66/8c/6c7cf61f95d63bb866db39085150df1f2a5bd3335298f14a66b48e92659c/MarkupSafe-3.0.2-cp313-cp313t-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d16a81a06776313e817c951135cf7340a3e91e8c1ff2fac444cfd75fffa04afe", size = 23796, upload-time = "2024-10-18T15:21:37.073Z" }, - { url = "https://files.pythonhosted.org/packages/bb/35/cbe9238ec3f47ac9a7c8b3df7a808e7cb50fe149dc7039f5f454b3fba218/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:6381026f158fdb7c72a168278597a5e3a5222e83ea18f543112b2662a9b699c5", size = 25473, upload-time = "2024-10-18T15:21:37.932Z" }, - { url = "https://files.pythonhosted.org/packages/e6/32/7621a4382488aa283cc05e8984a9c219abad3bca087be9ec77e89939ded9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_i686.whl", hash = "sha256:3d79d162e7be8f996986c064d1c7c817f6df3a77fe3d6859f6f9e7be4b8c213a", size = 24114, upload-time = "2024-10-18T15:21:39.799Z" }, - { url = "https://files.pythonhosted.org/packages/0d/80/0985960e4b89922cb5a0bac0ed39c5b96cbc1a536a99f30e8c220a996ed9/MarkupSafe-3.0.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:131a3c7689c85f5ad20f9f6fb1b866f402c445b220c19fe4308c0b147ccd2ad9", size = 24098, upload-time = "2024-10-18T15:21:40.813Z" }, - { url = "https://files.pythonhosted.org/packages/82/78/fedb03c7d5380df2427038ec8d973587e90561b2d90cd472ce9254cf348b/MarkupSafe-3.0.2-cp313-cp313t-win32.whl", hash = "sha256:ba8062ed2cf21c07a9e295d5b8a2a5ce678b913b45fdf68c32d95d6c1291e0b6", size = 15208, upload-time = "2024-10-18T15:21:41.814Z" }, - { url = "https://files.pythonhosted.org/packages/4f/65/6079a46068dfceaeabb5dcad6d674f5f5c61a6fa5673746f42a9f4c233b3/MarkupSafe-3.0.2-cp313-cp313t-win_amd64.whl", hash = "sha256:e444a31f8db13eb18ada366ab3cf45fd4b31e4db1236a4448f68778c1d1a5a2f", size = 15739, upload-time = "2024-10-18T15:21:42.784Z" }, -] - -[[package]] -name = "more-itertools" -version = "10.7.0" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ce/a0/834b0cebabbfc7e311f30b46c8188790a37f89fc8d756660346fe5abfd09/more_itertools-10.7.0.tar.gz", hash = "sha256:9fddd5403be01a94b204faadcff459ec3568cf110265d3c54323e1e866ad29d3", size = 127671, upload-time = "2025-04-22T14:17:41.838Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/2b/9f/7ba6f94fc1e9ac3d2b853fdff3035fb2fa5afbed898c4a72b8a020610594/more_itertools-10.7.0-py3-none-any.whl", hash = "sha256:d43980384673cb07d2f7d2d918c616b30c659c089ee23953f601d6609c67510e", size = 65278, upload-time = "2025-04-22T14:17:40.49Z" }, +version = "3.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/7e/99/7690b6d4034fffd95959cbe0c02de8deb3098cc577c67bb6a24fe5d7caa7/markupsafe-3.0.3.tar.gz", hash = "sha256:722695808f4b6457b320fdc131280796bdceb04ab50fe1795cd540799ebe1698", size = 80313, upload-time = "2025-09-27T18:37:40.426Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/e8/4b/3541d44f3937ba468b75da9eebcae497dcf67adb65caa16760b0a6807ebb/markupsafe-3.0.3-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:2f981d352f04553a7171b8e44369f2af4055f888dfb147d55e42d29e29e74559", size = 11631, upload-time = "2025-09-27T18:36:05.558Z" }, + { url = "https://files.pythonhosted.org/packages/98/1b/fbd8eed11021cabd9226c37342fa6ca4e8a98d8188a8d9b66740494960e4/markupsafe-3.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:e1c1493fb6e50ab01d20a22826e57520f1284df32f2d8601fdd90b6304601419", size = 12057, upload-time = "2025-09-27T18:36:07.165Z" }, + { url = "https://files.pythonhosted.org/packages/40/01/e560d658dc0bb8ab762670ece35281dec7b6c1b33f5fbc09ebb57a185519/markupsafe-3.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1ba88449deb3de88bd40044603fafffb7bc2b055d626a330323a9ed736661695", size = 22050, upload-time = "2025-09-27T18:36:08.005Z" }, + { url = "https://files.pythonhosted.org/packages/af/cd/ce6e848bbf2c32314c9b237839119c5a564a59725b53157c856e90937b7a/markupsafe-3.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:f42d0984e947b8adf7dd6dde396e720934d12c506ce84eea8476409563607591", size = 20681, upload-time = "2025-09-27T18:36:08.881Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2a/b5c12c809f1c3045c4d580b035a743d12fcde53cf685dbc44660826308da/markupsafe-3.0.3-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:c0c0b3ade1c0b13b936d7970b1d37a57acde9199dc2aecc4c336773e1d86049c", size = 20705, upload-time = "2025-09-27T18:36:10.131Z" }, + { url = "https://files.pythonhosted.org/packages/cf/e3/9427a68c82728d0a88c50f890d0fc072a1484de2f3ac1ad0bfc1a7214fd5/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:0303439a41979d9e74d18ff5e2dd8c43ed6c6001fd40e5bf2e43f7bd9bbc523f", size = 21524, upload-time = "2025-09-27T18:36:11.324Z" }, + { url = "https://files.pythonhosted.org/packages/bc/36/23578f29e9e582a4d0278e009b38081dbe363c5e7165113fad546918a232/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d2ee202e79d8ed691ceebae8e0486bd9a2cd4794cec4824e1c99b6f5009502f6", size = 20282, upload-time = "2025-09-27T18:36:12.573Z" }, + { url = "https://files.pythonhosted.org/packages/56/21/dca11354e756ebd03e036bd8ad58d6d7168c80ce1fe5e75218e4945cbab7/markupsafe-3.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:177b5253b2834fe3678cb4a5f0059808258584c559193998be2601324fdeafb1", size = 20745, upload-time = "2025-09-27T18:36:13.504Z" }, + { url = "https://files.pythonhosted.org/packages/87/99/faba9369a7ad6e4d10b6a5fbf71fa2a188fe4a593b15f0963b73859a1bbd/markupsafe-3.0.3-cp310-cp310-win32.whl", hash = "sha256:2a15a08b17dd94c53a1da0438822d70ebcd13f8c3a95abe3a9ef9f11a94830aa", size = 14571, upload-time = "2025-09-27T18:36:14.779Z" }, + { url = "https://files.pythonhosted.org/packages/d6/25/55dc3ab959917602c96985cb1253efaa4ff42f71194bddeb61eb7278b8be/markupsafe-3.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:c4ffb7ebf07cfe8931028e3e4c85f0357459a3f9f9490886198848f4fa002ec8", size = 15056, upload-time = "2025-09-27T18:36:16.125Z" }, + { url = "https://files.pythonhosted.org/packages/d0/9e/0a02226640c255d1da0b8d12e24ac2aa6734da68bff14c05dd53b94a0fc3/markupsafe-3.0.3-cp310-cp310-win_arm64.whl", hash = "sha256:e2103a929dfa2fcaf9bb4e7c091983a49c9ac3b19c9061b6d5427dd7d14d81a1", size = 13932, upload-time = "2025-09-27T18:36:17.311Z" }, + { url = "https://files.pythonhosted.org/packages/08/db/fefacb2136439fc8dd20e797950e749aa1f4997ed584c62cfb8ef7c2be0e/markupsafe-3.0.3-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:1cc7ea17a6824959616c525620e387f6dd30fec8cb44f649e31712db02123dad", size = 11631, upload-time = "2025-09-27T18:36:18.185Z" }, + { url = "https://files.pythonhosted.org/packages/e1/2e/5898933336b61975ce9dc04decbc0a7f2fee78c30353c5efba7f2d6ff27a/markupsafe-3.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:4bd4cd07944443f5a265608cc6aab442e4f74dff8088b0dfc8238647b8f6ae9a", size = 12058, upload-time = "2025-09-27T18:36:19.444Z" }, + { url = "https://files.pythonhosted.org/packages/1d/09/adf2df3699d87d1d8184038df46a9c80d78c0148492323f4693df54e17bb/markupsafe-3.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6b5420a1d9450023228968e7e6a9ce57f65d148ab56d2313fcd589eee96a7a50", size = 24287, upload-time = "2025-09-27T18:36:20.768Z" }, + { url = "https://files.pythonhosted.org/packages/30/ac/0273f6fcb5f42e314c6d8cd99effae6a5354604d461b8d392b5ec9530a54/markupsafe-3.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0bf2a864d67e76e5c9a34dc26ec616a66b9888e25e7b9460e1c76d3293bd9dbf", size = 22940, upload-time = "2025-09-27T18:36:22.249Z" }, + { url = "https://files.pythonhosted.org/packages/19/ae/31c1be199ef767124c042c6c3e904da327a2f7f0cd63a0337e1eca2967a8/markupsafe-3.0.3-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:bc51efed119bc9cfdf792cdeaa4d67e8f6fcccab66ed4bfdd6bde3e59bfcbb2f", size = 21887, upload-time = "2025-09-27T18:36:23.535Z" }, + { url = "https://files.pythonhosted.org/packages/b2/76/7edcab99d5349a4532a459e1fe64f0b0467a3365056ae550d3bcf3f79e1e/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:068f375c472b3e7acbe2d5318dea141359e6900156b5b2ba06a30b169086b91a", size = 23692, upload-time = "2025-09-27T18:36:24.823Z" }, + { url = "https://files.pythonhosted.org/packages/a4/28/6e74cdd26d7514849143d69f0bf2399f929c37dc2b31e6829fd2045b2765/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:7be7b61bb172e1ed687f1754f8e7484f1c8019780f6f6b0786e76bb01c2ae115", size = 21471, upload-time = "2025-09-27T18:36:25.95Z" }, + { url = "https://files.pythonhosted.org/packages/62/7e/a145f36a5c2945673e590850a6f8014318d5577ed7e5920a4b3448e0865d/markupsafe-3.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f9e130248f4462aaa8e2552d547f36ddadbeaa573879158d721bbd33dfe4743a", size = 22923, upload-time = "2025-09-27T18:36:27.109Z" }, + { url = "https://files.pythonhosted.org/packages/0f/62/d9c46a7f5c9adbeeeda52f5b8d802e1094e9717705a645efc71b0913a0a8/markupsafe-3.0.3-cp311-cp311-win32.whl", hash = "sha256:0db14f5dafddbb6d9208827849fad01f1a2609380add406671a26386cdf15a19", size = 14572, upload-time = "2025-09-27T18:36:28.045Z" }, + { url = "https://files.pythonhosted.org/packages/83/8a/4414c03d3f891739326e1783338e48fb49781cc915b2e0ee052aa490d586/markupsafe-3.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:de8a88e63464af587c950061a5e6a67d3632e36df62b986892331d4620a35c01", size = 15077, upload-time = "2025-09-27T18:36:29.025Z" }, + { url = "https://files.pythonhosted.org/packages/35/73/893072b42e6862f319b5207adc9ae06070f095b358655f077f69a35601f0/markupsafe-3.0.3-cp311-cp311-win_arm64.whl", hash = "sha256:3b562dd9e9ea93f13d53989d23a7e775fdfd1066c33494ff43f5418bc8c58a5c", size = 13876, upload-time = "2025-09-27T18:36:29.954Z" }, + { url = "https://files.pythonhosted.org/packages/5a/72/147da192e38635ada20e0a2e1a51cf8823d2119ce8883f7053879c2199b5/markupsafe-3.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:d53197da72cc091b024dd97249dfc7794d6a56530370992a5e1a08983ad9230e", size = 11615, upload-time = "2025-09-27T18:36:30.854Z" }, + { url = "https://files.pythonhosted.org/packages/9a/81/7e4e08678a1f98521201c3079f77db69fb552acd56067661f8c2f534a718/markupsafe-3.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:1872df69a4de6aead3491198eaf13810b565bdbeec3ae2dc8780f14458ec73ce", size = 12020, upload-time = "2025-09-27T18:36:31.971Z" }, + { url = "https://files.pythonhosted.org/packages/1e/2c/799f4742efc39633a1b54a92eec4082e4f815314869865d876824c257c1e/markupsafe-3.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3a7e8ae81ae39e62a41ec302f972ba6ae23a5c5396c8e60113e9066ef893da0d", size = 24332, upload-time = "2025-09-27T18:36:32.813Z" }, + { url = "https://files.pythonhosted.org/packages/3c/2e/8d0c2ab90a8c1d9a24f0399058ab8519a3279d1bd4289511d74e909f060e/markupsafe-3.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d6dd0be5b5b189d31db7cda48b91d7e0a9795f31430b7f271219ab30f1d3ac9d", size = 22947, upload-time = "2025-09-27T18:36:33.86Z" }, + { url = "https://files.pythonhosted.org/packages/2c/54/887f3092a85238093a0b2154bd629c89444f395618842e8b0c41783898ea/markupsafe-3.0.3-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:94c6f0bb423f739146aec64595853541634bde58b2135f27f61c1ffd1cd4d16a", size = 21962, upload-time = "2025-09-27T18:36:35.099Z" }, + { url = "https://files.pythonhosted.org/packages/c9/2f/336b8c7b6f4a4d95e91119dc8521402461b74a485558d8f238a68312f11c/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:be8813b57049a7dc738189df53d69395eba14fb99345e0a5994914a3864c8a4b", size = 23760, upload-time = "2025-09-27T18:36:36.001Z" }, + { url = "https://files.pythonhosted.org/packages/32/43/67935f2b7e4982ffb50a4d169b724d74b62a3964bc1a9a527f5ac4f1ee2b/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:83891d0e9fb81a825d9a6d61e3f07550ca70a076484292a70fde82c4b807286f", size = 21529, upload-time = "2025-09-27T18:36:36.906Z" }, + { url = "https://files.pythonhosted.org/packages/89/e0/4486f11e51bbba8b0c041098859e869e304d1c261e59244baa3d295d47b7/markupsafe-3.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:77f0643abe7495da77fb436f50f8dab76dbc6e5fd25d39589a0f1fe6548bfa2b", size = 23015, upload-time = "2025-09-27T18:36:37.868Z" }, + { url = "https://files.pythonhosted.org/packages/2f/e1/78ee7a023dac597a5825441ebd17170785a9dab23de95d2c7508ade94e0e/markupsafe-3.0.3-cp312-cp312-win32.whl", hash = "sha256:d88b440e37a16e651bda4c7c2b930eb586fd15ca7406cb39e211fcff3bf3017d", size = 14540, upload-time = "2025-09-27T18:36:38.761Z" }, + { url = "https://files.pythonhosted.org/packages/aa/5b/bec5aa9bbbb2c946ca2733ef9c4ca91c91b6a24580193e891b5f7dbe8e1e/markupsafe-3.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:26a5784ded40c9e318cfc2bdb30fe164bdb8665ded9cd64d500a34fb42067b1c", size = 15105, upload-time = "2025-09-27T18:36:39.701Z" }, + { url = "https://files.pythonhosted.org/packages/e5/f1/216fc1bbfd74011693a4fd837e7026152e89c4bcf3e77b6692fba9923123/markupsafe-3.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:35add3b638a5d900e807944a078b51922212fb3dedb01633a8defc4b01a3c85f", size = 13906, upload-time = "2025-09-27T18:36:40.689Z" }, + { url = "https://files.pythonhosted.org/packages/38/2f/907b9c7bbba283e68f20259574b13d005c121a0fa4c175f9bed27c4597ff/markupsafe-3.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e1cf1972137e83c5d4c136c43ced9ac51d0e124706ee1c8aa8532c1287fa8795", size = 11622, upload-time = "2025-09-27T18:36:41.777Z" }, + { url = "https://files.pythonhosted.org/packages/9c/d9/5f7756922cdd676869eca1c4e3c0cd0df60ed30199ffd775e319089cb3ed/markupsafe-3.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:116bb52f642a37c115f517494ea5feb03889e04df47eeff5b130b1808ce7c219", size = 12029, upload-time = "2025-09-27T18:36:43.257Z" }, + { url = "https://files.pythonhosted.org/packages/00/07/575a68c754943058c78f30db02ee03a64b3c638586fba6a6dd56830b30a3/markupsafe-3.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:133a43e73a802c5562be9bbcd03d090aa5a1fe899db609c29e8c8d815c5f6de6", size = 24374, upload-time = "2025-09-27T18:36:44.508Z" }, + { url = "https://files.pythonhosted.org/packages/a9/21/9b05698b46f218fc0e118e1f8168395c65c8a2c750ae2bab54fc4bd4e0e8/markupsafe-3.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ccfcd093f13f0f0b7fdd0f198b90053bf7b2f02a3927a30e63f3ccc9df56b676", size = 22980, upload-time = "2025-09-27T18:36:45.385Z" }, + { url = "https://files.pythonhosted.org/packages/7f/71/544260864f893f18b6827315b988c146b559391e6e7e8f7252839b1b846a/markupsafe-3.0.3-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:509fa21c6deb7a7a273d629cf5ec029bc209d1a51178615ddf718f5918992ab9", size = 21990, upload-time = "2025-09-27T18:36:46.916Z" }, + { url = "https://files.pythonhosted.org/packages/c2/28/b50fc2f74d1ad761af2f5dcce7492648b983d00a65b8c0e0cb457c82ebbe/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:a4afe79fb3de0b7097d81da19090f4df4f8d3a2b3adaa8764138aac2e44f3af1", size = 23784, upload-time = "2025-09-27T18:36:47.884Z" }, + { url = "https://files.pythonhosted.org/packages/ed/76/104b2aa106a208da8b17a2fb72e033a5a9d7073c68f7e508b94916ed47a9/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:795e7751525cae078558e679d646ae45574b47ed6e7771863fcc079a6171a0fc", size = 21588, upload-time = "2025-09-27T18:36:48.82Z" }, + { url = "https://files.pythonhosted.org/packages/b5/99/16a5eb2d140087ebd97180d95249b00a03aa87e29cc224056274f2e45fd6/markupsafe-3.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:8485f406a96febb5140bfeca44a73e3ce5116b2501ac54fe953e488fb1d03b12", size = 23041, upload-time = "2025-09-27T18:36:49.797Z" }, + { url = "https://files.pythonhosted.org/packages/19/bc/e7140ed90c5d61d77cea142eed9f9c303f4c4806f60a1044c13e3f1471d0/markupsafe-3.0.3-cp313-cp313-win32.whl", hash = "sha256:bdd37121970bfd8be76c5fb069c7751683bdf373db1ed6c010162b2a130248ed", size = 14543, upload-time = "2025-09-27T18:36:51.584Z" }, + { url = "https://files.pythonhosted.org/packages/05/73/c4abe620b841b6b791f2edc248f556900667a5a1cf023a6646967ae98335/markupsafe-3.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:9a1abfdc021a164803f4d485104931fb8f8c1efd55bc6b748d2f5774e78b62c5", size = 15113, upload-time = "2025-09-27T18:36:52.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/3a/fa34a0f7cfef23cf9500d68cb7c32dd64ffd58a12b09225fb03dd37d5b80/markupsafe-3.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:7e68f88e5b8799aa49c85cd116c932a1ac15caaa3f5db09087854d218359e485", size = 13911, upload-time = "2025-09-27T18:36:53.513Z" }, + { url = "https://files.pythonhosted.org/packages/e4/d7/e05cd7efe43a88a17a37b3ae96e79a19e846f3f456fe79c57ca61356ef01/markupsafe-3.0.3-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:218551f6df4868a8d527e3062d0fb968682fe92054e89978594c28e642c43a73", size = 11658, upload-time = "2025-09-27T18:36:54.819Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/e412117548182ce2148bdeacdda3bb494260c0b0184360fe0d56389b523b/markupsafe-3.0.3-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:3524b778fe5cfb3452a09d31e7b5adefeea8c5be1d43c4f810ba09f2ceb29d37", size = 12066, upload-time = "2025-09-27T18:36:55.714Z" }, + { url = "https://files.pythonhosted.org/packages/bc/e6/fa0ffcda717ef64a5108eaa7b4f5ed28d56122c9a6d70ab8b72f9f715c80/markupsafe-3.0.3-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4e885a3d1efa2eadc93c894a21770e4bc67899e3543680313b09f139e149ab19", size = 25639, upload-time = "2025-09-27T18:36:56.908Z" }, + { url = "https://files.pythonhosted.org/packages/96/ec/2102e881fe9d25fc16cb4b25d5f5cde50970967ffa5dddafdb771237062d/markupsafe-3.0.3-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:8709b08f4a89aa7586de0aadc8da56180242ee0ada3999749b183aa23df95025", size = 23569, upload-time = "2025-09-27T18:36:57.913Z" }, + { url = "https://files.pythonhosted.org/packages/4b/30/6f2fce1f1f205fc9323255b216ca8a235b15860c34b6798f810f05828e32/markupsafe-3.0.3-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:b8512a91625c9b3da6f127803b166b629725e68af71f8184ae7e7d54686a56d6", size = 23284, upload-time = "2025-09-27T18:36:58.833Z" }, + { url = "https://files.pythonhosted.org/packages/58/47/4a0ccea4ab9f5dcb6f79c0236d954acb382202721e704223a8aafa38b5c8/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:9b79b7a16f7fedff2495d684f2b59b0457c3b493778c9eed31111be64d58279f", size = 24801, upload-time = "2025-09-27T18:36:59.739Z" }, + { url = "https://files.pythonhosted.org/packages/6a/70/3780e9b72180b6fecb83a4814d84c3bf4b4ae4bf0b19c27196104149734c/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:12c63dfb4a98206f045aa9563db46507995f7ef6d83b2f68eda65c307c6829eb", size = 22769, upload-time = "2025-09-27T18:37:00.719Z" }, + { url = "https://files.pythonhosted.org/packages/98/c5/c03c7f4125180fc215220c035beac6b9cb684bc7a067c84fc69414d315f5/markupsafe-3.0.3-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:8f71bc33915be5186016f675cd83a1e08523649b0e33efdb898db577ef5bb009", size = 23642, upload-time = "2025-09-27T18:37:01.673Z" }, + { url = "https://files.pythonhosted.org/packages/80/d6/2d1b89f6ca4bff1036499b1e29a1d02d282259f3681540e16563f27ebc23/markupsafe-3.0.3-cp313-cp313t-win32.whl", hash = "sha256:69c0b73548bc525c8cb9a251cddf1931d1db4d2258e9599c28c07ef3580ef354", size = 14612, upload-time = "2025-09-27T18:37:02.639Z" }, + { url = "https://files.pythonhosted.org/packages/2b/98/e48a4bfba0a0ffcf9925fe2d69240bfaa19c6f7507b8cd09c70684a53c1e/markupsafe-3.0.3-cp313-cp313t-win_amd64.whl", hash = "sha256:1b4b79e8ebf6b55351f0d91fe80f893b4743f104bff22e90697db1590e47a218", size = 15200, upload-time = "2025-09-27T18:37:03.582Z" }, + { url = "https://files.pythonhosted.org/packages/0e/72/e3cc540f351f316e9ed0f092757459afbc595824ca724cbc5a5d4263713f/markupsafe-3.0.3-cp313-cp313t-win_arm64.whl", hash = "sha256:ad2cf8aa28b8c020ab2fc8287b0f823d0a7d8630784c31e9ee5edea20f406287", size = 13973, upload-time = "2025-09-27T18:37:04.929Z" }, + { url = "https://files.pythonhosted.org/packages/33/8a/8e42d4838cd89b7dde187011e97fe6c3af66d8c044997d2183fbd6d31352/markupsafe-3.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:eaa9599de571d72e2daf60164784109f19978b327a3910d3e9de8c97b5b70cfe", size = 11619, upload-time = "2025-09-27T18:37:06.342Z" }, + { url = "https://files.pythonhosted.org/packages/b5/64/7660f8a4a8e53c924d0fa05dc3a55c9cee10bbd82b11c5afb27d44b096ce/markupsafe-3.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:c47a551199eb8eb2121d4f0f15ae0f923d31350ab9280078d1e5f12b249e0026", size = 12029, upload-time = "2025-09-27T18:37:07.213Z" }, + { url = "https://files.pythonhosted.org/packages/da/ef/e648bfd021127bef5fa12e1720ffed0c6cbb8310c8d9bea7266337ff06de/markupsafe-3.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f34c41761022dd093b4b6896d4810782ffbabe30f2d443ff5f083e0cbbb8c737", size = 24408, upload-time = "2025-09-27T18:37:09.572Z" }, + { url = "https://files.pythonhosted.org/packages/41/3c/a36c2450754618e62008bf7435ccb0f88053e07592e6028a34776213d877/markupsafe-3.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:457a69a9577064c05a97c41f4e65148652db078a3a509039e64d3467b9e7ef97", size = 23005, upload-time = "2025-09-27T18:37:10.58Z" }, + { url = "https://files.pythonhosted.org/packages/bc/20/b7fdf89a8456b099837cd1dc21974632a02a999ec9bf7ca3e490aacd98e7/markupsafe-3.0.3-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:e8afc3f2ccfa24215f8cb28dcf43f0113ac3c37c2f0f0806d8c70e4228c5cf4d", size = 22048, upload-time = "2025-09-27T18:37:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a7/591f592afdc734f47db08a75793a55d7fbcc6902a723ae4cfbab61010cc5/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:ec15a59cf5af7be74194f7ab02d0f59a62bdcf1a537677ce67a2537c9b87fcda", size = 23821, upload-time = "2025-09-27T18:37:12.48Z" }, + { url = "https://files.pythonhosted.org/packages/7d/33/45b24e4f44195b26521bc6f1a82197118f74df348556594bd2262bda1038/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:0eb9ff8191e8498cca014656ae6b8d61f39da5f95b488805da4bb029cccbfbaf", size = 21606, upload-time = "2025-09-27T18:37:13.485Z" }, + { url = "https://files.pythonhosted.org/packages/ff/0e/53dfaca23a69fbfbbf17a4b64072090e70717344c52eaaaa9c5ddff1e5f0/markupsafe-3.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:2713baf880df847f2bece4230d4d094280f4e67b1e813eec43b4c0e144a34ffe", size = 23043, upload-time = "2025-09-27T18:37:14.408Z" }, + { url = "https://files.pythonhosted.org/packages/46/11/f333a06fc16236d5238bfe74daccbca41459dcd8d1fa952e8fbd5dccfb70/markupsafe-3.0.3-cp314-cp314-win32.whl", hash = "sha256:729586769a26dbceff69f7a7dbbf59ab6572b99d94576a5592625d5b411576b9", size = 14747, upload-time = "2025-09-27T18:37:15.36Z" }, + { url = "https://files.pythonhosted.org/packages/28/52/182836104b33b444e400b14f797212f720cbc9ed6ba34c800639d154e821/markupsafe-3.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:bdc919ead48f234740ad807933cdf545180bfbe9342c2bb451556db2ed958581", size = 15341, upload-time = "2025-09-27T18:37:16.496Z" }, + { url = "https://files.pythonhosted.org/packages/6f/18/acf23e91bd94fd7b3031558b1f013adfa21a8e407a3fdb32745538730382/markupsafe-3.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:5a7d5dc5140555cf21a6fefbdbf8723f06fcd2f63ef108f2854de715e4422cb4", size = 14073, upload-time = "2025-09-27T18:37:17.476Z" }, + { url = "https://files.pythonhosted.org/packages/3c/f0/57689aa4076e1b43b15fdfa646b04653969d50cf30c32a102762be2485da/markupsafe-3.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:1353ef0c1b138e1907ae78e2f6c63ff67501122006b0f9abad68fda5f4ffc6ab", size = 11661, upload-time = "2025-09-27T18:37:18.453Z" }, + { url = "https://files.pythonhosted.org/packages/89/c3/2e67a7ca217c6912985ec766c6393b636fb0c2344443ff9d91404dc4c79f/markupsafe-3.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:1085e7fbddd3be5f89cc898938f42c0b3c711fdcb37d75221de2666af647c175", size = 12069, upload-time = "2025-09-27T18:37:19.332Z" }, + { url = "https://files.pythonhosted.org/packages/f0/00/be561dce4e6ca66b15276e184ce4b8aec61fe83662cce2f7d72bd3249d28/markupsafe-3.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1b52b4fb9df4eb9ae465f8d0c228a00624de2334f216f178a995ccdcf82c4634", size = 25670, upload-time = "2025-09-27T18:37:20.245Z" }, + { url = "https://files.pythonhosted.org/packages/50/09/c419f6f5a92e5fadde27efd190eca90f05e1261b10dbd8cbcb39cd8ea1dc/markupsafe-3.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fed51ac40f757d41b7c48425901843666a6677e3e8eb0abcff09e4ba6e664f50", size = 23598, upload-time = "2025-09-27T18:37:21.177Z" }, + { url = "https://files.pythonhosted.org/packages/22/44/a0681611106e0b2921b3033fc19bc53323e0b50bc70cffdd19f7d679bb66/markupsafe-3.0.3-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f190daf01f13c72eac4efd5c430a8de82489d9cff23c364c3ea822545032993e", size = 23261, upload-time = "2025-09-27T18:37:22.167Z" }, + { url = "https://files.pythonhosted.org/packages/5f/57/1b0b3f100259dc9fffe780cfb60d4be71375510e435efec3d116b6436d43/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:e56b7d45a839a697b5eb268c82a71bd8c7f6c94d6fd50c3d577fa39a9f1409f5", size = 24835, upload-time = "2025-09-27T18:37:23.296Z" }, + { url = "https://files.pythonhosted.org/packages/26/6a/4bf6d0c97c4920f1597cc14dd720705eca0bf7c787aebc6bb4d1bead5388/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:f3e98bb3798ead92273dc0e5fd0f31ade220f59a266ffd8a4f6065e0a3ce0523", size = 22733, upload-time = "2025-09-27T18:37:24.237Z" }, + { url = "https://files.pythonhosted.org/packages/14/c7/ca723101509b518797fedc2fdf79ba57f886b4aca8a7d31857ba3ee8281f/markupsafe-3.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:5678211cb9333a6468fb8d8be0305520aa073f50d17f089b5b4b477ea6e67fdc", size = 23672, upload-time = "2025-09-27T18:37:25.271Z" }, + { url = "https://files.pythonhosted.org/packages/fb/df/5bd7a48c256faecd1d36edc13133e51397e41b73bb77e1a69deab746ebac/markupsafe-3.0.3-cp314-cp314t-win32.whl", hash = "sha256:915c04ba3851909ce68ccc2b8e2cd691618c4dc4c4232fb7982bca3f41fd8c3d", size = 14819, upload-time = "2025-09-27T18:37:26.285Z" }, + { url = "https://files.pythonhosted.org/packages/1a/8a/0402ba61a2f16038b48b39bccca271134be00c5c9f0f623208399333c448/markupsafe-3.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4faffd047e07c38848ce017e8725090413cd80cbc23d86e55c587bf979e579c9", size = 15426, upload-time = "2025-09-27T18:37:27.316Z" }, + { url = "https://files.pythonhosted.org/packages/70/bc/6f1c2f612465f5fa89b95bead1f44dcb607670fd42891d8fdcd5d039f4f4/markupsafe-3.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:32001d6a8fc98c8cb5c947787c5d08b0a50663d139f1305bac5885d98d9b40fa", size = 14146, upload-time = "2025-09-27T18:37:28.327Z" }, ] [[package]] name = "mypy" -version = "1.20.0" +version = "1.19.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "librt", marker = "platform_python_implementation != 'PyPy'" }, @@ -1521,51 +1594,39 @@ dependencies = [ { name = "tomli", marker = "python_full_version < '3.11'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f8/5c/b0089fe7fef0a994ae5ee07029ced0526082c6cfaaa4c10d40a10e33b097/mypy-1.20.0.tar.gz", hash = "sha256:eb96c84efcc33f0b5e0e04beacf00129dd963b67226b01c00b9dfc8affb464c3", size = 3815028, upload-time = "2026-03-31T16:55:14.959Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4d/a2/a965c8c3fcd4fa8b84ba0d46606181b0d0a1d50f274c67877f3e9ed4882c/mypy-1.20.0-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:d99f515f95fd03a90875fdb2cca12ff074aa04490db4d190905851bdf8a549a8", size = 14430138, upload-time = "2026-03-31T16:52:37.843Z" }, - { url = "https://files.pythonhosted.org/packages/53/6e/043477501deeb8eabbab7f1a2f6cac62cfb631806dc1d6862a04a7f5011b/mypy-1.20.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:bd0212976dc57a5bfeede7c219e7cd66568a32c05c9129686dd487c059c1b88a", size = 13311282, upload-time = "2026-03-31T16:55:11.021Z" }, - { url = "https://files.pythonhosted.org/packages/65/aa/bd89b247b83128197a214f29f0632ff3c14f54d4cd70d144d157bd7d7d6e/mypy-1.20.0-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f8426d4d75d68714abc17a4292d922f6ba2cfb984b72c2278c437f6dae797865", size = 13750889, upload-time = "2026-03-31T16:52:02.909Z" }, - { url = "https://files.pythonhosted.org/packages/fa/9d/2860be7355c45247ccc0be1501c91176318964c2a137bd4743f58ce6200e/mypy-1.20.0-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:02cca0761c75b42a20a2757ae58713276605eb29a08dd8a6e092aa347c4115ca", size = 14619788, upload-time = "2026-03-31T16:50:48.928Z" }, - { url = "https://files.pythonhosted.org/packages/75/7f/3ef3e360c91f3de120f205c8ce405e9caf9fc52ef14b65d37073e322c114/mypy-1.20.0-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:b3a49064504be59e59da664c5e149edc1f26c67c4f8e8456f6ba6aba55033018", size = 14918849, upload-time = "2026-03-31T16:51:10.478Z" }, - { url = "https://files.pythonhosted.org/packages/ae/72/af970dfe167ef788df7c5e6109d2ed0229f164432ce828bc9741a4250e64/mypy-1.20.0-cp310-cp310-win_amd64.whl", hash = "sha256:ebea00201737ad4391142808ed16e875add5c17f676e0912b387739f84991e13", size = 10822007, upload-time = "2026-03-31T16:50:25.268Z" }, - { url = "https://files.pythonhosted.org/packages/93/94/ba9065c2ebe5421619aff684b793d953e438a8bfe31a320dd6d1e0706e81/mypy-1.20.0-cp310-cp310-win_arm64.whl", hash = "sha256:e80cf77847d0d3e6e3111b7b25db32a7f8762fd4b9a3a72ce53fe16a2863b281", size = 9756158, upload-time = "2026-03-31T16:48:36.213Z" }, - { url = "https://files.pythonhosted.org/packages/6e/1c/74cb1d9993236910286865679d1c616b136b2eae468493aa939431eda410/mypy-1.20.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:4525e7010b1b38334516181c5b81e16180b8e149e6684cee5a727c78186b4e3b", size = 14343972, upload-time = "2026-03-31T16:49:04.887Z" }, - { url = "https://files.pythonhosted.org/packages/d5/0d/01399515eca280386e308cf57901e68d3a52af18691941b773b3380c1df8/mypy-1.20.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:a17c5d0bdcca61ce24a35beb828a2d0d323d3fcf387d7512206888c900193367", size = 13225007, upload-time = "2026-03-31T16:50:08.151Z" }, - { url = "https://files.pythonhosted.org/packages/56/ac/b4ba5094fb2d7fe9d2037cd8d18bbe02bcf68fd22ab9ff013f55e57ba095/mypy-1.20.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f75ff57defcd0f1d6e006d721ccdec6c88d4f6a7816eb92f1c4890d979d9ee62", size = 13663752, upload-time = "2026-03-31T16:49:26.064Z" }, - { url = "https://files.pythonhosted.org/packages/db/a7/460678d3cf7da252d2288dad0c602294b6ec22a91932ec368cc11e44bb6e/mypy-1.20.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b503ab55a836136b619b5fc21c8803d810c5b87551af8600b72eecafb0059cb0", size = 14532265, upload-time = "2026-03-31T16:53:55.077Z" }, - { url = "https://files.pythonhosted.org/packages/a3/3e/051cca8166cf0438ae3ea80e0e7c030d7a8ab98dffc93f80a1aa3f23c1a2/mypy-1.20.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:1973868d2adbb4584a3835780b27436f06d1dc606af5be09f187aaa25be1070f", size = 14768476, upload-time = "2026-03-31T16:50:34.587Z" }, - { url = "https://files.pythonhosted.org/packages/be/66/8e02ec184f852ed5c4abb805583305db475930854e09964b55e107cdcbc4/mypy-1.20.0-cp311-cp311-win_amd64.whl", hash = "sha256:2fcedb16d456106e545b2bfd7ef9d24e70b38ec252d2a629823a4d07ebcdb69e", size = 10818226, upload-time = "2026-03-31T16:53:15.624Z" }, - { url = "https://files.pythonhosted.org/packages/13/4b/383ad1924b28f41e4879a74151e7a5451123330d45652da359f9183bcd45/mypy-1.20.0-cp311-cp311-win_arm64.whl", hash = "sha256:379edf079ce44ac8d2805bcf9b3dd7340d4f97aad3a5e0ebabbf9d125b84b442", size = 9750091, upload-time = "2026-03-31T16:54:12.162Z" }, - { url = "https://files.pythonhosted.org/packages/be/dd/3afa29b58c2e57c79116ed55d700721c3c3b15955e2b6251dd165d377c0e/mypy-1.20.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:002b613ae19f4ac7d18b7e168ffe1cb9013b37c57f7411984abbd3b817b0a214", size = 14509525, upload-time = "2026-03-31T16:55:01.824Z" }, - { url = "https://files.pythonhosted.org/packages/54/eb/227b516ab8cad9f2a13c5e7a98d28cd6aa75e9c83e82776ae6c1c4c046c7/mypy-1.20.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a9336b5e6712f4adaf5afc3203a99a40b379049104349d747eb3e5a3aa23ac2e", size = 13326469, upload-time = "2026-03-31T16:51:41.23Z" }, - { url = "https://files.pythonhosted.org/packages/57/d4/1ddb799860c1b5ac6117ec307b965f65deeb47044395ff01ab793248a591/mypy-1.20.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f13b3e41bce9d257eded794c0f12878af3129d80aacd8a3ee0dee51f3a978651", size = 13705953, upload-time = "2026-03-31T16:48:55.69Z" }, - { url = "https://files.pythonhosted.org/packages/c5/b7/54a720f565a87b893182a2a393370289ae7149e4715859e10e1c05e49154/mypy-1.20.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9804c3ad27f78e54e58b32e7cb532d128b43dbfb9f3f9f06262b821a0f6bd3f5", size = 14710363, upload-time = "2026-03-31T16:53:26.948Z" }, - { url = "https://files.pythonhosted.org/packages/b2/2a/74810274848d061f8a8ea4ac23aaad43bd3d8c1882457999c2e568341c57/mypy-1.20.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:697f102c5c1d526bdd761a69f17c6070f9892eebcb94b1a5963d679288c09e78", size = 14947005, upload-time = "2026-03-31T16:50:17.591Z" }, - { url = "https://files.pythonhosted.org/packages/77/91/21b8ba75f958bcda75690951ce6fa6b7138b03471618959529d74b8544e2/mypy-1.20.0-cp312-cp312-win_amd64.whl", hash = "sha256:0ecd63f75fdd30327e4ad8b5704bd6d91fc6c1b2e029f8ee14705e1207212489", size = 10880616, upload-time = "2026-03-31T16:52:19.986Z" }, - { url = "https://files.pythonhosted.org/packages/8a/15/3d8198ef97c1ca03aea010cce4f1d4f3bc5d9849e8c0140111ca2ead9fdd/mypy-1.20.0-cp312-cp312-win_arm64.whl", hash = "sha256:f194db59657c58593a3c47c6dfd7bad4ef4ac12dbc94d01b3a95521f78177e33", size = 9813091, upload-time = "2026-03-31T16:53:44.385Z" }, - { url = "https://files.pythonhosted.org/packages/d6/a7/f64ea7bd592fa431cb597418b6dec4a47f7d0c36325fec7ac67bc8402b94/mypy-1.20.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:b20c8b0fd5877abdf402e79a3af987053de07e6fb208c18df6659f708b535134", size = 14485344, upload-time = "2026-03-31T16:49:16.78Z" }, - { url = "https://files.pythonhosted.org/packages/bb/72/8927d84cfc90c6abea6e96663576e2e417589347eb538749a464c4c218a0/mypy-1.20.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:367e5c993ba34d5054d11937d0485ad6dfc60ba760fa326c01090fc256adf15c", size = 13327400, upload-time = "2026-03-31T16:53:08.02Z" }, - { url = "https://files.pythonhosted.org/packages/ab/4a/11ab99f9afa41aa350178d24a7d2da17043228ea10f6456523f64b5a6cf6/mypy-1.20.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f799d9db89fc00446f03281f84a221e50018fc40113a3ba9864b132895619ebe", size = 13706384, upload-time = "2026-03-31T16:52:28.577Z" }, - { url = "https://files.pythonhosted.org/packages/42/79/694ca73979cfb3535ebfe78733844cd5aff2e63304f59bf90585110d975a/mypy-1.20.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:555658c611099455b2da507582ea20d2043dfdfe7f5ad0add472b1c6238b433f", size = 14700378, upload-time = "2026-03-31T16:48:45.527Z" }, - { url = "https://files.pythonhosted.org/packages/84/24/a022ccab3a46e3d2cdf2e0e260648633640eb396c7e75d5a42818a8d3971/mypy-1.20.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:efe8d70949c3023698c3fca1e94527e7e790a361ab8116f90d11221421cd8726", size = 14932170, upload-time = "2026-03-31T16:49:36.038Z" }, - { url = "https://files.pythonhosted.org/packages/d8/9b/549228d88f574d04117e736f55958bd4908f980f9f5700a07aeb85df005b/mypy-1.20.0-cp313-cp313-win_amd64.whl", hash = "sha256:f49590891d2c2f8a9de15614e32e459a794bcba84693c2394291a2038bbaaa69", size = 10888526, upload-time = "2026-03-31T16:50:59.827Z" }, - { url = "https://files.pythonhosted.org/packages/91/17/15095c0e54a8bc04d22d4ff06b2139d5f142c2e87520b4e39010c4862771/mypy-1.20.0-cp313-cp313-win_arm64.whl", hash = "sha256:76a70bf840495729be47510856b978f1b0ec7d08f257ca38c9d932720bf6b43e", size = 9816456, upload-time = "2026-03-31T16:49:59.537Z" }, - { url = "https://files.pythonhosted.org/packages/4e/0e/6ca4a84cbed9e62384bc0b2974c90395ece5ed672393e553996501625fc5/mypy-1.20.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:0f42dfaab7ec1baff3b383ad7af562ab0de573c5f6edb44b2dab016082b89948", size = 14483331, upload-time = "2026-03-31T16:52:57.999Z" }, - { url = "https://files.pythonhosted.org/packages/7d/c5/5fe9d8a729dd9605064691816243ae6c49fde0bd28f6e5e17f6a24203c43/mypy-1.20.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:31b5dbb55293c1bd27c0fc813a0d2bb5ceef9d65ac5afa2e58f829dab7921fd5", size = 13342047, upload-time = "2026-03-31T16:54:21.555Z" }, - { url = "https://files.pythonhosted.org/packages/4c/33/e18bcfa338ca4e6b2771c85d4c5203e627d0c69d9de5c1a2cf2ba13320ba/mypy-1.20.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49d11c6f573a5a08f77fad13faff2139f6d0730ebed2cfa9b3d2702671dd7188", size = 13719585, upload-time = "2026-03-31T16:51:53.89Z" }, - { url = "https://files.pythonhosted.org/packages/6b/8d/93491ff7b79419edc7eabf95cb3b3f7490e2e574b2855c7c7e7394ff933f/mypy-1.20.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7d3243c406773185144527f83be0e0aefc7bf4601b0b2b956665608bf7c98a83", size = 14685075, upload-time = "2026-03-31T16:54:04.464Z" }, - { url = "https://files.pythonhosted.org/packages/b5/9d/d924b38a4923f8d164bf2b4ec98bf13beaf6e10a5348b4b137eadae40a6e/mypy-1.20.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:a79c1eba7ac4209f2d850f0edd0a2f8bba88cbfdfefe6fb76a19e9d4fe5e71a2", size = 14919141, upload-time = "2026-03-31T16:54:51.785Z" }, - { url = "https://files.pythonhosted.org/packages/59/98/1da9977016678c0b99d43afe52ed00bb3c1a0c4c995d3e6acca1a6ebb9b4/mypy-1.20.0-cp314-cp314-win_amd64.whl", hash = "sha256:00e047c74d3ec6e71a2eb88e9ea551a2edb90c21f993aefa9e0d2a898e0bb732", size = 11050925, upload-time = "2026-03-31T16:51:30.758Z" }, - { url = "https://files.pythonhosted.org/packages/5e/e3/ba0b7a3143e49a9c4f5967dde6ea4bf8e0b10ecbbcca69af84027160ee89/mypy-1.20.0-cp314-cp314-win_arm64.whl", hash = "sha256:931a7630bba591593dcf6e97224a21ff80fb357e7982628d25e3c618e7f598ef", size = 10001089, upload-time = "2026-03-31T16:49:43.632Z" }, - { url = "https://files.pythonhosted.org/packages/12/28/e617e67b3be9d213cda7277913269c874eb26472489f95d09d89765ce2d8/mypy-1.20.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:26c8b52627b6552f47ff11adb4e1509605f094e29815323e487fc0053ebe93d1", size = 15534710, upload-time = "2026-03-31T16:52:12.506Z" }, - { url = "https://files.pythonhosted.org/packages/6e/0c/3b5f2d3e45dc7169b811adce8451679d9430399d03b168f9b0489f43adaa/mypy-1.20.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:39362cdb4ba5f916e7976fccecaab1ba3a83e35f60fa68b64e9a70e221bb2436", size = 14393013, upload-time = "2026-03-31T16:54:41.186Z" }, - { url = "https://files.pythonhosted.org/packages/a3/49/edc8b0aa145cc09c1c74f7ce2858eead9329931dcbbb26e2ad40906daa4e/mypy-1.20.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:34506397dbf40c15dc567635d18a21d33827e9ab29014fb83d292a8f4f8953b6", size = 15047240, upload-time = "2026-03-31T16:54:31.955Z" }, - { url = "https://files.pythonhosted.org/packages/42/37/a946bb416e37a57fa752b3100fd5ede0e28df94f92366d1716555d47c454/mypy-1.20.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:555493c44a4f5a1b58d611a43333e71a9981c6dbe26270377b6f8174126a0526", size = 15858565, upload-time = "2026-03-31T16:53:36.997Z" }, - { url = "https://files.pythonhosted.org/packages/2f/99/7690b5b5b552db1bd4ff362e4c0eb3107b98d680835e65823fbe888c8b78/mypy-1.20.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:2721f0ce49cb74a38f00c50da67cb7d36317b5eda38877a49614dc018e91c787", size = 16087874, upload-time = "2026-03-31T16:52:48.313Z" }, - { url = "https://files.pythonhosted.org/packages/aa/76/53e893a498138066acd28192b77495c9357e5a58cc4be753182846b43315/mypy-1.20.0-cp314-cp314t-win_amd64.whl", hash = "sha256:47781555a7aa5fedcc2d16bcd72e0dc83eb272c10dd657f9fb3f9cc08e2e6abb", size = 12572380, upload-time = "2026-03-31T16:49:52.454Z" }, - { url = "https://files.pythonhosted.org/packages/76/9c/6dbdae21f01b7aacddc2c0bbf3c5557aa547827fdf271770fe1e521e7093/mypy-1.20.0-cp314-cp314t-win_arm64.whl", hash = "sha256:c70380fe5d64010f79fb863b9081c7004dd65225d2277333c219d93a10dad4dd", size = 10381174, upload-time = "2026-03-31T16:51:20.179Z" }, - { url = "https://files.pythonhosted.org/packages/21/66/4d734961ce167f0fd8380769b3b7c06dbdd6ff54c2190f3f2ecd22528158/mypy-1.20.0-py3-none-any.whl", hash = "sha256:a6e0641147cbfa7e4e94efdb95c2dab1aff8cfc159ded13e07f308ddccc8c48e", size = 2636365, upload-time = "2026-03-31T16:51:44.911Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/f5/db/4efed9504bc01309ab9c2da7e352cc223569f05478012b5d9ece38fd44d2/mypy-1.19.1.tar.gz", hash = "sha256:19d88bb05303fe63f71dd2c6270daca27cb9401c4ca8255fe50d1d920e0eb9ba", size = 3582404, upload-time = "2025-12-15T05:03:48.42Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/2f/63/e499890d8e39b1ff2df4c0c6ce5d371b6844ee22b8250687a99fd2f657a8/mypy-1.19.1-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:5f05aa3d375b385734388e844bc01733bd33c644ab48e9684faa54e5389775ec", size = 13101333, upload-time = "2025-12-15T05:03:03.28Z" }, + { url = "https://files.pythonhosted.org/packages/72/4b/095626fc136fba96effc4fd4a82b41d688ab92124f8c4f7564bffe5cf1b0/mypy-1.19.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:022ea7279374af1a5d78dfcab853fe6a536eebfda4b59deab53cd21f6cd9f00b", size = 12164102, upload-time = "2025-12-15T05:02:33.611Z" }, + { url = "https://files.pythonhosted.org/packages/0c/5b/952928dd081bf88a83a5ccd49aaecfcd18fd0d2710c7ff07b8fb6f7032b9/mypy-1.19.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee4c11e460685c3e0c64a4c5de82ae143622410950d6be863303a1c4ba0e36d6", size = 12765799, upload-time = "2025-12-15T05:03:28.44Z" }, + { url = "https://files.pythonhosted.org/packages/2a/0d/93c2e4a287f74ef11a66fb6d49c7a9f05e47b0a4399040e6719b57f500d2/mypy-1.19.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:de759aafbae8763283b2ee5869c7255391fbc4de3ff171f8f030b5ec48381b74", size = 13522149, upload-time = "2025-12-15T05:02:36.011Z" }, + { url = "https://files.pythonhosted.org/packages/7b/0e/33a294b56aaad2b338d203e3a1d8b453637ac36cb278b45005e0901cf148/mypy-1.19.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:ab43590f9cd5108f41aacf9fca31841142c786827a74ab7cc8a2eacb634e09a1", size = 13810105, upload-time = "2025-12-15T05:02:40.327Z" }, + { url = "https://files.pythonhosted.org/packages/0e/fd/3e82603a0cb66b67c5e7abababce6bf1a929ddf67bf445e652684af5c5a0/mypy-1.19.1-cp310-cp310-win_amd64.whl", hash = "sha256:2899753e2f61e571b3971747e302d5f420c3fd09650e1951e99f823bc3089dac", size = 10057200, upload-time = "2025-12-15T05:02:51.012Z" }, + { url = "https://files.pythonhosted.org/packages/ef/47/6b3ebabd5474d9cdc170d1342fbf9dddc1b0ec13ec90bf9004ee6f391c31/mypy-1.19.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d8dfc6ab58ca7dda47d9237349157500468e404b17213d44fc1cb77bce532288", size = 13028539, upload-time = "2025-12-15T05:03:44.129Z" }, + { url = "https://files.pythonhosted.org/packages/5c/a6/ac7c7a88a3c9c54334f53a941b765e6ec6c4ebd65d3fe8cdcfbe0d0fd7db/mypy-1.19.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:e3f276d8493c3c97930e354b2595a44a21348b320d859fb4a2b9f66da9ed27ab", size = 12083163, upload-time = "2025-12-15T05:03:37.679Z" }, + { url = "https://files.pythonhosted.org/packages/67/af/3afa9cf880aa4a2c803798ac24f1d11ef72a0c8079689fac5cfd815e2830/mypy-1.19.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2abb24cf3f17864770d18d673c85235ba52456b36a06b6afc1e07c1fdcd3d0e6", size = 12687629, upload-time = "2025-12-15T05:02:31.526Z" }, + { url = "https://files.pythonhosted.org/packages/2d/46/20f8a7114a56484ab268b0ab372461cb3a8f7deed31ea96b83a4e4cfcfca/mypy-1.19.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a009ffa5a621762d0c926a078c2d639104becab69e79538a494bcccb62cc0331", size = 13436933, upload-time = "2025-12-15T05:03:15.606Z" }, + { url = "https://files.pythonhosted.org/packages/5b/f8/33b291ea85050a21f15da910002460f1f445f8007adb29230f0adea279cb/mypy-1.19.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:f7cee03c9a2e2ee26ec07479f38ea9c884e301d42c6d43a19d20fb014e3ba925", size = 13661754, upload-time = "2025-12-15T05:02:26.731Z" }, + { url = "https://files.pythonhosted.org/packages/fd/a3/47cbd4e85bec4335a9cd80cf67dbc02be21b5d4c9c23ad6b95d6c5196bac/mypy-1.19.1-cp311-cp311-win_amd64.whl", hash = "sha256:4b84a7a18f41e167f7995200a1d07a4a6810e89d29859df936f1c3923d263042", size = 10055772, upload-time = "2025-12-15T05:03:26.179Z" }, + { url = "https://files.pythonhosted.org/packages/06/8a/19bfae96f6615aa8a0604915512e0289b1fad33d5909bf7244f02935d33a/mypy-1.19.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:a8174a03289288c1f6c46d55cef02379b478bfbc8e358e02047487cad44c6ca1", size = 13206053, upload-time = "2025-12-15T05:03:46.622Z" }, + { url = "https://files.pythonhosted.org/packages/a5/34/3e63879ab041602154ba2a9f99817bb0c85c4df19a23a1443c8986e4d565/mypy-1.19.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ffcebe56eb09ff0c0885e750036a095e23793ba6c2e894e7e63f6d89ad51f22e", size = 12219134, upload-time = "2025-12-15T05:03:24.367Z" }, + { url = "https://files.pythonhosted.org/packages/89/cc/2db6f0e95366b630364e09845672dbee0cbf0bbe753a204b29a944967cd9/mypy-1.19.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b64d987153888790bcdb03a6473d321820597ab8dd9243b27a92153c4fa50fd2", size = 12731616, upload-time = "2025-12-15T05:02:44.725Z" }, + { url = "https://files.pythonhosted.org/packages/00/be/dd56c1fd4807bc1eba1cf18b2a850d0de7bacb55e158755eb79f77c41f8e/mypy-1.19.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c35d298c2c4bba75feb2195655dfea8124d855dfd7343bf8b8c055421eaf0cf8", size = 13620847, upload-time = "2025-12-15T05:03:39.633Z" }, + { url = "https://files.pythonhosted.org/packages/6d/42/332951aae42b79329f743bf1da088cd75d8d4d9acc18fbcbd84f26c1af4e/mypy-1.19.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:34c81968774648ab5ac09c29a375fdede03ba253f8f8287847bd480782f73a6a", size = 13834976, upload-time = "2025-12-15T05:03:08.786Z" }, + { url = "https://files.pythonhosted.org/packages/6f/63/e7493e5f90e1e085c562bb06e2eb32cae27c5057b9653348d38b47daaecc/mypy-1.19.1-cp312-cp312-win_amd64.whl", hash = "sha256:b10e7c2cd7870ba4ad9b2d8a6102eb5ffc1f16ca35e3de6bfa390c1113029d13", size = 10118104, upload-time = "2025-12-15T05:03:10.834Z" }, + { url = "https://files.pythonhosted.org/packages/de/9f/a6abae693f7a0c697dbb435aac52e958dc8da44e92e08ba88d2e42326176/mypy-1.19.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:e3157c7594ff2ef1634ee058aafc56a82db665c9438fd41b390f3bde1ab12250", size = 13201927, upload-time = "2025-12-15T05:02:29.138Z" }, + { url = "https://files.pythonhosted.org/packages/9a/a4/45c35ccf6e1c65afc23a069f50e2c66f46bd3798cbe0d680c12d12935caa/mypy-1.19.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:bdb12f69bcc02700c2b47e070238f42cb87f18c0bc1fc4cdb4fb2bc5fd7a3b8b", size = 12206730, upload-time = "2025-12-15T05:03:01.325Z" }, + { url = "https://files.pythonhosted.org/packages/05/bb/cdcf89678e26b187650512620eec8368fded4cfd99cfcb431e4cdfd19dec/mypy-1.19.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:f859fb09d9583a985be9a493d5cfc5515b56b08f7447759a0c5deaf68d80506e", size = 12724581, upload-time = "2025-12-15T05:03:20.087Z" }, + { url = "https://files.pythonhosted.org/packages/d1/32/dd260d52babf67bad8e6770f8e1102021877ce0edea106e72df5626bb0ec/mypy-1.19.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c9a6538e0415310aad77cb94004ca6482330fece18036b5f360b62c45814c4ef", size = 13616252, upload-time = "2025-12-15T05:02:49.036Z" }, + { url = "https://files.pythonhosted.org/packages/71/d0/5e60a9d2e3bd48432ae2b454b7ef2b62a960ab51292b1eda2a95edd78198/mypy-1.19.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:da4869fc5e7f62a88f3fe0b5c919d1d9f7ea3cef92d3689de2823fd27e40aa75", size = 13840848, upload-time = "2025-12-15T05:02:55.95Z" }, + { url = "https://files.pythonhosted.org/packages/98/76/d32051fa65ecf6cc8c6610956473abdc9b4c43301107476ac03559507843/mypy-1.19.1-cp313-cp313-win_amd64.whl", hash = "sha256:016f2246209095e8eda7538944daa1d60e1e8134d98983b9fc1e92c1fc0cb8dd", size = 10135510, upload-time = "2025-12-15T05:02:58.438Z" }, + { url = "https://files.pythonhosted.org/packages/de/eb/b83e75f4c820c4247a58580ef86fcd35165028f191e7e1ba57128c52782d/mypy-1.19.1-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:06e6170bd5836770e8104c8fdd58e5e725cfeb309f0a6c681a811f557e97eac1", size = 13199744, upload-time = "2025-12-15T05:03:30.823Z" }, + { url = "https://files.pythonhosted.org/packages/94/28/52785ab7bfa165f87fcbb61547a93f98bb20e7f82f90f165a1f69bce7b3d/mypy-1.19.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:804bd67b8054a85447c8954215a906d6eff9cabeabe493fb6334b24f4bfff718", size = 12215815, upload-time = "2025-12-15T05:02:42.323Z" }, + { url = "https://files.pythonhosted.org/packages/0a/c6/bdd60774a0dbfb05122e3e925f2e9e846c009e479dcec4821dad881f5b52/mypy-1.19.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:21761006a7f497cb0d4de3d8ef4ca70532256688b0523eee02baf9eec895e27b", size = 12740047, upload-time = "2025-12-15T05:03:33.168Z" }, + { url = "https://files.pythonhosted.org/packages/32/2a/66ba933fe6c76bd40d1fe916a83f04fed253152f451a877520b3c4a5e41e/mypy-1.19.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:28902ee51f12e0f19e1e16fbe2f8f06b6637f482c459dd393efddd0ec7f82045", size = 13601998, upload-time = "2025-12-15T05:03:13.056Z" }, + { url = "https://files.pythonhosted.org/packages/e3/da/5055c63e377c5c2418760411fd6a63ee2b96cf95397259038756c042574f/mypy-1.19.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:481daf36a4c443332e2ae9c137dfee878fcea781a2e3f895d54bd3002a900957", size = 13807476, upload-time = "2025-12-15T05:03:17.977Z" }, + { url = "https://files.pythonhosted.org/packages/cd/09/4ebd873390a063176f06b0dbf1f7783dd87bd120eae7727fa4ae4179b685/mypy-1.19.1-cp314-cp314-win_amd64.whl", hash = "sha256:8bb5c6f6d043655e055be9b542aa5f3bdd30e4f3589163e85f93f3640060509f", size = 10281872, upload-time = "2025-12-15T05:03:05.549Z" }, + { url = "https://files.pythonhosted.org/packages/8d/f4/4ce9a05ce5ded1de3ec1c1d96cf9f9504a04e54ce0ed55cfa38619a32b8d/mypy-1.19.1-py3-none-any.whl", hash = "sha256:f1235f5ea01b7db5468d53ece6aaddf1ad0b88d9e7462b86ef96fe04995d7247", size = 2471239, upload-time = "2025-12-15T05:03:07.248Z" }, ] [[package]] @@ -1579,11 +1640,11 @@ wheels = [ [[package]] name = "nodeenv" -version = "1.9.1" +version = "1.10.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/43/16/fc88b08840de0e0a72a2f9d8c6bae36be573e475a6326ae854bcc549fc45/nodeenv-1.9.1.tar.gz", hash = "sha256:6ec12890a2dab7946721edbfbcd91f3319c6ccc9aec47be7c7e6b7011ee6645f", size = 47437, upload-time = "2024-06-04T18:44:11.171Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/bf/d1bda4f6168e0b2e9e5958945e01910052158313224ada5ce1fb2e1113b8/nodeenv-1.10.0.tar.gz", hash = "sha256:996c191ad80897d076bdfba80a41994c2b47c68e224c542b48feba42ba00f8bb", size = 55611, upload-time = "2025-12-20T14:08:54.006Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/d2/1d/1b658dbd2b9fa9c4c9f32accbfc0205d532c8c6194dc0f2a4c0428e7128a/nodeenv-1.9.1-py2.py3-none-any.whl", hash = "sha256:ba11c9782d29c27c70ffbdda2d7415098754709be8a7056d79a737cd901155c9", size = 22314, upload-time = "2024-06-04T18:44:08.352Z" }, + { url = "https://files.pythonhosted.org/packages/88/b2/d0896bdcdc8d28a7fc5717c305f1a861c26e18c05047949fb371034d98bd/nodeenv-1.10.0-py2.py3-none-any.whl", hash = "sha256:5bb13e3eed2923615535339b3c620e76779af4cb4c6a90deccc9e36b274d3827", size = 23438, upload-time = "2025-12-20T14:08:52.782Z" }, ] [[package]] @@ -1640,11 +1701,11 @@ wheels = [ [[package]] name = "packaging" -version = "25.0" +version = "26.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/a1/d4/1fc4078c65507b51b96ca8f8c3ba19e6a61c8253c72794544580a7b6c24d/packaging-25.0.tar.gz", hash = "sha256:d443872c98d677bf60f6a1f2f8c1cb748e8fe762d2bf9d3148b5599295b0fc4f", size = 165727, upload-time = "2025-04-19T11:48:59.673Z" } +sdist = { url = "https://files.pythonhosted.org/packages/65/ee/299d360cdc32edc7d2cf530f3accf79c4fca01e96ffc950d8a52213bd8e4/packaging-26.0.tar.gz", hash = "sha256:00243ae351a257117b6a241061796684b084ed1c516a08c48a3f7e147a9d80b4", size = 143416, upload-time = "2026-01-21T20:50:39.064Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/20/12/38679034af332785aac8774540895e234f4d07f7545804097de4b666afd8/packaging-25.0-py3-none-any.whl", hash = "sha256:29572ef2b1f17581046b3a2227d5c611fb25ec70ca1ba8554b24b0e69331a484", size = 66469, upload-time = "2025-04-19T11:48:57.875Z" }, + { url = "https://files.pythonhosted.org/packages/b7/b9/c538f279a4e237a006a2c98387d081e9eb060d203d8ed34467cc0f0b9b53/packaging-26.0-py3-none-any.whl", hash = "sha256:b36f1fef9334a5588b4166f8bcd26a14e521f2b55e6b9de3aaa80d3ff7a37529", size = 74366, upload-time = "2026-01-21T20:50:37.788Z" }, ] [[package]] @@ -1658,11 +1719,11 @@ wheels = [ [[package]] name = "platformdirs" -version = "4.3.8" +version = "4.9.4" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/fe/8b/3c73abc9c759ecd3f1f7ceff6685840859e8070c4d947c93fae71f6a0bf2/platformdirs-4.3.8.tar.gz", hash = "sha256:3d512d96e16bcb959a814c9f348431070822a6496326a4be0911c40b5a74c2bc", size = 21362, upload-time = "2025-05-07T22:47:42.121Z" } +sdist = { url = "https://files.pythonhosted.org/packages/19/56/8d4c30c8a1d07013911a8fdbd8f89440ef9f08d07a1b50ab8ca8be5a20f9/platformdirs-4.9.4.tar.gz", hash = "sha256:1ec356301b7dc906d83f371c8f487070e99d3ccf9e501686456394622a01a934", size = 28737, upload-time = "2026-03-05T18:34:13.271Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/fe/39/979e8e21520d4e47a0bbe349e2713c0aac6f3d853d0e5b34d76206c439aa/platformdirs-4.3.8-py3-none-any.whl", hash = "sha256:ff7059bb7eb1179e2685604f4aaf157cfd9535242bd23742eadc3c13542139b4", size = 18567, upload-time = "2025-05-07T22:47:40.376Z" }, + { url = "https://files.pythonhosted.org/packages/63/d7/97f7e3a6abb67d8080dd406fd4df842c2be0efaf712d1c899c32a075027c/platformdirs-4.9.4-py3-none-any.whl", hash = "sha256:68a9a4619a666ea6439f2ff250c12a853cd1cbd5158d258bd824a7df6be2f868", size = 21216, upload-time = "2026-03-05T18:34:12.172Z" }, ] [[package]] @@ -1692,29 +1753,29 @@ wheels = [ [[package]] name = "proto-plus" -version = "1.26.1" +version = "1.27.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "protobuf" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/f4/ac/87285f15f7cce6d4a008f33f1757fb5a13611ea8914eb58c3d0d26243468/proto_plus-1.26.1.tar.gz", hash = "sha256:21a515a4c4c0088a773899e23c7bbade3d18f9c66c73edd4c7ee3816bc96a012", size = 56142, upload-time = "2025-03-10T15:54:38.843Z" } +sdist = { url = "https://files.pythonhosted.org/packages/3a/02/8832cde80e7380c600fbf55090b6ab7b62bd6825dbedde6d6657c15a1f8e/proto_plus-1.27.1.tar.gz", hash = "sha256:912a7460446625b792f6448bade9e55cd4e41e6ac10e27009ef71a7f317fa147", size = 56929, upload-time = "2026-02-02T17:34:49.035Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/4e/6d/280c4c2ce28b1593a19ad5239c8b826871fc6ec275c21afc8e1820108039/proto_plus-1.26.1-py3-none-any.whl", hash = "sha256:13285478c2dcf2abb829db158e1047e2f1e8d63a077d94263c2b88b043c75a66", size = 50163, upload-time = "2025-03-10T15:54:37.335Z" }, + { url = "https://files.pythonhosted.org/packages/5d/79/ac273cbbf744691821a9cca88957257f41afe271637794975ca090b9588b/proto_plus-1.27.1-py3-none-any.whl", hash = "sha256:e4643061f3a4d0de092d62aa4ad09fa4756b2cbb89d4627f3985018216f9fefc", size = 50480, upload-time = "2026-02-02T17:34:47.339Z" }, ] [[package]] name = "protobuf" -version = "6.33.5" +version = "6.33.6" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/ba/25/7c72c307aafc96fa87062aa6291d9f7c94836e43214d43722e86037aac02/protobuf-6.33.5.tar.gz", hash = "sha256:6ddcac2a081f8b7b9642c09406bc6a4290128fce5f471cddd165960bb9119e5c", size = 444465, upload-time = "2026-01-29T21:51:33.494Z" } +sdist = { url = "https://files.pythonhosted.org/packages/66/70/e908e9c5e52ef7c3a6c7902c9dfbb34c7e29c25d2f81ade3856445fd5c94/protobuf-6.33.6.tar.gz", hash = "sha256:a6768d25248312c297558af96a9f9c929e8c4cee0659cb07e780731095f38135", size = 444531, upload-time = "2026-03-18T19:05:00.988Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b1/79/af92d0a8369732b027e6d6084251dd8e782c685c72da161bd4a2e00fbabb/protobuf-6.33.5-cp310-abi3-win32.whl", hash = "sha256:d71b040839446bac0f4d162e758bea99c8251161dae9d0983a3b88dee345153b", size = 425769, upload-time = "2026-01-29T21:51:21.751Z" }, - { url = "https://files.pythonhosted.org/packages/55/75/bb9bc917d10e9ee13dee8607eb9ab963b7cf8be607c46e7862c748aa2af7/protobuf-6.33.5-cp310-abi3-win_amd64.whl", hash = "sha256:3093804752167bcab3998bec9f1048baae6e29505adaf1afd14a37bddede533c", size = 437118, upload-time = "2026-01-29T21:51:24.022Z" }, - { url = "https://files.pythonhosted.org/packages/a2/6b/e48dfc1191bc5b52950246275bf4089773e91cb5ba3592621723cdddca62/protobuf-6.33.5-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:a5cb85982d95d906df1e2210e58f8e4f1e3cdc088e52c921a041f9c9a0386de5", size = 427766, upload-time = "2026-01-29T21:51:25.413Z" }, - { url = "https://files.pythonhosted.org/packages/4e/b1/c79468184310de09d75095ed1314b839eb2f72df71097db9d1404a1b2717/protobuf-6.33.5-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:9b71e0281f36f179d00cbcb119cb19dec4d14a81393e5ea220f64b286173e190", size = 324638, upload-time = "2026-01-29T21:51:26.423Z" }, - { url = "https://files.pythonhosted.org/packages/c5/f5/65d838092fd01c44d16037953fd4c2cc851e783de9b8f02b27ec4ffd906f/protobuf-6.33.5-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:8afa18e1d6d20af15b417e728e9f60f3aa108ee76f23c3b2c07a2c3b546d3afd", size = 339411, upload-time = "2026-01-29T21:51:27.446Z" }, - { url = "https://files.pythonhosted.org/packages/9b/53/a9443aa3ca9ba8724fdfa02dd1887c1bcd8e89556b715cfbacca6b63dbec/protobuf-6.33.5-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:cbf16ba3350fb7b889fca858fb215967792dc125b35c7976ca4818bee3521cf0", size = 323465, upload-time = "2026-01-29T21:51:28.925Z" }, - { url = "https://files.pythonhosted.org/packages/57/bf/2086963c69bdac3d7cff1cc7ff79b8ce5ea0bec6797a017e1be338a46248/protobuf-6.33.5-py3-none-any.whl", hash = "sha256:69915a973dd0f60f31a08b8318b73eab2bd6a392c79184b3612226b0a3f8ec02", size = 170687, upload-time = "2026-01-29T21:51:32.557Z" }, + { url = "https://files.pythonhosted.org/packages/fc/9f/2f509339e89cfa6f6a4c4ff50438db9ca488dec341f7e454adad60150b00/protobuf-6.33.6-cp310-abi3-win32.whl", hash = "sha256:7d29d9b65f8afef196f8334e80d6bc1d5d4adedb449971fefd3723824e6e77d3", size = 425739, upload-time = "2026-03-18T19:04:48.373Z" }, + { url = "https://files.pythonhosted.org/packages/76/5d/683efcd4798e0030c1bab27374fd13a89f7c2515fb1f3123efdfaa5eab57/protobuf-6.33.6-cp310-abi3-win_amd64.whl", hash = "sha256:0cd27b587afca21b7cfa59a74dcbd48a50f0a6400cfb59391340ad729d91d326", size = 437089, upload-time = "2026-03-18T19:04:50.381Z" }, + { url = "https://files.pythonhosted.org/packages/5c/01/a3c3ed5cd186f39e7880f8303cc51385a198a81469d53d0fdecf1f64d929/protobuf-6.33.6-cp39-abi3-macosx_10_9_universal2.whl", hash = "sha256:9720e6961b251bde64edfdab7d500725a2af5280f3f4c87e57c0208376aa8c3a", size = 427737, upload-time = "2026-03-18T19:04:51.866Z" }, + { url = "https://files.pythonhosted.org/packages/ee/90/b3c01fdec7d2f627b3a6884243ba328c1217ed2d978def5c12dc50d328a3/protobuf-6.33.6-cp39-abi3-manylinux2014_aarch64.whl", hash = "sha256:e2afbae9b8e1825e3529f88d514754e094278bb95eadc0e199751cdd9a2e82a2", size = 324610, upload-time = "2026-03-18T19:04:53.096Z" }, + { url = "https://files.pythonhosted.org/packages/9b/ca/25afc144934014700c52e05103c2421997482d561f3101ff352e1292fb81/protobuf-6.33.6-cp39-abi3-manylinux2014_s390x.whl", hash = "sha256:c96c37eec15086b79762ed265d59ab204dabc53056e3443e702d2681f4b39ce3", size = 339381, upload-time = "2026-03-18T19:04:54.616Z" }, + { url = "https://files.pythonhosted.org/packages/16/92/d1e32e3e0d894fe00b15ce28ad4944ab692713f2e7f0a99787405e43533a/protobuf-6.33.6-cp39-abi3-manylinux2014_x86_64.whl", hash = "sha256:e9db7e292e0ab79dd108d7f1a94fe31601ce1ee3f7b79e0692043423020b0593", size = 323436, upload-time = "2026-03-18T19:04:55.768Z" }, + { url = "https://files.pythonhosted.org/packages/c4/72/02445137af02769918a93807b2b7890047c32bfb9f90371cbc12688819eb/protobuf-6.33.6-py3-none-any.whl", hash = "sha256:77179e006c476e69bf8e8ce866640091ec42e1beb80b213c3900006ecfba6901", size = 170656, upload-time = "2026-03-18T19:04:59.826Z" }, ] [[package]] @@ -1740,11 +1801,11 @@ wheels = [ [[package]] name = "pycparser" -version = "2.22" +version = "3.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/1d/b2/31537cf4b1ca988837256c910a668b553fceb8f069bedc4b1c826024b52c/pycparser-2.22.tar.gz", hash = "sha256:491c8be9c040f5390f5bf44a5b07752bd07f56edf992381b05c701439eec10f6", size = 172736, upload-time = "2024-03-30T13:22:22.564Z" } +sdist = { url = "https://files.pythonhosted.org/packages/1b/7d/92392ff7815c21062bea51aa7b87d45576f649f16458d78b7cf94b9ab2e6/pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29", size = 103492, upload-time = "2026-01-21T14:26:51.89Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/13/a3/a812df4e2dd5696d1f351d58b8fe16a405b234ad2886a0dab9183fb78109/pycparser-2.22-py3-none-any.whl", hash = "sha256:c3702b6d3dd8c7abc1afa565d7e63d53a1d0bd86cdc24edd75470f4de499cfcc", size = 117552, upload-time = "2024-03-30T13:22:20.476Z" }, + { url = "https://files.pythonhosted.org/packages/0c/c3/44f3fbbfa403ea2a7c779186dc20772604442dde72947e7d01069cbe98e3/pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992", size = 48172, upload-time = "2026-01-21T14:26:50.693Z" }, ] [[package]] @@ -1903,11 +1964,24 @@ wheels = [ [[package]] name = "pymysql" -version = "1.1.1" +version = "1.1.2" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b3/8f/ce59b5e5ed4ce8512f879ff1fa5ab699d211ae2495f1adaa5fbba2a1eada/pymysql-1.1.1.tar.gz", hash = "sha256:e127611aaf2b417403c60bf4dc570124aeb4a57f5f37b8e95ae399a42f904cd0", size = 47678, upload-time = "2024-05-21T11:03:43.722Z" } +sdist = { url = "https://files.pythonhosted.org/packages/f5/ae/1fe3fcd9f959efa0ebe200b8de88b5a5ce3e767e38c7ac32fb179f16a388/pymysql-1.1.2.tar.gz", hash = "sha256:4961d3e165614ae65014e361811a724e2044ad3ea3739de9903ae7c21f539f03", size = 48258, upload-time = "2025-08-24T12:55:55.146Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0c/94/e4181a1f6286f545507528c78016e00065ea913276888db2262507693ce5/PyMySQL-1.1.1-py3-none-any.whl", hash = "sha256:4de15da4c61dc132f4fb9ab763063e693d521a80fd0e87943b9a453dd4c19d6c", size = 44972, upload-time = "2024-05-21T11:03:41.216Z" }, + { url = "https://files.pythonhosted.org/packages/7c/4c/ad33b92b9864cbde84f259d5df035a6447f91891f5be77788e2a3892bce3/pymysql-1.1.2-py3-none-any.whl", hash = "sha256:e6b1d89711dd51f8f74b1631fe08f039e7d76cf67a42a323d3178f0f25762ed9", size = 45300, upload-time = "2025-08-24T12:55:53.394Z" }, +] + +[[package]] +name = "pyright" +version = "1.1.408" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "nodeenv" }, + { name = "typing-extensions" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/74/b2/5db700e52554b8f025faa9c3c624c59f1f6c8841ba81ab97641b54322f16/pyright-1.1.408.tar.gz", hash = "sha256:f28f2321f96852fa50b5829ea492f6adb0e6954568d1caa3f3af3a5f555eb684", size = 4400578, upload-time = "2026-01-08T08:07:38.795Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/0c/82/a2c93e32800940d9573fb28c346772a14778b84ba7524e691b324620ab89/pyright-1.1.408-py3-none-any.whl", hash = "sha256:090b32865f4fdb1e0e6cd82bf5618480d48eecd2eb2e70f960982a3d9a4c17c1", size = 6399144, upload-time = "2026-01-08T08:07:37.082Z" }, ] [[package]] @@ -1944,16 +2018,16 @@ wheels = [ [[package]] name = "pytest-cov" -version = "7.1.0" +version = "7.0.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "coverage", extra = ["toml"] }, { name = "pluggy" }, { name = "pytest" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/b1/51/a849f96e117386044471c8ec2bd6cfebacda285da9525c9106aeb28da671/pytest_cov-7.1.0.tar.gz", hash = "sha256:30674f2b5f6351aa09702a9c8c364f6a01c27aae0c1366ae8016160d1efc56b2", size = 55592, upload-time = "2026-03-21T20:11:16.284Z" } +sdist = { url = "https://files.pythonhosted.org/packages/5e/f7/c933acc76f5208b3b00089573cf6a2bc26dc80a8aece8f52bb7d6b1855ca/pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1", size = 54328, upload-time = "2025-09-09T10:57:02.113Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/9d/7a/d968e294073affff457b041c2be9868a40c1c71f4a35fcc1e45e5493067b/pytest_cov-7.1.0-py3-none-any.whl", hash = "sha256:a0461110b7865f9a271aa1b51e516c9a95de9d696734a2f71e3e78f46e1d4678", size = 22876, upload-time = "2026-03-21T20:11:14.438Z" }, + { url = "https://files.pythonhosted.org/packages/ee/49/1377b49de7d0c1ce41292161ea0f721913fa8722c19fb9c1e3aa0367eecb/pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861", size = 22424, upload-time = "2025-09-09T10:57:00.695Z" }, ] [[package]] @@ -2006,91 +2080,85 @@ wheels = [ ] [[package]] -name = "pytokens" -version = "0.4.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/b6/34/b4e015b99031667a7b960f888889c5bd34ef585c85e1cb56a594b92836ac/pytokens-0.4.1.tar.gz", hash = "sha256:292052fe80923aae2260c073f822ceba21f3872ced9a68bb7953b348e561179a", size = 23015, upload-time = "2026-01-30T01:03:45.924Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/42/24/f206113e05cb8ef51b3850e7ef88f20da6f4bf932190ceb48bd3da103e10/pytokens-0.4.1-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:2a44ed93ea23415c54f3face3b65ef2b844d96aeb3455b8a69b3df6beab6acc5", size = 161522, upload-time = "2026-01-30T01:02:50.393Z" }, - { url = "https://files.pythonhosted.org/packages/d4/e9/06a6bf1b90c2ed81a9c7d2544232fe5d2891d1cd480e8a1809ca354a8eb2/pytokens-0.4.1-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:add8bf86b71a5d9fb5b89f023a80b791e04fba57960aa790cc6125f7f1d39dfe", size = 246945, upload-time = "2026-01-30T01:02:52.399Z" }, - { url = "https://files.pythonhosted.org/packages/69/66/f6fb1007a4c3d8b682d5d65b7c1fb33257587a5f782647091e3408abe0b8/pytokens-0.4.1-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:670d286910b531c7b7e3c0b453fd8156f250adb140146d234a82219459b9640c", size = 259525, upload-time = "2026-01-30T01:02:53.737Z" }, - { url = "https://files.pythonhosted.org/packages/04/92/086f89b4d622a18418bac74ab5db7f68cf0c21cf7cc92de6c7b919d76c88/pytokens-0.4.1-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:4e691d7f5186bd2842c14813f79f8884bb03f5995f0575272009982c5ac6c0f7", size = 262693, upload-time = "2026-01-30T01:02:54.871Z" }, - { url = "https://files.pythonhosted.org/packages/b4/7b/8b31c347cf94a3f900bdde750b2e9131575a61fdb620d3d3c75832262137/pytokens-0.4.1-cp310-cp310-win_amd64.whl", hash = "sha256:27b83ad28825978742beef057bfe406ad6ed524b2d28c252c5de7b4a6dd48fa2", size = 103567, upload-time = "2026-01-30T01:02:56.414Z" }, - { url = "https://files.pythonhosted.org/packages/3d/92/790ebe03f07b57e53b10884c329b9a1a308648fc083a6d4a39a10a28c8fc/pytokens-0.4.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:d70e77c55ae8380c91c0c18dea05951482e263982911fc7410b1ffd1dadd3440", size = 160864, upload-time = "2026-01-30T01:02:57.882Z" }, - { url = "https://files.pythonhosted.org/packages/13/25/a4f555281d975bfdd1eba731450e2fe3a95870274da73fb12c40aeae7625/pytokens-0.4.1-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:4a58d057208cb9075c144950d789511220b07636dd2e4708d5645d24de666bdc", size = 248565, upload-time = "2026-01-30T01:02:59.912Z" }, - { url = "https://files.pythonhosted.org/packages/17/50/bc0394b4ad5b1601be22fa43652173d47e4c9efbf0044c62e9a59b747c56/pytokens-0.4.1-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b49750419d300e2b5a3813cf229d4e5a4c728dae470bcc89867a9ad6f25a722d", size = 260824, upload-time = "2026-01-30T01:03:01.471Z" }, - { url = "https://files.pythonhosted.org/packages/4e/54/3e04f9d92a4be4fc6c80016bc396b923d2a6933ae94b5f557c939c460ee0/pytokens-0.4.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:d9907d61f15bf7261d7e775bd5d7ee4d2930e04424bab1972591918497623a16", size = 264075, upload-time = "2026-01-30T01:03:04.143Z" }, - { url = "https://files.pythonhosted.org/packages/d1/1b/44b0326cb5470a4375f37988aea5d61b5cc52407143303015ebee94abfd6/pytokens-0.4.1-cp311-cp311-win_amd64.whl", hash = "sha256:ee44d0f85b803321710f9239f335aafe16553b39106384cef8e6de40cb4ef2f6", size = 103323, upload-time = "2026-01-30T01:03:05.412Z" }, - { url = "https://files.pythonhosted.org/packages/41/5d/e44573011401fb82e9d51e97f1290ceb377800fb4eed650b96f4753b499c/pytokens-0.4.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:140709331e846b728475786df8aeb27d24f48cbcf7bcd449f8de75cae7a45083", size = 160663, upload-time = "2026-01-30T01:03:06.473Z" }, - { url = "https://files.pythonhosted.org/packages/f0/e6/5bbc3019f8e6f21d09c41f8b8654536117e5e211a85d89212d59cbdab381/pytokens-0.4.1-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6d6c4268598f762bc8e91f5dbf2ab2f61f7b95bdc07953b602db879b3c8c18e1", size = 255626, upload-time = "2026-01-30T01:03:08.177Z" }, - { url = "https://files.pythonhosted.org/packages/bf/3c/2d5297d82286f6f3d92770289fd439956b201c0a4fc7e72efb9b2293758e/pytokens-0.4.1-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:24afde1f53d95348b5a0eb19488661147285ca4dd7ed752bbc3e1c6242a304d1", size = 269779, upload-time = "2026-01-30T01:03:09.756Z" }, - { url = "https://files.pythonhosted.org/packages/20/01/7436e9ad693cebda0551203e0bf28f7669976c60ad07d6402098208476de/pytokens-0.4.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:5ad948d085ed6c16413eb5fec6b3e02fa00dc29a2534f088d3302c47eb59adf9", size = 268076, upload-time = "2026-01-30T01:03:10.957Z" }, - { url = "https://files.pythonhosted.org/packages/2e/df/533c82a3c752ba13ae7ef238b7f8cdd272cf1475f03c63ac6cf3fcfb00b6/pytokens-0.4.1-cp312-cp312-win_amd64.whl", hash = "sha256:3f901fe783e06e48e8cbdc82d631fca8f118333798193e026a50ce1b3757ea68", size = 103552, upload-time = "2026-01-30T01:03:12.066Z" }, - { url = "https://files.pythonhosted.org/packages/cb/dc/08b1a080372afda3cceb4f3c0a7ba2bde9d6a5241f1edb02a22a019ee147/pytokens-0.4.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:8bdb9d0ce90cbf99c525e75a2fa415144fd570a1ba987380190e8b786bc6ef9b", size = 160720, upload-time = "2026-01-30T01:03:13.843Z" }, - { url = "https://files.pythonhosted.org/packages/64/0c/41ea22205da480837a700e395507e6a24425151dfb7ead73343d6e2d7ffe/pytokens-0.4.1-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5502408cab1cb18e128570f8d598981c68a50d0cbd7c61312a90507cd3a1276f", size = 254204, upload-time = "2026-01-30T01:03:14.886Z" }, - { url = "https://files.pythonhosted.org/packages/e0/d2/afe5c7f8607018beb99971489dbb846508f1b8f351fcefc225fcf4b2adc0/pytokens-0.4.1-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:29d1d8fb1030af4d231789959f21821ab6325e463f0503a61d204343c9b355d1", size = 268423, upload-time = "2026-01-30T01:03:15.936Z" }, - { url = "https://files.pythonhosted.org/packages/68/d4/00ffdbd370410c04e9591da9220a68dc1693ef7499173eb3e30d06e05ed1/pytokens-0.4.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:970b08dd6b86058b6dc07efe9e98414f5102974716232d10f32ff39701e841c4", size = 266859, upload-time = "2026-01-30T01:03:17.458Z" }, - { url = "https://files.pythonhosted.org/packages/a7/c9/c3161313b4ca0c601eeefabd3d3b576edaa9afdefd32da97210700e47652/pytokens-0.4.1-cp313-cp313-win_amd64.whl", hash = "sha256:9bd7d7f544d362576be74f9d5901a22f317efc20046efe2034dced238cbbfe78", size = 103520, upload-time = "2026-01-30T01:03:18.652Z" }, - { url = "https://files.pythonhosted.org/packages/8f/a7/b470f672e6fc5fee0a01d9e75005a0e617e162381974213a945fcd274843/pytokens-0.4.1-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:4a14d5f5fc78ce85e426aa159489e2d5961acf0e47575e08f35584009178e321", size = 160821, upload-time = "2026-01-30T01:03:19.684Z" }, - { url = "https://files.pythonhosted.org/packages/80/98/e83a36fe8d170c911f864bfded690d2542bfcfacb9c649d11a9e6eb9dc41/pytokens-0.4.1-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:97f50fd18543be72da51dd505e2ed20d2228c74e0464e4262e4899797803d7fa", size = 254263, upload-time = "2026-01-30T01:03:20.834Z" }, - { url = "https://files.pythonhosted.org/packages/0f/95/70d7041273890f9f97a24234c00b746e8da86df462620194cef1d411ddeb/pytokens-0.4.1-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:dc74c035f9bfca0255c1af77ddd2d6ae8419012805453e4b0e7513e17904545d", size = 268071, upload-time = "2026-01-30T01:03:21.888Z" }, - { url = "https://files.pythonhosted.org/packages/da/79/76e6d09ae19c99404656d7db9c35dfd20f2086f3eb6ecb496b5b31163bad/pytokens-0.4.1-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:f66a6bbe741bd431f6d741e617e0f39ec7257ca1f89089593479347cc4d13324", size = 271716, upload-time = "2026-01-30T01:03:23.633Z" }, - { url = "https://files.pythonhosted.org/packages/79/37/482e55fa1602e0a7ff012661d8c946bafdc05e480ea5a32f4f7e336d4aa9/pytokens-0.4.1-cp314-cp314-win_amd64.whl", hash = "sha256:b35d7e5ad269804f6697727702da3c517bb8a5228afa450ab0fa787732055fc9", size = 104539, upload-time = "2026-01-30T01:03:24.788Z" }, - { url = "https://files.pythonhosted.org/packages/30/e8/20e7db907c23f3d63b0be3b8a4fd1927f6da2395f5bcc7f72242bb963dfe/pytokens-0.4.1-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:8fcb9ba3709ff77e77f1c7022ff11d13553f3c30299a9fe246a166903e9091eb", size = 168474, upload-time = "2026-01-30T01:03:26.428Z" }, - { url = "https://files.pythonhosted.org/packages/d6/81/88a95ee9fafdd8f5f3452107748fd04c24930d500b9aba9738f3ade642cc/pytokens-0.4.1-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79fc6b8699564e1f9b521582c35435f1bd32dd06822322ec44afdeba666d8cb3", size = 290473, upload-time = "2026-01-30T01:03:27.415Z" }, - { url = "https://files.pythonhosted.org/packages/cf/35/3aa899645e29b6375b4aed9f8d21df219e7c958c4c186b465e42ee0a06bf/pytokens-0.4.1-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:d31b97b3de0f61571a124a00ffe9a81fb9939146c122c11060725bd5aea79975", size = 303485, upload-time = "2026-01-30T01:03:28.558Z" }, - { url = "https://files.pythonhosted.org/packages/52/a0/07907b6ff512674d9b201859f7d212298c44933633c946703a20c25e9d81/pytokens-0.4.1-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:967cf6e3fd4adf7de8fc73cd3043754ae79c36475c1c11d514fc72cf5490094a", size = 306698, upload-time = "2026-01-30T01:03:29.653Z" }, - { url = "https://files.pythonhosted.org/packages/39/2a/cbbf9250020a4a8dd53ba83a46c097b69e5eb49dd14e708f496f548c6612/pytokens-0.4.1-cp314-cp314t-win_amd64.whl", hash = "sha256:584c80c24b078eec1e227079d56dc22ff755e0ba8654d8383b2c549107528918", size = 116287, upload-time = "2026-01-30T01:03:30.912Z" }, - { url = "https://files.pythonhosted.org/packages/c6/78/397db326746f0a342855b81216ae1f0a32965deccfd7c830a2dbc66d2483/pytokens-0.4.1-py3-none-any.whl", hash = "sha256:26cef14744a8385f35d0e095dc8b3a7583f6c953c2e3d269c7f82484bf5ad2de", size = 13729, upload-time = "2026-01-30T01:03:45.029Z" }, +name = "python-discovery" +version = "1.2.0" +source = { registry = "https://pypi.org/simple" } +dependencies = [ + { name = "filelock" }, + { name = "platformdirs" }, +] +sdist = { url = "https://files.pythonhosted.org/packages/9c/90/bcce6b46823c9bec1757c964dc37ed332579be512e17a30e9698095dcae4/python_discovery-1.2.0.tar.gz", hash = "sha256:7d33e350704818b09e3da2bd419d37e21e7c30db6e0977bb438916e06b41b5b1", size = 58055, upload-time = "2026-03-19T01:43:08.248Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/c2/3c/2005227cb951df502412de2fa781f800663cccbef8d90ec6f1b371ac2c0d/python_discovery-1.2.0-py3-none-any.whl", hash = "sha256:1e108f1bbe2ed0ef089823d28805d5ad32be8e734b86a5f212bf89b71c266e4a", size = 31524, upload-time = "2026-03-19T01:43:07.045Z" }, ] [[package]] name = "pyyaml" -version = "6.0.2" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/54/ed/79a089b6be93607fa5cdaedf301d7dfb23af5f25c398d5ead2525b063e17/pyyaml-6.0.2.tar.gz", hash = "sha256:d584d9ec91ad65861cc08d42e834324ef890a082e591037abe114850ff7bbc3e", size = 130631, upload-time = "2024-08-06T20:33:50.674Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/95/a3fac87cb7158e231b5a6012e438c647e1a87f09f8e0d123acec8ab8bf71/PyYAML-6.0.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0a9a2848a5b7feac301353437eb7d5957887edbf81d56e903999a75a3d743086", size = 184199, upload-time = "2024-08-06T20:31:40.178Z" }, - { url = "https://files.pythonhosted.org/packages/c7/7a/68bd47624dab8fd4afbfd3c48e3b79efe09098ae941de5b58abcbadff5cb/PyYAML-6.0.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:29717114e51c84ddfba879543fb232a6ed60086602313ca38cce623c1d62cfbf", size = 171758, upload-time = "2024-08-06T20:31:42.173Z" }, - { url = "https://files.pythonhosted.org/packages/49/ee/14c54df452143b9ee9f0f29074d7ca5516a36edb0b4cc40c3f280131656f/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:8824b5a04a04a047e72eea5cec3bc266db09e35de6bdfe34c9436ac5ee27d237", size = 718463, upload-time = "2024-08-06T20:31:44.263Z" }, - { url = "https://files.pythonhosted.org/packages/4d/61/de363a97476e766574650d742205be468921a7b532aa2499fcd886b62530/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:7c36280e6fb8385e520936c3cb3b8042851904eba0e58d277dca80a5cfed590b", size = 719280, upload-time = "2024-08-06T20:31:50.199Z" }, - { url = "https://files.pythonhosted.org/packages/6b/4e/1523cb902fd98355e2e9ea5e5eb237cbc5f3ad5f3075fa65087aa0ecb669/PyYAML-6.0.2-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ec031d5d2feb36d1d1a24380e4db6d43695f3748343d99434e6f5f9156aaa2ed", size = 751239, upload-time = "2024-08-06T20:31:52.292Z" }, - { url = "https://files.pythonhosted.org/packages/b7/33/5504b3a9a4464893c32f118a9cc045190a91637b119a9c881da1cf6b7a72/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_aarch64.whl", hash = "sha256:936d68689298c36b53b29f23c6dbb74de12b4ac12ca6cfe0e047bedceea56180", size = 695802, upload-time = "2024-08-06T20:31:53.836Z" }, - { url = "https://files.pythonhosted.org/packages/5c/20/8347dcabd41ef3a3cdc4f7b7a2aff3d06598c8779faa189cdbf878b626a4/PyYAML-6.0.2-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:23502f431948090f597378482b4812b0caae32c22213aecf3b55325e049a6c68", size = 720527, upload-time = "2024-08-06T20:31:55.565Z" }, - { url = "https://files.pythonhosted.org/packages/be/aa/5afe99233fb360d0ff37377145a949ae258aaab831bde4792b32650a4378/PyYAML-6.0.2-cp310-cp310-win32.whl", hash = "sha256:2e99c6826ffa974fe6e27cdb5ed0021786b03fc98e5ee3c5bfe1fd5015f42b99", size = 144052, upload-time = "2024-08-06T20:31:56.914Z" }, - { url = "https://files.pythonhosted.org/packages/b5/84/0fa4b06f6d6c958d207620fc60005e241ecedceee58931bb20138e1e5776/PyYAML-6.0.2-cp310-cp310-win_amd64.whl", hash = "sha256:a4d3091415f010369ae4ed1fc6b79def9416358877534caf6a0fdd2146c87a3e", size = 161774, upload-time = "2024-08-06T20:31:58.304Z" }, - { url = "https://files.pythonhosted.org/packages/f8/aa/7af4e81f7acba21a4c6be026da38fd2b872ca46226673c89a758ebdc4fd2/PyYAML-6.0.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:cc1c1159b3d456576af7a3e4d1ba7e6924cb39de8f67111c735f6fc832082774", size = 184612, upload-time = "2024-08-06T20:32:03.408Z" }, - { url = "https://files.pythonhosted.org/packages/8b/62/b9faa998fd185f65c1371643678e4d58254add437edb764a08c5a98fb986/PyYAML-6.0.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1e2120ef853f59c7419231f3bf4e7021f1b936f6ebd222406c3b60212205d2ee", size = 172040, upload-time = "2024-08-06T20:32:04.926Z" }, - { url = "https://files.pythonhosted.org/packages/ad/0c/c804f5f922a9a6563bab712d8dcc70251e8af811fce4524d57c2c0fd49a4/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:5d225db5a45f21e78dd9358e58a98702a0302f2659a3c6cd320564b75b86f47c", size = 736829, upload-time = "2024-08-06T20:32:06.459Z" }, - { url = "https://files.pythonhosted.org/packages/51/16/6af8d6a6b210c8e54f1406a6b9481febf9c64a3109c541567e35a49aa2e7/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:5ac9328ec4831237bec75defaf839f7d4564be1e6b25ac710bd1a96321cc8317", size = 764167, upload-time = "2024-08-06T20:32:08.338Z" }, - { url = "https://files.pythonhosted.org/packages/75/e4/2c27590dfc9992f73aabbeb9241ae20220bd9452df27483b6e56d3975cc5/PyYAML-6.0.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:3ad2a3decf9aaba3d29c8f537ac4b243e36bef957511b4766cb0057d32b0be85", size = 762952, upload-time = "2024-08-06T20:32:14.124Z" }, - { url = "https://files.pythonhosted.org/packages/9b/97/ecc1abf4a823f5ac61941a9c00fe501b02ac3ab0e373c3857f7d4b83e2b6/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_aarch64.whl", hash = "sha256:ff3824dc5261f50c9b0dfb3be22b4567a6f938ccce4587b38952d85fd9e9afe4", size = 735301, upload-time = "2024-08-06T20:32:16.17Z" }, - { url = "https://files.pythonhosted.org/packages/45/73/0f49dacd6e82c9430e46f4a027baa4ca205e8b0a9dce1397f44edc23559d/PyYAML-6.0.2-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:797b4f722ffa07cc8d62053e4cff1486fa6dc094105d13fea7b1de7d8bf71c9e", size = 756638, upload-time = "2024-08-06T20:32:18.555Z" }, - { url = "https://files.pythonhosted.org/packages/22/5f/956f0f9fc65223a58fbc14459bf34b4cc48dec52e00535c79b8db361aabd/PyYAML-6.0.2-cp311-cp311-win32.whl", hash = "sha256:11d8f3dd2b9c1207dcaf2ee0bbbfd5991f571186ec9cc78427ba5bd32afae4b5", size = 143850, upload-time = "2024-08-06T20:32:19.889Z" }, - { url = "https://files.pythonhosted.org/packages/ed/23/8da0bbe2ab9dcdd11f4f4557ccaf95c10b9811b13ecced089d43ce59c3c8/PyYAML-6.0.2-cp311-cp311-win_amd64.whl", hash = "sha256:e10ce637b18caea04431ce14fabcf5c64a1c61ec9c56b071a4b7ca131ca52d44", size = 161980, upload-time = "2024-08-06T20:32:21.273Z" }, - { url = "https://files.pythonhosted.org/packages/86/0c/c581167fc46d6d6d7ddcfb8c843a4de25bdd27e4466938109ca68492292c/PyYAML-6.0.2-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:c70c95198c015b85feafc136515252a261a84561b7b1d51e3384e0655ddf25ab", size = 183873, upload-time = "2024-08-06T20:32:25.131Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0c/38374f5bb272c051e2a69281d71cba6fdb983413e6758b84482905e29a5d/PyYAML-6.0.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:ce826d6ef20b1bc864f0a68340c8b3287705cae2f8b4b1d932177dcc76721725", size = 173302, upload-time = "2024-08-06T20:32:26.511Z" }, - { url = "https://files.pythonhosted.org/packages/c3/93/9916574aa8c00aa06bbac729972eb1071d002b8e158bd0e83a3b9a20a1f7/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:1f71ea527786de97d1a0cc0eacd1defc0985dcf6b3f17bb77dcfc8c34bec4dc5", size = 739154, upload-time = "2024-08-06T20:32:28.363Z" }, - { url = "https://files.pythonhosted.org/packages/95/0f/b8938f1cbd09739c6da569d172531567dbcc9789e0029aa070856f123984/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:9b22676e8097e9e22e36d6b7bda33190d0d400f345f23d4065d48f4ca7ae0425", size = 766223, upload-time = "2024-08-06T20:32:30.058Z" }, - { url = "https://files.pythonhosted.org/packages/b9/2b/614b4752f2e127db5cc206abc23a8c19678e92b23c3db30fc86ab731d3bd/PyYAML-6.0.2-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:80bab7bfc629882493af4aa31a4cfa43a4c57c83813253626916b8c7ada83476", size = 767542, upload-time = "2024-08-06T20:32:31.881Z" }, - { url = "https://files.pythonhosted.org/packages/d4/00/dd137d5bcc7efea1836d6264f049359861cf548469d18da90cd8216cf05f/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_aarch64.whl", hash = "sha256:0833f8694549e586547b576dcfaba4a6b55b9e96098b36cdc7ebefe667dfed48", size = 731164, upload-time = "2024-08-06T20:32:37.083Z" }, - { url = "https://files.pythonhosted.org/packages/c9/1f/4f998c900485e5c0ef43838363ba4a9723ac0ad73a9dc42068b12aaba4e4/PyYAML-6.0.2-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:8b9c7197f7cb2738065c481a0461e50ad02f18c78cd75775628afb4d7137fb3b", size = 756611, upload-time = "2024-08-06T20:32:38.898Z" }, - { url = "https://files.pythonhosted.org/packages/df/d1/f5a275fdb252768b7a11ec63585bc38d0e87c9e05668a139fea92b80634c/PyYAML-6.0.2-cp312-cp312-win32.whl", hash = "sha256:ef6107725bd54b262d6dedcc2af448a266975032bc85ef0172c5f059da6325b4", size = 140591, upload-time = "2024-08-06T20:32:40.241Z" }, - { url = "https://files.pythonhosted.org/packages/0c/e8/4f648c598b17c3d06e8753d7d13d57542b30d56e6c2dedf9c331ae56312e/PyYAML-6.0.2-cp312-cp312-win_amd64.whl", hash = "sha256:7e7401d0de89a9a855c839bc697c079a4af81cf878373abd7dc625847d25cbd8", size = 156338, upload-time = "2024-08-06T20:32:41.93Z" }, - { url = "https://files.pythonhosted.org/packages/ef/e3/3af305b830494fa85d95f6d95ef7fa73f2ee1cc8ef5b495c7c3269fb835f/PyYAML-6.0.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:efdca5630322a10774e8e98e1af481aad470dd62c3170801852d752aa7a783ba", size = 181309, upload-time = "2024-08-06T20:32:43.4Z" }, - { url = "https://files.pythonhosted.org/packages/45/9f/3b1c20a0b7a3200524eb0076cc027a970d320bd3a6592873c85c92a08731/PyYAML-6.0.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:50187695423ffe49e2deacb8cd10510bc361faac997de9efef88badc3bb9e2d1", size = 171679, upload-time = "2024-08-06T20:32:44.801Z" }, - { url = "https://files.pythonhosted.org/packages/7c/9a/337322f27005c33bcb656c655fa78325b730324c78620e8328ae28b64d0c/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:0ffe8360bab4910ef1b9e87fb812d8bc0a308b0d0eef8c8f44e0254ab3b07133", size = 733428, upload-time = "2024-08-06T20:32:46.432Z" }, - { url = "https://files.pythonhosted.org/packages/a3/69/864fbe19e6c18ea3cc196cbe5d392175b4cf3d5d0ac1403ec3f2d237ebb5/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:17e311b6c678207928d649faa7cb0d7b4c26a0ba73d41e99c4fff6b6c3276484", size = 763361, upload-time = "2024-08-06T20:32:51.188Z" }, - { url = "https://files.pythonhosted.org/packages/04/24/b7721e4845c2f162d26f50521b825fb061bc0a5afcf9a386840f23ea19fa/PyYAML-6.0.2-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:70b189594dbe54f75ab3a1acec5f1e3faa7e8cf2f1e08d9b561cb41b845f69d5", size = 759523, upload-time = "2024-08-06T20:32:53.019Z" }, - { url = "https://files.pythonhosted.org/packages/2b/b2/e3234f59ba06559c6ff63c4e10baea10e5e7df868092bf9ab40e5b9c56b6/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_aarch64.whl", hash = "sha256:41e4e3953a79407c794916fa277a82531dd93aad34e29c2a514c2c0c5fe971cc", size = 726660, upload-time = "2024-08-06T20:32:54.708Z" }, - { url = "https://files.pythonhosted.org/packages/fe/0f/25911a9f080464c59fab9027482f822b86bf0608957a5fcc6eaac85aa515/PyYAML-6.0.2-cp313-cp313-musllinux_1_1_x86_64.whl", hash = "sha256:68ccc6023a3400877818152ad9a1033e3db8625d899c72eacb5a668902e4d652", size = 751597, upload-time = "2024-08-06T20:32:56.985Z" }, - { url = "https://files.pythonhosted.org/packages/14/0d/e2c3b43bbce3cf6bd97c840b46088a3031085179e596d4929729d8d68270/PyYAML-6.0.2-cp313-cp313-win32.whl", hash = "sha256:bc2fa7c6b47d6bc618dd7fb02ef6fdedb1090ec036abab80d4681424b84c1183", size = 140527, upload-time = "2024-08-06T20:33:03.001Z" }, - { url = "https://files.pythonhosted.org/packages/fa/de/02b54f42487e3d3c6efb3f89428677074ca7bf43aae402517bc7cca949f3/PyYAML-6.0.2-cp313-cp313-win_amd64.whl", hash = "sha256:8388ee1976c416731879ac16da0aff3f63b286ffdd57cdeb95f3f2e085687563", size = 156446, upload-time = "2024-08-06T20:33:04.33Z" }, +version = "6.0.3" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/05/8e/961c0007c59b8dd7729d542c61a4d537767a59645b82a0b521206e1e25c2/pyyaml-6.0.3.tar.gz", hash = "sha256:d76623373421df22fb4cf8817020cbb7ef15c725b9d5e45f17e189bfc384190f", size = 130960, upload-time = "2025-09-25T21:33:16.546Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/f4/a0/39350dd17dd6d6c6507025c0e53aef67a9293a6d37d3511f23ea510d5800/pyyaml-6.0.3-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:214ed4befebe12df36bcc8bc2b64b396ca31be9304b8f59e25c11cf94a4c033b", size = 184227, upload-time = "2025-09-25T21:31:46.04Z" }, + { url = "https://files.pythonhosted.org/packages/05/14/52d505b5c59ce73244f59c7a50ecf47093ce4765f116cdb98286a71eeca2/pyyaml-6.0.3-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:02ea2dfa234451bbb8772601d7b8e426c2bfa197136796224e50e35a78777956", size = 174019, upload-time = "2025-09-25T21:31:47.706Z" }, + { url = "https://files.pythonhosted.org/packages/43/f7/0e6a5ae5599c838c696adb4e6330a59f463265bfa1e116cfd1fbb0abaaae/pyyaml-6.0.3-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:b30236e45cf30d2b8e7b3e85881719e98507abed1011bf463a8fa23e9c3e98a8", size = 740646, upload-time = "2025-09-25T21:31:49.21Z" }, + { url = "https://files.pythonhosted.org/packages/2f/3a/61b9db1d28f00f8fd0ae760459a5c4bf1b941baf714e207b6eb0657d2578/pyyaml-6.0.3-cp310-cp310-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:66291b10affd76d76f54fad28e22e51719ef9ba22b29e1d7d03d6777a9174198", size = 840793, upload-time = "2025-09-25T21:31:50.735Z" }, + { url = "https://files.pythonhosted.org/packages/7a/1e/7acc4f0e74c4b3d9531e24739e0ab832a5edf40e64fbae1a9c01941cabd7/pyyaml-6.0.3-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:9c7708761fccb9397fe64bbc0395abcae8c4bf7b0eac081e12b809bf47700d0b", size = 770293, upload-time = "2025-09-25T21:31:51.828Z" }, + { url = "https://files.pythonhosted.org/packages/8b/ef/abd085f06853af0cd59fa5f913d61a8eab65d7639ff2a658d18a25d6a89d/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:418cf3f2111bc80e0933b2cd8cd04f286338bb88bdc7bc8e6dd775ebde60b5e0", size = 732872, upload-time = "2025-09-25T21:31:53.282Z" }, + { url = "https://files.pythonhosted.org/packages/1f/15/2bc9c8faf6450a8b3c9fc5448ed869c599c0a74ba2669772b1f3a0040180/pyyaml-6.0.3-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:5e0b74767e5f8c593e8c9b5912019159ed0533c70051e9cce3e8b6aa699fcd69", size = 758828, upload-time = "2025-09-25T21:31:54.807Z" }, + { url = "https://files.pythonhosted.org/packages/a3/00/531e92e88c00f4333ce359e50c19b8d1de9fe8d581b1534e35ccfbc5f393/pyyaml-6.0.3-cp310-cp310-win32.whl", hash = "sha256:28c8d926f98f432f88adc23edf2e6d4921ac26fb084b028c733d01868d19007e", size = 142415, upload-time = "2025-09-25T21:31:55.885Z" }, + { url = "https://files.pythonhosted.org/packages/2a/fa/926c003379b19fca39dd4634818b00dec6c62d87faf628d1394e137354d4/pyyaml-6.0.3-cp310-cp310-win_amd64.whl", hash = "sha256:bdb2c67c6c1390b63c6ff89f210c8fd09d9a1217a465701eac7316313c915e4c", size = 158561, upload-time = "2025-09-25T21:31:57.406Z" }, + { url = "https://files.pythonhosted.org/packages/6d/16/a95b6757765b7b031c9374925bb718d55e0a9ba8a1b6a12d25962ea44347/pyyaml-6.0.3-cp311-cp311-macosx_10_13_x86_64.whl", hash = "sha256:44edc647873928551a01e7a563d7452ccdebee747728c1080d881d68af7b997e", size = 185826, upload-time = "2025-09-25T21:31:58.655Z" }, + { url = "https://files.pythonhosted.org/packages/16/19/13de8e4377ed53079ee996e1ab0a9c33ec2faf808a4647b7b4c0d46dd239/pyyaml-6.0.3-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:652cb6edd41e718550aad172851962662ff2681490a8a711af6a4d288dd96824", size = 175577, upload-time = "2025-09-25T21:32:00.088Z" }, + { url = "https://files.pythonhosted.org/packages/0c/62/d2eb46264d4b157dae1275b573017abec435397aa59cbcdab6fc978a8af4/pyyaml-6.0.3-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:10892704fc220243f5305762e276552a0395f7beb4dbf9b14ec8fd43b57f126c", size = 775556, upload-time = "2025-09-25T21:32:01.31Z" }, + { url = "https://files.pythonhosted.org/packages/10/cb/16c3f2cf3266edd25aaa00d6c4350381c8b012ed6f5276675b9eba8d9ff4/pyyaml-6.0.3-cp311-cp311-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:850774a7879607d3a6f50d36d04f00ee69e7fc816450e5f7e58d7f17f1ae5c00", size = 882114, upload-time = "2025-09-25T21:32:03.376Z" }, + { url = "https://files.pythonhosted.org/packages/71/60/917329f640924b18ff085ab889a11c763e0b573da888e8404ff486657602/pyyaml-6.0.3-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b8bb0864c5a28024fac8a632c443c87c5aa6f215c0b126c449ae1a150412f31d", size = 806638, upload-time = "2025-09-25T21:32:04.553Z" }, + { url = "https://files.pythonhosted.org/packages/dd/6f/529b0f316a9fd167281a6c3826b5583e6192dba792dd55e3203d3f8e655a/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1d37d57ad971609cf3c53ba6a7e365e40660e3be0e5175fa9f2365a379d6095a", size = 767463, upload-time = "2025-09-25T21:32:06.152Z" }, + { url = "https://files.pythonhosted.org/packages/f2/6a/b627b4e0c1dd03718543519ffb2f1deea4a1e6d42fbab8021936a4d22589/pyyaml-6.0.3-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:37503bfbfc9d2c40b344d06b2199cf0e96e97957ab1c1b546fd4f87e53e5d3e4", size = 794986, upload-time = "2025-09-25T21:32:07.367Z" }, + { url = "https://files.pythonhosted.org/packages/45/91/47a6e1c42d9ee337c4839208f30d9f09caa9f720ec7582917b264defc875/pyyaml-6.0.3-cp311-cp311-win32.whl", hash = "sha256:8098f252adfa6c80ab48096053f512f2321f0b998f98150cea9bd23d83e1467b", size = 142543, upload-time = "2025-09-25T21:32:08.95Z" }, + { url = "https://files.pythonhosted.org/packages/da/e3/ea007450a105ae919a72393cb06f122f288ef60bba2dc64b26e2646fa315/pyyaml-6.0.3-cp311-cp311-win_amd64.whl", hash = "sha256:9f3bfb4965eb874431221a3ff3fdcddc7e74e3b07799e0e84ca4a0f867d449bf", size = 158763, upload-time = "2025-09-25T21:32:09.96Z" }, + { url = "https://files.pythonhosted.org/packages/d1/33/422b98d2195232ca1826284a76852ad5a86fe23e31b009c9886b2d0fb8b2/pyyaml-6.0.3-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:7f047e29dcae44602496db43be01ad42fc6f1cc0d8cd6c83d342306c32270196", size = 182063, upload-time = "2025-09-25T21:32:11.445Z" }, + { url = "https://files.pythonhosted.org/packages/89/a0/6cf41a19a1f2f3feab0e9c0b74134aa2ce6849093d5517a0c550fe37a648/pyyaml-6.0.3-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:fc09d0aa354569bc501d4e787133afc08552722d3ab34836a80547331bb5d4a0", size = 173973, upload-time = "2025-09-25T21:32:12.492Z" }, + { url = "https://files.pythonhosted.org/packages/ed/23/7a778b6bd0b9a8039df8b1b1d80e2e2ad78aa04171592c8a5c43a56a6af4/pyyaml-6.0.3-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9149cad251584d5fb4981be1ecde53a1ca46c891a79788c0df828d2f166bda28", size = 775116, upload-time = "2025-09-25T21:32:13.652Z" }, + { url = "https://files.pythonhosted.org/packages/65/30/d7353c338e12baef4ecc1b09e877c1970bd3382789c159b4f89d6a70dc09/pyyaml-6.0.3-cp312-cp312-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:5fdec68f91a0c6739b380c83b951e2c72ac0197ace422360e6d5a959d8d97b2c", size = 844011, upload-time = "2025-09-25T21:32:15.21Z" }, + { url = "https://files.pythonhosted.org/packages/8b/9d/b3589d3877982d4f2329302ef98a8026e7f4443c765c46cfecc8858c6b4b/pyyaml-6.0.3-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:ba1cc08a7ccde2d2ec775841541641e4548226580ab850948cbfda66a1befcdc", size = 807870, upload-time = "2025-09-25T21:32:16.431Z" }, + { url = "https://files.pythonhosted.org/packages/05/c0/b3be26a015601b822b97d9149ff8cb5ead58c66f981e04fedf4e762f4bd4/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8dc52c23056b9ddd46818a57b78404882310fb473d63f17b07d5c40421e47f8e", size = 761089, upload-time = "2025-09-25T21:32:17.56Z" }, + { url = "https://files.pythonhosted.org/packages/be/8e/98435a21d1d4b46590d5459a22d88128103f8da4c2d4cb8f14f2a96504e1/pyyaml-6.0.3-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:41715c910c881bc081f1e8872880d3c650acf13dfa8214bad49ed4cede7c34ea", size = 790181, upload-time = "2025-09-25T21:32:18.834Z" }, + { url = "https://files.pythonhosted.org/packages/74/93/7baea19427dcfbe1e5a372d81473250b379f04b1bd3c4c5ff825e2327202/pyyaml-6.0.3-cp312-cp312-win32.whl", hash = "sha256:96b533f0e99f6579b3d4d4995707cf36df9100d67e0c8303a0c55b27b5f99bc5", size = 137658, upload-time = "2025-09-25T21:32:20.209Z" }, + { url = "https://files.pythonhosted.org/packages/86/bf/899e81e4cce32febab4fb42bb97dcdf66bc135272882d1987881a4b519e9/pyyaml-6.0.3-cp312-cp312-win_amd64.whl", hash = "sha256:5fcd34e47f6e0b794d17de1b4ff496c00986e1c83f7ab2fb8fcfe9616ff7477b", size = 154003, upload-time = "2025-09-25T21:32:21.167Z" }, + { url = "https://files.pythonhosted.org/packages/1a/08/67bd04656199bbb51dbed1439b7f27601dfb576fb864099c7ef0c3e55531/pyyaml-6.0.3-cp312-cp312-win_arm64.whl", hash = "sha256:64386e5e707d03a7e172c0701abfb7e10f0fb753ee1d773128192742712a98fd", size = 140344, upload-time = "2025-09-25T21:32:22.617Z" }, + { url = "https://files.pythonhosted.org/packages/d1/11/0fd08f8192109f7169db964b5707a2f1e8b745d4e239b784a5a1dd80d1db/pyyaml-6.0.3-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:8da9669d359f02c0b91ccc01cac4a67f16afec0dac22c2ad09f46bee0697eba8", size = 181669, upload-time = "2025-09-25T21:32:23.673Z" }, + { url = "https://files.pythonhosted.org/packages/b1/16/95309993f1d3748cd644e02e38b75d50cbc0d9561d21f390a76242ce073f/pyyaml-6.0.3-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:2283a07e2c21a2aa78d9c4442724ec1eb15f5e42a723b99cb3d822d48f5f7ad1", size = 173252, upload-time = "2025-09-25T21:32:25.149Z" }, + { url = "https://files.pythonhosted.org/packages/50/31/b20f376d3f810b9b2371e72ef5adb33879b25edb7a6d072cb7ca0c486398/pyyaml-6.0.3-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:ee2922902c45ae8ccada2c5b501ab86c36525b883eff4255313a253a3160861c", size = 767081, upload-time = "2025-09-25T21:32:26.575Z" }, + { url = "https://files.pythonhosted.org/packages/49/1e/a55ca81e949270d5d4432fbbd19dfea5321eda7c41a849d443dc92fd1ff7/pyyaml-6.0.3-cp313-cp313-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a33284e20b78bd4a18c8c2282d549d10bc8408a2a7ff57653c0cf0b9be0afce5", size = 841159, upload-time = "2025-09-25T21:32:27.727Z" }, + { url = "https://files.pythonhosted.org/packages/74/27/e5b8f34d02d9995b80abcef563ea1f8b56d20134d8f4e5e81733b1feceb2/pyyaml-6.0.3-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0f29edc409a6392443abf94b9cf89ce99889a1dd5376d94316ae5145dfedd5d6", size = 801626, upload-time = "2025-09-25T21:32:28.878Z" }, + { url = "https://files.pythonhosted.org/packages/f9/11/ba845c23988798f40e52ba45f34849aa8a1f2d4af4b798588010792ebad6/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f7057c9a337546edc7973c0d3ba84ddcdf0daa14533c2065749c9075001090e6", size = 753613, upload-time = "2025-09-25T21:32:30.178Z" }, + { url = "https://files.pythonhosted.org/packages/3d/e0/7966e1a7bfc0a45bf0a7fb6b98ea03fc9b8d84fa7f2229e9659680b69ee3/pyyaml-6.0.3-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:eda16858a3cab07b80edaf74336ece1f986ba330fdb8ee0d6c0d68fe82bc96be", size = 794115, upload-time = "2025-09-25T21:32:31.353Z" }, + { url = "https://files.pythonhosted.org/packages/de/94/980b50a6531b3019e45ddeada0626d45fa85cbe22300844a7983285bed3b/pyyaml-6.0.3-cp313-cp313-win32.whl", hash = "sha256:d0eae10f8159e8fdad514efdc92d74fd8d682c933a6dd088030f3834bc8e6b26", size = 137427, upload-time = "2025-09-25T21:32:32.58Z" }, + { url = "https://files.pythonhosted.org/packages/97/c9/39d5b874e8b28845e4ec2202b5da735d0199dbe5b8fb85f91398814a9a46/pyyaml-6.0.3-cp313-cp313-win_amd64.whl", hash = "sha256:79005a0d97d5ddabfeeea4cf676af11e647e41d81c9a7722a193022accdb6b7c", size = 154090, upload-time = "2025-09-25T21:32:33.659Z" }, + { url = "https://files.pythonhosted.org/packages/73/e8/2bdf3ca2090f68bb3d75b44da7bbc71843b19c9f2b9cb9b0f4ab7a5a4329/pyyaml-6.0.3-cp313-cp313-win_arm64.whl", hash = "sha256:5498cd1645aa724a7c71c8f378eb29ebe23da2fc0d7a08071d89469bf1d2defb", size = 140246, upload-time = "2025-09-25T21:32:34.663Z" }, + { url = "https://files.pythonhosted.org/packages/9d/8c/f4bd7f6465179953d3ac9bc44ac1a8a3e6122cf8ada906b4f96c60172d43/pyyaml-6.0.3-cp314-cp314-macosx_10_13_x86_64.whl", hash = "sha256:8d1fab6bb153a416f9aeb4b8763bc0f22a5586065f86f7664fc23339fc1c1fac", size = 181814, upload-time = "2025-09-25T21:32:35.712Z" }, + { url = "https://files.pythonhosted.org/packages/bd/9c/4d95bb87eb2063d20db7b60faa3840c1b18025517ae857371c4dd55a6b3a/pyyaml-6.0.3-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:34d5fcd24b8445fadc33f9cf348c1047101756fd760b4dacb5c3e99755703310", size = 173809, upload-time = "2025-09-25T21:32:36.789Z" }, + { url = "https://files.pythonhosted.org/packages/92/b5/47e807c2623074914e29dabd16cbbdd4bf5e9b2db9f8090fa64411fc5382/pyyaml-6.0.3-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:501a031947e3a9025ed4405a168e6ef5ae3126c59f90ce0cd6f2bfc477be31b7", size = 766454, upload-time = "2025-09-25T21:32:37.966Z" }, + { url = "https://files.pythonhosted.org/packages/02/9e/e5e9b168be58564121efb3de6859c452fccde0ab093d8438905899a3a483/pyyaml-6.0.3-cp314-cp314-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:b3bc83488de33889877a0f2543ade9f70c67d66d9ebb4ac959502e12de895788", size = 836355, upload-time = "2025-09-25T21:32:39.178Z" }, + { url = "https://files.pythonhosted.org/packages/88/f9/16491d7ed2a919954993e48aa941b200f38040928474c9e85ea9e64222c3/pyyaml-6.0.3-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:c458b6d084f9b935061bc36216e8a69a7e293a2f1e68bf956dcd9e6cbcd143f5", size = 794175, upload-time = "2025-09-25T21:32:40.865Z" }, + { url = "https://files.pythonhosted.org/packages/dd/3f/5989debef34dc6397317802b527dbbafb2b4760878a53d4166579111411e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:7c6610def4f163542a622a73fb39f534f8c101d690126992300bf3207eab9764", size = 755228, upload-time = "2025-09-25T21:32:42.084Z" }, + { url = "https://files.pythonhosted.org/packages/d7/ce/af88a49043cd2e265be63d083fc75b27b6ed062f5f9fd6cdc223ad62f03e/pyyaml-6.0.3-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:5190d403f121660ce8d1d2c1bb2ef1bd05b5f68533fc5c2ea899bd15f4399b35", size = 789194, upload-time = "2025-09-25T21:32:43.362Z" }, + { url = "https://files.pythonhosted.org/packages/23/20/bb6982b26a40bb43951265ba29d4c246ef0ff59c9fdcdf0ed04e0687de4d/pyyaml-6.0.3-cp314-cp314-win_amd64.whl", hash = "sha256:4a2e8cebe2ff6ab7d1050ecd59c25d4c8bd7e6f400f5f82b96557ac0abafd0ac", size = 156429, upload-time = "2025-09-25T21:32:57.844Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/a4541072bb9422c8a883ab55255f918fa378ecf083f5b85e87fc2b4eda1b/pyyaml-6.0.3-cp314-cp314-win_arm64.whl", hash = "sha256:93dda82c9c22deb0a405ea4dc5f2d0cda384168e466364dec6255b293923b2f3", size = 143912, upload-time = "2025-09-25T21:32:59.247Z" }, + { url = "https://files.pythonhosted.org/packages/7c/f9/07dd09ae774e4616edf6cda684ee78f97777bdd15847253637a6f052a62f/pyyaml-6.0.3-cp314-cp314t-macosx_10_13_x86_64.whl", hash = "sha256:02893d100e99e03eda1c8fd5c441d8c60103fd175728e23e431db1b589cf5ab3", size = 189108, upload-time = "2025-09-25T21:32:44.377Z" }, + { url = "https://files.pythonhosted.org/packages/4e/78/8d08c9fb7ce09ad8c38ad533c1191cf27f7ae1effe5bb9400a46d9437fcf/pyyaml-6.0.3-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c1ff362665ae507275af2853520967820d9124984e0f7466736aea23d8611fba", size = 183641, upload-time = "2025-09-25T21:32:45.407Z" }, + { url = "https://files.pythonhosted.org/packages/7b/5b/3babb19104a46945cf816d047db2788bcaf8c94527a805610b0289a01c6b/pyyaml-6.0.3-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:6adc77889b628398debc7b65c073bcb99c4a0237b248cacaf3fe8a557563ef6c", size = 831901, upload-time = "2025-09-25T21:32:48.83Z" }, + { url = "https://files.pythonhosted.org/packages/8b/cc/dff0684d8dc44da4d22a13f35f073d558c268780ce3c6ba1b87055bb0b87/pyyaml-6.0.3-cp314-cp314t-manylinux2014_s390x.manylinux_2_17_s390x.manylinux_2_28_s390x.whl", hash = "sha256:a80cb027f6b349846a3bf6d73b5e95e782175e52f22108cfa17876aaeff93702", size = 861132, upload-time = "2025-09-25T21:32:50.149Z" }, + { url = "https://files.pythonhosted.org/packages/b1/5e/f77dc6b9036943e285ba76b49e118d9ea929885becb0a29ba8a7c75e29fe/pyyaml-6.0.3-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:00c4bdeba853cc34e7dd471f16b4114f4162dc03e6b7afcc2128711f0eca823c", size = 839261, upload-time = "2025-09-25T21:32:51.808Z" }, + { url = "https://files.pythonhosted.org/packages/ce/88/a9db1376aa2a228197c58b37302f284b5617f56a5d959fd1763fb1675ce6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:66e1674c3ef6f541c35191caae2d429b967b99e02040f5ba928632d9a7f0f065", size = 805272, upload-time = "2025-09-25T21:32:52.941Z" }, + { url = "https://files.pythonhosted.org/packages/da/92/1446574745d74df0c92e6aa4a7b0b3130706a4142b2d1a5869f2eaa423c6/pyyaml-6.0.3-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:16249ee61e95f858e83976573de0f5b2893b3677ba71c9dd36b9cf8be9ac6d65", size = 829923, upload-time = "2025-09-25T21:32:54.537Z" }, + { url = "https://files.pythonhosted.org/packages/f0/7a/1c7270340330e575b92f397352af856a8c06f230aa3e76f86b39d01b416a/pyyaml-6.0.3-cp314-cp314t-win_amd64.whl", hash = "sha256:4ad1906908f2f5ae4e5a8ddfce73c320c2a1429ec52eafd27138b7f1cbe341c9", size = 174062, upload-time = "2025-09-25T21:32:55.767Z" }, + { url = "https://files.pythonhosted.org/packages/f1/12/de94a39c2ef588c7e6455cfbe7343d3b2dc9d6b6b2f40c4c6565744c873d/pyyaml-6.0.3-cp314-cp314t-win_arm64.whl", hash = "sha256:ebc55a14a21cb14062aa4162f906cd962b28e2e9ea38f9b4391244cd8de4ae0b", size = 149341, upload-time = "2025-09-25T21:32:56.828Z" }, ] [[package]] name = "requests" -version = "2.33.0" +version = "2.32.5" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "certifi" }, @@ -2098,9 +2166,9 @@ dependencies = [ { name = "idna" }, { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/34/64/8860370b167a9721e8956ae116825caff829224fbca0ca6e7bf8ddef8430/requests-2.33.0.tar.gz", hash = "sha256:c7ebc5e8b0f21837386ad0e1c8fe8b829fa5f544d8df3b2253bff14ef29d7652", size = 134232, upload-time = "2026-03-25T15:10:41.586Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c9/74/b3ff8e6c8446842c3f5c837e9c3dfcfe2018ea6ecef224c710c85ef728f4/requests-2.32.5.tar.gz", hash = "sha256:dbba0bac56e100853db0ea71b82b4dfd5fe2bf6d3754a8893c3af500cec7d7cf", size = 134517, upload-time = "2025-08-18T20:46:02.573Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" }, + { url = "https://files.pythonhosted.org/packages/1e/db/4254e3eabe8020b458f1a747140d32277ec7a271daf1d235b70dc0b4e6e3/requests-2.32.5-py3-none-any.whl", hash = "sha256:2462f94637a34fd532264295e186976db0f5d453d1cdd31473c85a6a161affb6", size = 64738, upload-time = "2025-08-18T20:46:00.542Z" }, ] [[package]] @@ -2115,50 +2183,38 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/8e/67/afbb0978d5399bc9ea200f1d4489a23c9a1dad4eee6376242b8182389c79/respx-0.22.0-py2.py3-none-any.whl", hash = "sha256:631128d4c9aba15e56903fb5f66fb1eff412ce28dd387ca3a81339e52dbd3ad0", size = 25127, upload-time = "2024-12-19T22:33:57.837Z" }, ] -[[package]] -name = "rsa" -version = "4.9.1" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "pyasn1" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/da/8a/22b7beea3ee0d44b1916c0c1cb0ee3af23b700b6da9f04991899d0c555d4/rsa-4.9.1.tar.gz", hash = "sha256:e7bdbfdb5497da4c07dfd35530e1a902659db6ff241e39d9953cad06ebd0ae75", size = 29034, upload-time = "2025-04-16T09:51:18.218Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/64/8d/0133e4eb4beed9e425d9a98ed6e081a55d195481b7632472be1af08d2f6b/rsa-4.9.1-py3-none-any.whl", hash = "sha256:68635866661c6836b8d39430f97a996acbd61bfa49406748ea243539fe239762", size = 34696, upload-time = "2025-04-16T09:51:17.142Z" }, -] - [[package]] name = "ruff" -version = "0.15.8" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/14/b0/73cf7550861e2b4824950b8b52eebdcc5adc792a00c514406556c5b80817/ruff-0.15.8.tar.gz", hash = "sha256:995f11f63597ee362130d1d5a327a87cb6f3f5eae3094c620bcc632329a4d26e", size = 4610921, upload-time = "2026-03-26T18:39:38.675Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/4a/92/c445b0cd6da6e7ae51e954939cb69f97e008dbe750cfca89b8cedc081be7/ruff-0.15.8-py3-none-linux_armv6l.whl", hash = "sha256:cbe05adeba76d58162762d6b239c9056f1a15a55bd4b346cfd21e26cd6ad7bc7", size = 10527394, upload-time = "2026-03-26T18:39:41.566Z" }, - { url = "https://files.pythonhosted.org/packages/eb/92/f1c662784d149ad1414cae450b082cf736430c12ca78367f20f5ed569d65/ruff-0.15.8-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:d3e3d0b6ba8dca1b7ef9ab80a28e840a20070c4b62e56d675c24f366ef330570", size = 10905693, upload-time = "2026-03-26T18:39:30.364Z" }, - { url = "https://files.pythonhosted.org/packages/ca/f2/7a631a8af6d88bcef997eb1bf87cc3da158294c57044aafd3e17030613de/ruff-0.15.8-py3-none-macosx_11_0_arm64.whl", hash = "sha256:6ee3ae5c65a42f273f126686353f2e08ff29927b7b7e203b711514370d500de3", size = 10323044, upload-time = "2026-03-26T18:39:33.37Z" }, - { url = "https://files.pythonhosted.org/packages/67/18/1bf38e20914a05e72ef3b9569b1d5c70a7ef26cd188d69e9ca8ef588d5bf/ruff-0.15.8-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:fdce027ada77baa448077ccc6ebb2fa9c3c62fd110d8659d601cf2f475858d94", size = 10629135, upload-time = "2026-03-26T18:39:44.142Z" }, - { url = "https://files.pythonhosted.org/packages/d2/e9/138c150ff9af60556121623d41aba18b7b57d95ac032e177b6a53789d279/ruff-0.15.8-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:12e617fc01a95e5821648a6df341d80456bd627bfab8a829f7cfc26a14a4b4a3", size = 10348041, upload-time = "2026-03-26T18:39:52.178Z" }, - { url = "https://files.pythonhosted.org/packages/02/f1/5bfb9298d9c323f842c5ddeb85f1f10ef51516ac7a34ba446c9347d898df/ruff-0.15.8-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:432701303b26416d22ba696c39f2c6f12499b89093b61360abc34bcc9bf07762", size = 11121987, upload-time = "2026-03-26T18:39:55.195Z" }, - { url = "https://files.pythonhosted.org/packages/10/11/6da2e538704e753c04e8d86b1fc55712fdbdcc266af1a1ece7a51fff0d10/ruff-0.15.8-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:d910ae974b7a06a33a057cb87d2a10792a3b2b3b35e33d2699fdf63ec8f6b17a", size = 11951057, upload-time = "2026-03-26T18:39:19.18Z" }, - { url = "https://files.pythonhosted.org/packages/83/f0/c9208c5fd5101bf87002fed774ff25a96eea313d305f1e5d5744698dc314/ruff-0.15.8-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:2033f963c43949d51e6fdccd3946633c6b37c484f5f98c3035f49c27395a8ab8", size = 11464613, upload-time = "2026-03-26T18:40:06.301Z" }, - { url = "https://files.pythonhosted.org/packages/f8/22/d7f2fabdba4fae9f3b570e5605d5eb4500dcb7b770d3217dca4428484b17/ruff-0.15.8-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0f29b989a55572fb885b77464cf24af05500806ab4edf9a0fd8977f9759d85b1", size = 11257557, upload-time = "2026-03-26T18:39:57.972Z" }, - { url = "https://files.pythonhosted.org/packages/71/8c/382a9620038cf6906446b23ce8632ab8c0811b8f9d3e764f58bedd0c9a6f/ruff-0.15.8-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:ac51d486bf457cdc985a412fb1801b2dfd1bd8838372fc55de64b1510eff4bec", size = 11169440, upload-time = "2026-03-26T18:39:22.205Z" }, - { url = "https://files.pythonhosted.org/packages/4d/0d/0994c802a7eaaf99380085e4e40c845f8e32a562e20a38ec06174b52ef24/ruff-0.15.8-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:c9861eb959edab053c10ad62c278835ee69ca527b6dcd72b47d5c1e5648964f6", size = 10605963, upload-time = "2026-03-26T18:39:46.682Z" }, - { url = "https://files.pythonhosted.org/packages/19/aa/d624b86f5b0aad7cef6bbf9cd47a6a02dfdc4f72c92a337d724e39c9d14b/ruff-0.15.8-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:8d9a5b8ea13f26ae90838afc33f91b547e61b794865374f114f349e9036835fb", size = 10357484, upload-time = "2026-03-26T18:39:49.176Z" }, - { url = "https://files.pythonhosted.org/packages/35/c3/e0b7835d23001f7d999f3895c6b569927c4d39912286897f625736e1fd04/ruff-0.15.8-py3-none-musllinux_1_2_i686.whl", hash = "sha256:c2a33a529fb3cbc23a7124b5c6ff121e4d6228029cba374777bd7649cc8598b8", size = 10830426, upload-time = "2026-03-26T18:40:03.702Z" }, - { url = "https://files.pythonhosted.org/packages/f0/51/ab20b322f637b369383adc341d761eaaa0f0203d6b9a7421cd6e783d81b9/ruff-0.15.8-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:75e5cd06b1cf3f47a3996cfc999226b19aa92e7cce682dcd62f80d7035f98f49", size = 11345125, upload-time = "2026-03-26T18:39:27.799Z" }, - { url = "https://files.pythonhosted.org/packages/37/e6/90b2b33419f59d0f2c4c8a48a4b74b460709a557e8e0064cf33ad894f983/ruff-0.15.8-py3-none-win32.whl", hash = "sha256:bc1f0a51254ba21767bfa9a8b5013ca8149dcf38092e6a9eb704d876de94dc34", size = 10571959, upload-time = "2026-03-26T18:39:36.117Z" }, - { url = "https://files.pythonhosted.org/packages/1f/a2/ef467cb77099062317154c63f234b8a7baf7cb690b99af760c5b68b9ee7f/ruff-0.15.8-py3-none-win_amd64.whl", hash = "sha256:04f79eff02a72db209d47d665ba7ebcad609d8918a134f86cb13dd132159fc89", size = 11743893, upload-time = "2026-03-26T18:39:25.01Z" }, - { url = "https://files.pythonhosted.org/packages/15/e2/77be4fff062fa78d9b2a4dea85d14785dac5f1d0c1fb58ed52331f0ebe28/ruff-0.15.8-py3-none-win_arm64.whl", hash = "sha256:cf891fa8e3bb430c0e7fac93851a5978fc99c8fa2c053b57b118972866f8e5f2", size = 11048175, upload-time = "2026-03-26T18:40:01.06Z" }, +version = "0.15.7" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/a1/22/9e4f66ee588588dc6c9af6a994e12d26e19efbe874d1a909d09a6dac7a59/ruff-0.15.7.tar.gz", hash = "sha256:04f1ae61fc20fe0b148617c324d9d009b5f63412c0b16474f3d5f1a1a665f7ac", size = 4601277, upload-time = "2026-03-19T16:26:22.605Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/41/2f/0b08ced94412af091807b6119ca03755d651d3d93a242682bf020189db94/ruff-0.15.7-py3-none-linux_armv6l.whl", hash = "sha256:a81cc5b6910fb7dfc7c32d20652e50fa05963f6e13ead3c5915c41ac5d16668e", size = 10489037, upload-time = "2026-03-19T16:26:32.47Z" }, + { url = "https://files.pythonhosted.org/packages/91/4a/82e0fa632e5c8b1eba5ee86ecd929e8ff327bbdbfb3c6ac5d81631bef605/ruff-0.15.7-py3-none-macosx_10_12_x86_64.whl", hash = "sha256:722d165bd52403f3bdabc0ce9e41fc47070ac56d7a91b4e0d097b516a53a3477", size = 10955433, upload-time = "2026-03-19T16:27:00.205Z" }, + { url = "https://files.pythonhosted.org/packages/ab/10/12586735d0ff42526ad78c049bf51d7428618c8b5c467e72508c694119df/ruff-0.15.7-py3-none-macosx_11_0_arm64.whl", hash = "sha256:7fbc2448094262552146cbe1b9643a92f66559d3761f1ad0656d4991491af49e", size = 10269302, upload-time = "2026-03-19T16:26:26.183Z" }, + { url = "https://files.pythonhosted.org/packages/eb/5d/32b5c44ccf149a26623671df49cbfbd0a0ae511ff3df9d9d2426966a8d57/ruff-0.15.7-py3-none-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:6b39329b60eba44156d138275323cc726bbfbddcec3063da57caa8a8b1d50adf", size = 10607625, upload-time = "2026-03-19T16:27:03.263Z" }, + { url = "https://files.pythonhosted.org/packages/5d/f1/f0001cabe86173aaacb6eb9bb734aa0605f9a6aa6fa7d43cb49cbc4af9c9/ruff-0.15.7-py3-none-manylinux_2_17_armv7l.manylinux2014_armv7l.whl", hash = "sha256:87768c151808505f2bfc93ae44e5f9e7c8518943e5074f76ac21558ef5627c85", size = 10324743, upload-time = "2026-03-19T16:27:09.791Z" }, + { url = "https://files.pythonhosted.org/packages/7a/87/b8a8f3d56b8d848008559e7c9d8bf367934d5367f6d932ba779456e2f73b/ruff-0.15.7-py3-none-manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:fb0511670002c6c529ec66c0e30641c976c8963de26a113f3a30456b702468b0", size = 11138536, upload-time = "2026-03-19T16:27:06.101Z" }, + { url = "https://files.pythonhosted.org/packages/e4/f2/4fd0d05aab0c5934b2e1464784f85ba2eab9d54bffc53fb5430d1ed8b829/ruff-0.15.7-py3-none-manylinux_2_17_ppc64le.manylinux2014_ppc64le.whl", hash = "sha256:e0d19644f801849229db8345180a71bee5407b429dd217f853ec515e968a6912", size = 11994292, upload-time = "2026-03-19T16:26:48.718Z" }, + { url = "https://files.pythonhosted.org/packages/64/22/fc4483871e767e5e95d1622ad83dad5ebb830f762ed0420fde7dfa9d9b08/ruff-0.15.7-py3-none-manylinux_2_17_s390x.manylinux2014_s390x.whl", hash = "sha256:4806d8e09ef5e84eb19ba833d0442f7e300b23fe3f0981cae159a248a10f0036", size = 11398981, upload-time = "2026-03-19T16:26:54.513Z" }, + { url = "https://files.pythonhosted.org/packages/b0/99/66f0343176d5eab02c3f7fcd2de7a8e0dd7a41f0d982bee56cd1c24db62b/ruff-0.15.7-py3-none-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:dce0896488562f09a27b9c91b1f58a097457143931f3c4d519690dea54e624c5", size = 11242422, upload-time = "2026-03-19T16:26:29.277Z" }, + { url = "https://files.pythonhosted.org/packages/5d/3a/a7060f145bfdcce4c987ea27788b30c60e2c81d6e9a65157ca8afe646328/ruff-0.15.7-py3-none-manylinux_2_31_riscv64.whl", hash = "sha256:1852ce241d2bc89e5dc823e03cff4ce73d816b5c6cdadd27dbfe7b03217d2a12", size = 11232158, upload-time = "2026-03-19T16:26:42.321Z" }, + { url = "https://files.pythonhosted.org/packages/a7/53/90fbb9e08b29c048c403558d3cdd0adf2668b02ce9d50602452e187cd4af/ruff-0.15.7-py3-none-musllinux_1_2_aarch64.whl", hash = "sha256:5f3e4b221fb4bd293f79912fc5e93a9063ebd6d0dcbd528f91b89172a9b8436c", size = 10577861, upload-time = "2026-03-19T16:26:57.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/aa/5f486226538fe4d0f0439e2da1716e1acf895e2a232b26f2459c55f8ddad/ruff-0.15.7-py3-none-musllinux_1_2_armv7l.whl", hash = "sha256:b15e48602c9c1d9bdc504b472e90b90c97dc7d46c7028011ae67f3861ceba7b4", size = 10327310, upload-time = "2026-03-19T16:26:35.909Z" }, + { url = "https://files.pythonhosted.org/packages/99/9e/271afdffb81fe7bfc8c43ba079e9d96238f674380099457a74ccb3863857/ruff-0.15.7-py3-none-musllinux_1_2_i686.whl", hash = "sha256:1b4705e0e85cedc74b0a23cf6a179dbb3df184cb227761979cc76c0440b5ab0d", size = 10840752, upload-time = "2026-03-19T16:26:45.723Z" }, + { url = "https://files.pythonhosted.org/packages/bf/29/a4ae78394f76c7759953c47884eb44de271b03a66634148d9f7d11e721bd/ruff-0.15.7-py3-none-musllinux_1_2_x86_64.whl", hash = "sha256:112c1fa316a558bb34319282c1200a8bf0495f1b735aeb78bfcb2991e6087580", size = 11336961, upload-time = "2026-03-19T16:26:39.076Z" }, + { url = "https://files.pythonhosted.org/packages/26/6b/8786ba5736562220d588a2f6653e6c17e90c59ced34a2d7b512ef8956103/ruff-0.15.7-py3-none-win32.whl", hash = "sha256:6d39e2d3505b082323352f733599f28169d12e891f7dd407f2d4f54b4c2886de", size = 10582538, upload-time = "2026-03-19T16:26:15.992Z" }, + { url = "https://files.pythonhosted.org/packages/2b/e9/346d4d3fffc6871125e877dae8d9a1966b254fbd92a50f8561078b88b099/ruff-0.15.7-py3-none-win_amd64.whl", hash = "sha256:4d53d712ddebcd7dace1bc395367aec12c057aacfe9adbb6d832302575f4d3a1", size = 11755839, upload-time = "2026-03-19T16:26:19.897Z" }, + { url = "https://files.pythonhosted.org/packages/8f/e8/726643a3ea68c727da31570bde48c7a10f1aa60eddd628d94078fec586ff/ruff-0.15.7-py3-none-win_arm64.whl", hash = "sha256:18e8d73f1c3fdf27931497972250340f92e8c861722161a9caeb89a58ead6ed2", size = 11023304, upload-time = "2026-03-19T16:26:51.669Z" }, ] [[package]] name = "setuptools" -version = "80.9.0" +version = "82.0.1" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/5d/3bf57dcd21979b887f014ea83c24ae194cfcd12b9e0fda66b957c69d1fca/setuptools-80.9.0.tar.gz", hash = "sha256:f36b47402ecde768dbfafc46e8e4207b4360c654f1f3bb84475f0a28628fb19c", size = 1319958, upload-time = "2025-05-27T00:56:51.443Z" } +sdist = { url = "https://files.pythonhosted.org/packages/4f/db/cfac1baf10650ab4d1c111714410d2fbb77ac5a616db26775db562c8fab2/setuptools-82.0.1.tar.gz", hash = "sha256:7d872682c5d01cfde07da7bccc7b65469d3dca203318515ada1de5eda35efbf9", size = 1152316, upload-time = "2026-03-09T12:47:17.221Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772/setuptools-80.9.0-py3-none-any.whl", hash = "sha256:062d34222ad13e0cc312a4c02d73f059e86a4acbfbdea8f8f76b28c99f306922", size = 1201486, upload-time = "2025-05-27T00:56:49.664Z" }, + { url = "https://files.pythonhosted.org/packages/9d/76/f789f7a86709c6b087c5a2f52f911838cad707cc613162401badc665acfe/setuptools-82.0.1-py3-none-any.whl", hash = "sha256:a59e362652f08dcd477c78bb6e7bd9d80a7995bc73ce773050228a348ce2e5bb", size = 1006223, upload-time = "2026-03-09T12:47:15.026Z" }, ] [[package]] @@ -2190,47 +2246,62 @@ wheels = [ [[package]] name = "sqlalchemy" -version = "2.0.42" +version = "2.0.48" source = { registry = "https://pypi.org/simple" } dependencies = [ - { name = "greenlet", marker = "(python_full_version < '3.14' and platform_machine == 'AMD64') or (python_full_version < '3.14' and platform_machine == 'WIN32') or (python_full_version < '3.14' and platform_machine == 'aarch64') or (python_full_version < '3.14' and platform_machine == 'amd64') or (python_full_version < '3.14' and platform_machine == 'ppc64le') or (python_full_version < '3.14' and platform_machine == 'win32') or (python_full_version < '3.14' and platform_machine == 'x86_64')" }, + { name = "greenlet", marker = "platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64'" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/5a/03/a0af991e3a43174d6b83fca4fb399745abceddd1171bdabae48ce877ff47/sqlalchemy-2.0.42.tar.gz", hash = "sha256:160bedd8a5c28765bd5be4dec2d881e109e33b34922e50a3b881a7681773ac5f", size = 9749972, upload-time = "2025-07-29T12:48:09.323Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/3a/12/33ff43214c2c6cc87499b402fe419869d2980a08101c991daae31345e901/sqlalchemy-2.0.42-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:172b244753e034d91a826f80a9a70f4cbac690641207f2217f8404c261473efe", size = 2130469, upload-time = "2025-07-29T13:25:15.215Z" }, - { url = "https://files.pythonhosted.org/packages/63/c4/4d2f2c21ddde9a2c7f7b258b202d6af0bac9fc5abfca5de367461c86d766/sqlalchemy-2.0.42-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:be28f88abd74af8519a4542185ee80ca914933ca65cdfa99504d82af0e4210df", size = 2120393, upload-time = "2025-07-29T13:25:16.367Z" }, - { url = "https://files.pythonhosted.org/packages/a8/0d/5ff2f2dfbac10e4a9ade1942f8985ffc4bd8f157926b1f8aed553dfe3b88/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:98b344859d282fde388047f1710860bb23f4098f705491e06b8ab52a48aafea9", size = 3206173, upload-time = "2025-07-29T13:29:00.623Z" }, - { url = "https://files.pythonhosted.org/packages/1f/59/71493fe74bd76a773ae8fa0c50bfc2ccac1cbf7cfa4f9843ad92897e6dcf/sqlalchemy-2.0.42-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:97978d223b11f1d161390a96f28c49a13ce48fdd2fed7683167c39bdb1b8aa09", size = 3206910, upload-time = "2025-07-29T13:24:50.58Z" }, - { url = "https://files.pythonhosted.org/packages/a9/51/01b1d85bbb492a36b25df54a070a0f887052e9b190dff71263a09f48576b/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:e35b9b000c59fcac2867ab3a79fc368a6caca8706741beab3b799d47005b3407", size = 3145479, upload-time = "2025-07-29T13:29:02.3Z" }, - { url = "https://files.pythonhosted.org/packages/fa/78/10834f010e2a3df689f6d1888ea6ea0074ff10184e6a550b8ed7f9189a89/sqlalchemy-2.0.42-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:bc7347ad7a7b1c78b94177f2d57263113bb950e62c59b96ed839b131ea4234e1", size = 3169605, upload-time = "2025-07-29T13:24:52.135Z" }, - { url = "https://files.pythonhosted.org/packages/0c/75/e6fdd66d237582c8488dd1dfa90899f6502822fbd866363ab70e8ac4a2ce/sqlalchemy-2.0.42-cp310-cp310-win32.whl", hash = "sha256:739e58879b20a179156b63aa21f05ccacfd3e28e08e9c2b630ff55cd7177c4f1", size = 2098759, upload-time = "2025-07-29T13:23:55.809Z" }, - { url = "https://files.pythonhosted.org/packages/a5/a8/366db192641c2c2d1ea8977e7c77b65a0d16a7858907bb76ea68b9dd37af/sqlalchemy-2.0.42-cp310-cp310-win_amd64.whl", hash = "sha256:1aef304ada61b81f1955196f584b9e72b798ed525a7c0b46e09e98397393297b", size = 2122423, upload-time = "2025-07-29T13:23:56.968Z" }, - { url = "https://files.pythonhosted.org/packages/ea/3c/7bfd65f3c2046e2fb4475b21fa0b9d7995f8c08bfa0948df7a4d2d0de869/sqlalchemy-2.0.42-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:c34100c0b7ea31fbc113c124bcf93a53094f8951c7bf39c45f39d327bad6d1e7", size = 2133779, upload-time = "2025-07-29T13:25:18.446Z" }, - { url = "https://files.pythonhosted.org/packages/66/17/19be542fe9dd64a766090e90e789e86bdaa608affda6b3c1e118a25a2509/sqlalchemy-2.0.42-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:ad59dbe4d1252448c19d171dfba14c74e7950b46dc49d015722a4a06bfdab2b0", size = 2123843, upload-time = "2025-07-29T13:25:19.749Z" }, - { url = "https://files.pythonhosted.org/packages/14/fc/83e45fc25f0acf1c26962ebff45b4c77e5570abb7c1a425a54b00bcfa9c7/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:f9187498c2149919753a7fd51766ea9c8eecdec7da47c1b955fa8090bc642eaa", size = 3294824, upload-time = "2025-07-29T13:29:03.879Z" }, - { url = "https://files.pythonhosted.org/packages/b9/81/421efc09837104cd1a267d68b470e5b7b6792c2963b8096ca1e060ba0975/sqlalchemy-2.0.42-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:1f092cf83ebcafba23a247f5e03f99f5436e3ef026d01c8213b5eca48ad6efa9", size = 3294662, upload-time = "2025-07-29T13:24:53.715Z" }, - { url = "https://files.pythonhosted.org/packages/2f/ba/55406e09d32ed5e5f9e8aaec5ef70c4f20b4ae25b9fa9784f4afaa28e7c3/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:fc6afee7e66fdba4f5a68610b487c1f754fccdc53894a9567785932dbb6a265e", size = 3229413, upload-time = "2025-07-29T13:29:05.638Z" }, - { url = "https://files.pythonhosted.org/packages/d4/c4/df596777fce27bde2d1a4a2f5a7ddea997c0c6d4b5246aafba966b421cc0/sqlalchemy-2.0.42-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:260ca1d2e5910f1f1ad3fe0113f8fab28657cee2542cb48c2f342ed90046e8ec", size = 3255563, upload-time = "2025-07-29T13:24:55.17Z" }, - { url = "https://files.pythonhosted.org/packages/16/ed/b9c4a939b314400f43f972c9eb0091da59d8466ef9c51d0fd5b449edc495/sqlalchemy-2.0.42-cp311-cp311-win32.whl", hash = "sha256:2eb539fd83185a85e5fcd6b19214e1c734ab0351d81505b0f987705ba0a1e231", size = 2098513, upload-time = "2025-07-29T13:23:58.946Z" }, - { url = "https://files.pythonhosted.org/packages/91/72/55b0c34e39feb81991aa3c974d85074c356239ac1170dfb81a474b4c23b3/sqlalchemy-2.0.42-cp311-cp311-win_amd64.whl", hash = "sha256:9193fa484bf00dcc1804aecbb4f528f1123c04bad6a08d7710c909750fa76aeb", size = 2123380, upload-time = "2025-07-29T13:24:00.155Z" }, - { url = "https://files.pythonhosted.org/packages/61/66/ac31a9821fc70a7376321fb2c70fdd7eadbc06dadf66ee216a22a41d6058/sqlalchemy-2.0.42-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:09637a0872689d3eb71c41e249c6f422e3e18bbd05b4cd258193cfc7a9a50da2", size = 2132203, upload-time = "2025-07-29T13:29:19.291Z" }, - { url = "https://files.pythonhosted.org/packages/fc/ba/fd943172e017f955d7a8b3a94695265b7114efe4854feaa01f057e8f5293/sqlalchemy-2.0.42-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:a3cb3ec67cc08bea54e06b569398ae21623534a7b1b23c258883a7c696ae10df", size = 2120373, upload-time = "2025-07-29T13:29:21.049Z" }, - { url = "https://files.pythonhosted.org/packages/ea/a2/b5f7d233d063ffadf7e9fff3898b42657ba154a5bec95a96f44cba7f818b/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:e87e6a5ef6f9d8daeb2ce5918bf5fddecc11cae6a7d7a671fcc4616c47635e01", size = 3317685, upload-time = "2025-07-29T13:26:40.837Z" }, - { url = "https://files.pythonhosted.org/packages/86/00/fcd8daab13a9119d41f3e485a101c29f5d2085bda459154ba354c616bf4e/sqlalchemy-2.0.42-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0b718011a9d66c0d2f78e1997755cd965f3414563b31867475e9bc6efdc2281d", size = 3326967, upload-time = "2025-07-29T13:22:31.009Z" }, - { url = "https://files.pythonhosted.org/packages/a3/85/e622a273d648d39d6771157961956991a6d760e323e273d15e9704c30ccc/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:16d9b544873fe6486dddbb859501a07d89f77c61d29060bb87d0faf7519b6a4d", size = 3255331, upload-time = "2025-07-29T13:26:42.579Z" }, - { url = "https://files.pythonhosted.org/packages/3a/a0/2c2338b592c7b0a61feffd005378c084b4c01fabaf1ed5f655ab7bd446f0/sqlalchemy-2.0.42-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:21bfdf57abf72fa89b97dd74d3187caa3172a78c125f2144764a73970810c4ee", size = 3291791, upload-time = "2025-07-29T13:22:32.454Z" }, - { url = "https://files.pythonhosted.org/packages/41/19/b8a2907972a78285fdce4c880ecaab3c5067eb726882ca6347f7a4bf64f6/sqlalchemy-2.0.42-cp312-cp312-win32.whl", hash = "sha256:78b46555b730a24901ceb4cb901c6b45c9407f8875209ed3c5d6bcd0390a6ed1", size = 2096180, upload-time = "2025-07-29T13:16:08.952Z" }, - { url = "https://files.pythonhosted.org/packages/48/1f/67a78f3dfd08a2ed1c7be820fe7775944f5126080b5027cc859084f8e223/sqlalchemy-2.0.42-cp312-cp312-win_amd64.whl", hash = "sha256:4c94447a016f36c4da80072e6c6964713b0af3c8019e9c4daadf21f61b81ab53", size = 2123533, upload-time = "2025-07-29T13:16:11.705Z" }, - { url = "https://files.pythonhosted.org/packages/e9/7e/25d8c28b86730c9fb0e09156f601d7a96d1c634043bf8ba36513eb78887b/sqlalchemy-2.0.42-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:941804f55c7d507334da38133268e3f6e5b0340d584ba0f277dd884197f4ae8c", size = 2127905, upload-time = "2025-07-29T13:29:22.249Z" }, - { url = "https://files.pythonhosted.org/packages/e5/a1/9d8c93434d1d983880d976400fcb7895a79576bd94dca61c3b7b90b1ed0d/sqlalchemy-2.0.42-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:95d3d06a968a760ce2aa6a5889fefcbdd53ca935735e0768e1db046ec08cbf01", size = 2115726, upload-time = "2025-07-29T13:29:23.496Z" }, - { url = "https://files.pythonhosted.org/packages/a2/cc/d33646fcc24c87cc4e30a03556b611a4e7bcfa69a4c935bffb923e3c89f4/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4cf10396a8a700a0f38ccd220d940be529c8f64435c5d5b29375acab9267a6c9", size = 3246007, upload-time = "2025-07-29T13:26:44.166Z" }, - { url = "https://files.pythonhosted.org/packages/67/08/4e6c533d4c7f5e7c4cbb6fe8a2c4e813202a40f05700d4009a44ec6e236d/sqlalchemy-2.0.42-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9cae6c2b05326d7c2c7c0519f323f90e0fb9e8afa783c6a05bb9ee92a90d0f04", size = 3250919, upload-time = "2025-07-29T13:22:33.74Z" }, - { url = "https://files.pythonhosted.org/packages/5c/82/f680e9a636d217aece1b9a8030d18ad2b59b5e216e0c94e03ad86b344af3/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f50f7b20677b23cfb35b6afcd8372b2feb348a38e3033f6447ee0704540be894", size = 3180546, upload-time = "2025-07-29T13:26:45.648Z" }, - { url = "https://files.pythonhosted.org/packages/7d/a2/8c8f6325f153894afa3775584c429cc936353fb1db26eddb60a549d0ff4b/sqlalchemy-2.0.42-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:9d88a1c0d66d24e229e3938e1ef16ebdbd2bf4ced93af6eff55225f7465cf350", size = 3216683, upload-time = "2025-07-29T13:22:34.977Z" }, - { url = "https://files.pythonhosted.org/packages/39/44/3a451d7fa4482a8ffdf364e803ddc2cfcafc1c4635fb366f169ecc2c3b11/sqlalchemy-2.0.42-cp313-cp313-win32.whl", hash = "sha256:45c842c94c9ad546c72225a0c0d1ae8ef3f7c212484be3d429715a062970e87f", size = 2093990, upload-time = "2025-07-29T13:16:13.036Z" }, - { url = "https://files.pythonhosted.org/packages/4b/9e/9bce34f67aea0251c8ac104f7bdb2229d58fb2e86a4ad8807999c4bee34b/sqlalchemy-2.0.42-cp313-cp313-win_amd64.whl", hash = "sha256:eb9905f7f1e49fd57a7ed6269bc567fcbbdac9feadff20ad6bd7707266a91577", size = 2120473, upload-time = "2025-07-29T13:16:14.502Z" }, - { url = "https://files.pythonhosted.org/packages/ee/55/ba2546ab09a6adebc521bf3974440dc1d8c06ed342cceb30ed62a8858835/sqlalchemy-2.0.42-py3-none-any.whl", hash = "sha256:defcdff7e661f0043daa381832af65d616e060ddb54d3fe4476f51df7eaa1835", size = 1922072, upload-time = "2025-07-29T13:09:17.061Z" }, +sdist = { url = "https://files.pythonhosted.org/packages/1f/73/b4a9737255583b5fa858e0bb8e116eb94b88c910164ed2ed719147bde3de/sqlalchemy-2.0.48.tar.gz", hash = "sha256:5ca74f37f3369b45e1f6b7b06afb182af1fd5dde009e4ffd831830d98cbe5fe7", size = 9886075, upload-time = "2026-03-02T15:28:51.474Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/9a/67/1235676e93dd3b742a4a8eddfae49eea46c85e3eed29f0da446a8dd57500/sqlalchemy-2.0.48-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:7001dc9d5f6bb4deb756d5928eaefe1930f6f4179da3924cbd95ee0e9f4dce89", size = 2157384, upload-time = "2026-03-02T15:38:26.781Z" }, + { url = "https://files.pythonhosted.org/packages/4d/d7/fa728b856daa18c10e1390e76f26f64ac890c947008284387451d56ca3d0/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:1a89ce07ad2d4b8cfc30bd5889ec40613e028ed80ef47da7d9dd2ce969ad30e0", size = 3236981, upload-time = "2026-03-02T15:58:53.53Z" }, + { url = "https://files.pythonhosted.org/packages/5c/ad/6c4395649a212a6c603a72c5b9ab5dce3135a1546cfdffa3c427e71fd535/sqlalchemy-2.0.48-cp310-cp310-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:10853a53a4a00417a00913d270dddda75815fcb80675874285f41051c094d7dd", size = 3235232, upload-time = "2026-03-02T15:52:25.654Z" }, + { url = "https://files.pythonhosted.org/packages/01/f4/58f845e511ac0509765a6f85eb24924c1ef0d54fb50de9d15b28c3601458/sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:fac0fa4e4f55f118fd87177dacb1c6522fe39c28d498d259014020fec9164c29", size = 3188106, upload-time = "2026-03-02T15:58:55.193Z" }, + { url = "https://files.pythonhosted.org/packages/3f/f9/6dcc7bfa5f5794c3a095e78cd1de8269dfb5584dfd4c2c00a50d3c1ade44/sqlalchemy-2.0.48-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:3713e21ea67bca727eecd4a24bf68bcd414c403faae4989442be60994301ded0", size = 3209522, upload-time = "2026-03-02T15:52:27.407Z" }, + { url = "https://files.pythonhosted.org/packages/d7/5a/b632875ab35874d42657f079529f0745410604645c269a8c21fb4272ff7a/sqlalchemy-2.0.48-cp310-cp310-win32.whl", hash = "sha256:d404dc897ce10e565d647795861762aa2d06ca3f4a728c5e9a835096c7059018", size = 2117695, upload-time = "2026-03-02T15:46:51.389Z" }, + { url = "https://files.pythonhosted.org/packages/de/03/9752eb2a41afdd8568e41ac3c3128e32a0a73eada5ab80483083604a56d1/sqlalchemy-2.0.48-cp310-cp310-win_amd64.whl", hash = "sha256:841a94c66577661c1f088ac958cd767d7c9bf507698f45afffe7a4017049de76", size = 2140928, upload-time = "2026-03-02T15:46:52.992Z" }, + { url = "https://files.pythonhosted.org/packages/d7/6d/b8b78b5b80f3c3ab3f7fa90faa195ec3401f6d884b60221260fd4d51864c/sqlalchemy-2.0.48-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:1b4c575df7368b3b13e0cebf01d4679f9a28ed2ae6c1cd0b1d5beffb6b2007dc", size = 2157184, upload-time = "2026-03-02T15:38:28.161Z" }, + { url = "https://files.pythonhosted.org/packages/21/4b/4f3d4a43743ab58b95b9ddf5580a265b593d017693df9e08bd55780af5bb/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:e83e3f959aaa1c9df95c22c528096d94848a1bc819f5d0ebf7ee3df0ca63db6c", size = 3313555, upload-time = "2026-03-02T15:58:57.21Z" }, + { url = "https://files.pythonhosted.org/packages/21/dd/3b7c53f1dbbf736fd27041aee68f8ac52226b610f914085b1652c2323442/sqlalchemy-2.0.48-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:6f7b7243850edd0b8b97043f04748f31de50cf426e939def5c16bedb540698f7", size = 3313057, upload-time = "2026-03-02T15:52:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/d9/cc/3e600a90ae64047f33313d7d32e5ad025417f09d2ded487e8284b5e21a15/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:82745b03b4043e04600a6b665cb98697c4339b24e34d74b0a2ac0a2488b6f94d", size = 3265431, upload-time = "2026-03-02T15:58:59.096Z" }, + { url = "https://files.pythonhosted.org/packages/8b/19/780138dacfe3f5024f4cf96e4005e91edf6653d53d3673be4844578faf1d/sqlalchemy-2.0.48-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:e5e088bf43f6ee6fec7dbf1ef7ff7774a616c236b5c0cb3e00662dd71a56b571", size = 3287646, upload-time = "2026-03-02T15:52:31.569Z" }, + { url = "https://files.pythonhosted.org/packages/40/fd/f32ced124f01a23151f4777e4c705f3a470adc7bd241d9f36a7c941a33bf/sqlalchemy-2.0.48-cp311-cp311-win32.whl", hash = "sha256:9c7d0a77e36b5f4b01ca398482230ab792061d243d715299b44a0b55c89fe617", size = 2116956, upload-time = "2026-03-02T15:46:54.535Z" }, + { url = "https://files.pythonhosted.org/packages/58/d5/dd767277f6feef12d05651538f280277e661698f617fa4d086cce6055416/sqlalchemy-2.0.48-cp311-cp311-win_amd64.whl", hash = "sha256:583849c743e0e3c9bb7446f5b5addeacedc168d657a69b418063dfdb2d90081c", size = 2141627, upload-time = "2026-03-02T15:46:55.849Z" }, + { url = "https://files.pythonhosted.org/packages/ef/91/a42ae716f8925e9659df2da21ba941f158686856107a61cc97a95e7647a3/sqlalchemy-2.0.48-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:348174f228b99f33ca1f773e85510e08927620caa59ffe7803b37170df30332b", size = 2155737, upload-time = "2026-03-02T15:49:13.207Z" }, + { url = "https://files.pythonhosted.org/packages/b9/52/f75f516a1f3888f027c1cfb5d22d4376f4b46236f2e8669dcb0cddc60275/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:53667b5f668991e279d21f94ccfa6e45b4e3f4500e7591ae59a8012d0f010dcb", size = 3337020, upload-time = "2026-03-02T15:50:34.547Z" }, + { url = "https://files.pythonhosted.org/packages/37/9a/0c28b6371e0cdcb14f8f1930778cb3123acfcbd2c95bb9cf6b4a2ba0cce3/sqlalchemy-2.0.48-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:34634e196f620c7a61d18d5cf7dc841ca6daa7961aed75d532b7e58b309ac894", size = 3349983, upload-time = "2026-03-02T15:53:25.542Z" }, + { url = "https://files.pythonhosted.org/packages/1c/46/0aee8f3ff20b1dcbceb46ca2d87fcc3d48b407925a383ff668218509d132/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:546572a1793cc35857a2ffa1fe0e58571af1779bcc1ffa7c9fb0839885ed69a9", size = 3279690, upload-time = "2026-03-02T15:50:36.277Z" }, + { url = "https://files.pythonhosted.org/packages/ce/8c/a957bc91293b49181350bfd55e6dfc6e30b7f7d83dc6792d72043274a390/sqlalchemy-2.0.48-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:07edba08061bc277bfdc772dd2a1a43978f5a45994dd3ede26391b405c15221e", size = 3314738, upload-time = "2026-03-02T15:53:27.519Z" }, + { url = "https://files.pythonhosted.org/packages/4b/44/1d257d9f9556661e7bdc83667cc414ba210acfc110c82938cb3611eea58f/sqlalchemy-2.0.48-cp312-cp312-win32.whl", hash = "sha256:908a3fa6908716f803b86896a09a2c4dde5f5ce2bb07aacc71ffebb57986ce99", size = 2115546, upload-time = "2026-03-02T15:54:31.591Z" }, + { url = "https://files.pythonhosted.org/packages/f2/af/c3c7e1f3a2b383155a16454df62ae8c62a30dd238e42e68c24cebebbfae6/sqlalchemy-2.0.48-cp312-cp312-win_amd64.whl", hash = "sha256:68549c403f79a8e25984376480959975212a670405e3913830614432b5daa07a", size = 2142484, upload-time = "2026-03-02T15:54:34.072Z" }, + { url = "https://files.pythonhosted.org/packages/d1/c6/569dc8bf3cd375abc5907e82235923e986799f301cd79a903f784b996fca/sqlalchemy-2.0.48-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:e3070c03701037aa418b55d36532ecb8f8446ed0135acb71c678dbdf12f5b6e4", size = 2152599, upload-time = "2026-03-02T15:49:14.41Z" }, + { url = "https://files.pythonhosted.org/packages/6d/ff/f4e04a4bd5a24304f38cb0d4aa2ad4c0fb34999f8b884c656535e1b2b74c/sqlalchemy-2.0.48-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2645b7d8a738763b664a12a1542c89c940daa55196e8d73e55b169cc5c99f65f", size = 3278825, upload-time = "2026-03-02T15:50:38.269Z" }, + { url = "https://files.pythonhosted.org/packages/fe/88/cb59509e4668d8001818d7355d9995be90c321313078c912420603a7cb95/sqlalchemy-2.0.48-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:b19151e76620a412c2ac1c6f977ab1b9fa7ad43140178345136456d5265b32ed", size = 3295200, upload-time = "2026-03-02T15:53:29.366Z" }, + { url = "https://files.pythonhosted.org/packages/87/dc/1609a4442aefd750ea2f32629559394ec92e89ac1d621a7f462b70f736ff/sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:5b193a7e29fd9fa56e502920dca47dffe60f97c863494946bd698c6058a55658", size = 3226876, upload-time = "2026-03-02T15:50:39.802Z" }, + { url = "https://files.pythonhosted.org/packages/37/c3/6ae2ab5ea2fa989fbac4e674de01224b7a9d744becaf59bb967d62e99bed/sqlalchemy-2.0.48-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:36ac4ddc3d33e852da9cb00ffb08cea62ca05c39711dc67062ca2bb1fae35fd8", size = 3265045, upload-time = "2026-03-02T15:53:31.421Z" }, + { url = "https://files.pythonhosted.org/packages/6f/82/ea4665d1bb98c50c19666e672f21b81356bd6077c4574e3d2bbb84541f53/sqlalchemy-2.0.48-cp313-cp313-win32.whl", hash = "sha256:389b984139278f97757ea9b08993e7b9d1142912e046ab7d82b3fbaeb0209131", size = 2113700, upload-time = "2026-03-02T15:54:35.825Z" }, + { url = "https://files.pythonhosted.org/packages/b7/2b/b9040bec58c58225f073f5b0c1870defe1940835549dafec680cbd58c3c3/sqlalchemy-2.0.48-cp313-cp313-win_amd64.whl", hash = "sha256:d612c976cbc2d17edfcc4c006874b764e85e990c29ce9bd411f926bbfb02b9a2", size = 2139487, upload-time = "2026-03-02T15:54:37.079Z" }, + { url = "https://files.pythonhosted.org/packages/f4/f4/7b17bd50244b78a49d22cc63c969d71dc4de54567dc152a9b46f6fae40ce/sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:69f5bc24904d3bc3640961cddd2523e361257ef68585d6e364166dfbe8c78fae", size = 3558851, upload-time = "2026-03-02T15:57:48.607Z" }, + { url = "https://files.pythonhosted.org/packages/20/0d/213668e9aca61d370f7d2a6449ea4ec699747fac67d4bda1bb3d129025be/sqlalchemy-2.0.48-cp313-cp313t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:fd08b90d211c086181caed76931ecfa2bdfc83eea3cfccdb0f82abc6c4b876cb", size = 3525525, upload-time = "2026-03-02T16:04:38.058Z" }, + { url = "https://files.pythonhosted.org/packages/85/d7/a84edf412979e7d59c69b89a5871f90a49228360594680e667cb2c46a828/sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:1ccd42229aaac2df431562117ac7e667d702e8e44afdb6cf0e50fa3f18160f0b", size = 3466611, upload-time = "2026-03-02T15:57:50.759Z" }, + { url = "https://files.pythonhosted.org/packages/86/55/42404ce5770f6be26a2b0607e7866c31b9a4176c819e9a7a5e0a055770be/sqlalchemy-2.0.48-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:f0dcbc588cd5b725162c076eb9119342f6579c7f7f55057bb7e3c6ff27e13121", size = 3475812, upload-time = "2026-03-02T16:04:40.092Z" }, + { url = "https://files.pythonhosted.org/packages/ae/ae/29b87775fadc43e627cf582fe3bda4d02e300f6b8f2747c764950d13784c/sqlalchemy-2.0.48-cp313-cp313t-win32.whl", hash = "sha256:9764014ef5e58aab76220c5664abb5d47d5bc858d9debf821e55cfdd0f128485", size = 2141335, upload-time = "2026-03-02T15:52:51.518Z" }, + { url = "https://files.pythonhosted.org/packages/91/44/f39d063c90f2443e5b46ec4819abd3d8de653893aae92df42a5c4f5843de/sqlalchemy-2.0.48-cp313-cp313t-win_amd64.whl", hash = "sha256:e2f35b4cccd9ed286ad62e0a3c3ac21e06c02abc60e20aa51a3e305a30f5fa79", size = 2173095, upload-time = "2026-03-02T15:52:52.79Z" }, + { url = "https://files.pythonhosted.org/packages/f7/b3/f437eaa1cf028bb3c927172c7272366393e73ccd104dcf5b6963f4ab5318/sqlalchemy-2.0.48-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:e2d0d88686e3d35a76f3e15a34e8c12d73fc94c1dea1cd55782e695cc14086dd", size = 2154401, upload-time = "2026-03-02T15:49:17.24Z" }, + { url = "https://files.pythonhosted.org/packages/6c/1c/b3abdf0f402aa3f60f0df6ea53d92a162b458fca2321d8f1f00278506402/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:49b7bddc1eebf011ea5ab722fdbe67a401caa34a350d278cc7733c0e88fecb1f", size = 3274528, upload-time = "2026-03-02T15:50:41.489Z" }, + { url = "https://files.pythonhosted.org/packages/f2/5e/327428a034407651a048f5e624361adf3f9fbac9d0fa98e981e9c6ff2f5e/sqlalchemy-2.0.48-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:426c5ca86415d9b8945c7073597e10de9644802e2ff502b8e1f11a7a2642856b", size = 3279523, upload-time = "2026-03-02T15:53:32.962Z" }, + { url = "https://files.pythonhosted.org/packages/2a/ca/ece73c81a918add0965b76b868b7b5359e068380b90ef1656ee995940c02/sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:288937433bd44e3990e7da2402fabc44a3c6c25d3704da066b85b89a85474ae0", size = 3224312, upload-time = "2026-03-02T15:50:42.996Z" }, + { url = "https://files.pythonhosted.org/packages/88/11/fbaf1ae91fa4ee43f4fe79661cead6358644824419c26adb004941bdce7c/sqlalchemy-2.0.48-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:8183dc57ae7d9edc1346e007e840a9f3d6aa7b7f165203a99e16f447150140d2", size = 3246304, upload-time = "2026-03-02T15:53:34.937Z" }, + { url = "https://files.pythonhosted.org/packages/fa/a8/5fb0deb13930b4f2f698c5541ae076c18981173e27dd00376dbaea7a9c82/sqlalchemy-2.0.48-cp314-cp314-win32.whl", hash = "sha256:1182437cb2d97988cfea04cf6cdc0b0bb9c74f4d56ec3d08b81e23d621a28cc6", size = 2116565, upload-time = "2026-03-02T15:54:38.321Z" }, + { url = "https://files.pythonhosted.org/packages/95/7e/e83615cb63f80047f18e61e31e8e32257d39458426c23006deeaf48f463b/sqlalchemy-2.0.48-cp314-cp314-win_amd64.whl", hash = "sha256:144921da96c08feb9e2b052c5c5c1d0d151a292c6135623c6b2c041f2a45f9e0", size = 2142205, upload-time = "2026-03-02T15:54:39.831Z" }, + { url = "https://files.pythonhosted.org/packages/83/e3/69d8711b3f2c5135e9cde5f063bc1605860f0b2c53086d40c04017eb1f77/sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:5aee45fd2c6c0f2b9cdddf48c48535e7471e42d6fb81adfde801da0bd5b93241", size = 3563519, upload-time = "2026-03-02T15:57:52.387Z" }, + { url = "https://files.pythonhosted.org/packages/f8/4f/a7cce98facca73c149ea4578981594aaa5fd841e956834931de503359336/sqlalchemy-2.0.48-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:7cddca31edf8b0653090cbb54562ca027c421c58ddde2c0685f49ff56a1690e0", size = 3528611, upload-time = "2026-03-02T16:04:42.097Z" }, + { url = "https://files.pythonhosted.org/packages/cd/7d/5936c7a03a0b0cb0fa0cc425998821c6029756b0855a8f7ee70fba1de955/sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:7a936f1bb23d370b7c8cc079d5fce4c7d18da87a33c6744e51a93b0f9e97e9b3", size = 3472326, upload-time = "2026-03-02T15:57:54.423Z" }, + { url = "https://files.pythonhosted.org/packages/f4/33/cea7dfc31b52904efe3dcdc169eb4514078887dff1f5ae28a7f4c5d54b3c/sqlalchemy-2.0.48-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:e004aa9248e8cb0a5f9b96d003ca7c1c0a5da8decd1066e7b53f59eb8ce7c62b", size = 3478453, upload-time = "2026-03-02T16:04:44.584Z" }, + { url = "https://files.pythonhosted.org/packages/c8/95/32107c4d13be077a9cae61e9ae49966a35dc4bf442a8852dd871db31f62e/sqlalchemy-2.0.48-cp314-cp314t-win32.whl", hash = "sha256:b8438ec5594980d405251451c5b7ea9aa58dda38eb7ac35fb7e4c696712ee24f", size = 2147209, upload-time = "2026-03-02T15:52:54.274Z" }, + { url = "https://files.pythonhosted.org/packages/d2/d7/1e073da7a4bc645eb83c76067284a0374e643bc4be57f14cc6414656f92c/sqlalchemy-2.0.48-cp314-cp314t-win_amd64.whl", hash = "sha256:d854b3970067297f3a7fbd7a4683587134aa9b3877ee15aa29eea478dc68f933", size = 2182198, upload-time = "2026-03-02T15:52:55.606Z" }, + { url = "https://files.pythonhosted.org/packages/46/2c/9664130905f03db57961b8980b05cab624afd114bf2be2576628a9f22da4/sqlalchemy-2.0.48-py3-none-any.whl", hash = "sha256:a66fe406437dd65cacd96a72689a3aaaecaebbcd62d81c5ac1c0fdbeac835096", size = 1940202, upload-time = "2026-03-02T15:52:43.285Z" }, ] [package.optional-dependencies] @@ -2253,28 +2324,28 @@ postgresql-asyncpg = [ [[package]] name = "sse-starlette" -version = "3.3.4" +version = "3.3.3" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "starlette" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/26/8c/f9290339ef6d79badbc010f067cd769d6601ec11a57d78569c683fb4dd87/sse_starlette-3.3.4.tar.gz", hash = "sha256:aaf92fc067af8a5427192895ac028e947b484ac01edbc3caf00e7e7137c7bef1", size = 32427, upload-time = "2026-03-29T09:00:23.307Z" } +sdist = { url = "https://files.pythonhosted.org/packages/14/2f/9223c24f568bb7a0c03d751e609844dce0968f13b39a3f73fbb3a96cd27a/sse_starlette-3.3.3.tar.gz", hash = "sha256:72a95d7575fd5129bd0ae15275ac6432bb35ac542fdebb82889c24bb9f3f4049", size = 32420, upload-time = "2026-03-17T20:05:55.529Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/f8/7f/3de5402f39890ac5660b86bcf5c03f9d855dad5c4ed764866d7b592b46fd/sse_starlette-3.3.4-py3-none-any.whl", hash = "sha256:84bb06e58939a8b38d8341f1bc9792f06c2b53f48c608dd207582b664fc8f3c1", size = 14330, upload-time = "2026-03-29T09:00:21.846Z" }, + { url = "https://files.pythonhosted.org/packages/78/e2/b8cff57a67dddf9a464d7e943218e031617fb3ddc133aeeb0602ff5f6c85/sse_starlette-3.3.3-py3-none-any.whl", hash = "sha256:c5abb5082a1cc1c6294d89c5290c46b5f67808cfdb612b7ec27e8ba061c22e8d", size = 14329, upload-time = "2026-03-17T20:05:54.35Z" }, ] [[package]] name = "starlette" -version = "1.0.0" +version = "0.52.1" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "anyio" }, { name = "typing-extensions", marker = "python_full_version < '3.13'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/69/17425771797c36cded50b7fe44e850315d039f28b15901ab44839e70b593/starlette-1.0.0.tar.gz", hash = "sha256:6a4beaf1f81bb472fd19ea9b918b50dc3a77a6f2e190a12954b25e6ed5eea149", size = 2655289, upload-time = "2026-03-22T18:29:46.779Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c4/68/79977123bb7be889ad680d79a40f339082c1978b5cfcf62c2d8d196873ac/starlette-0.52.1.tar.gz", hash = "sha256:834edd1b0a23167694292e94f597773bc3f89f362be6effee198165a35d62933", size = 2653702, upload-time = "2026-01-18T13:34:11.062Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/0b/c9/584bc9651441b4ba60cc4d557d8a547b5aff901af35bda3a4ee30c819b82/starlette-1.0.0-py3-none-any.whl", hash = "sha256:d3ec55e0bb321692d275455ddfd3df75fff145d009685eb40dc91fc66b03d38b", size = 72651, upload-time = "2026-03-22T18:29:45.111Z" }, + { url = "https://files.pythonhosted.org/packages/81/0d/13d1d239a25cbfb19e740db83143e95c772a1fe10202dda4b76792b114dd/starlette-0.52.1-py3-none-any.whl", hash = "sha256:0029d43eb3d273bc4f83a08720b4912ea4b071087a3b48db01b7c839f7954d74", size = 74272, upload-time = "2026-01-18T13:34:09.188Z" }, ] [[package]] @@ -2288,50 +2359,65 @@ wheels = [ [[package]] name = "tomli" -version = "2.2.1" -source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/18/87/302344fed471e44a87289cf4967697d07e532f2421fdaf868a303cbae4ff/tomli-2.2.1.tar.gz", hash = "sha256:cd45e1dc79c835ce60f7404ec8119f2eb06d38b1deba146f07ced3bbc44505ff", size = 17175, upload-time = "2024-11-27T22:38:36.873Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/43/ca/75707e6efa2b37c77dadb324ae7d9571cb424e61ea73fad7c56c2d14527f/tomli-2.2.1-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:678e4fa69e4575eb77d103de3df8a895e1591b48e740211bd1067378c69e8249", size = 131077, upload-time = "2024-11-27T22:37:54.956Z" }, - { url = "https://files.pythonhosted.org/packages/c7/16/51ae563a8615d472fdbffc43a3f3d46588c264ac4f024f63f01283becfbb/tomli-2.2.1-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:023aa114dd824ade0100497eb2318602af309e5a55595f76b626d6d9f3b7b0a6", size = 123429, upload-time = "2024-11-27T22:37:56.698Z" }, - { url = "https://files.pythonhosted.org/packages/f1/dd/4f6cd1e7b160041db83c694abc78e100473c15d54620083dbd5aae7b990e/tomli-2.2.1-cp311-cp311-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:ece47d672db52ac607a3d9599a9d48dcb2f2f735c6c2d1f34130085bb12b112a", size = 226067, upload-time = "2024-11-27T22:37:57.63Z" }, - { url = "https://files.pythonhosted.org/packages/a9/6b/c54ede5dc70d648cc6361eaf429304b02f2871a345bbdd51e993d6cdf550/tomli-2.2.1-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6972ca9c9cc9f0acaa56a8ca1ff51e7af152a9f87fb64623e31d5c83700080ee", size = 236030, upload-time = "2024-11-27T22:37:59.344Z" }, - { url = "https://files.pythonhosted.org/packages/1f/47/999514fa49cfaf7a92c805a86c3c43f4215621855d151b61c602abb38091/tomli-2.2.1-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:c954d2250168d28797dd4e3ac5cf812a406cd5a92674ee4c8f123c889786aa8e", size = 240898, upload-time = "2024-11-27T22:38:00.429Z" }, - { url = "https://files.pythonhosted.org/packages/73/41/0a01279a7ae09ee1573b423318e7934674ce06eb33f50936655071d81a24/tomli-2.2.1-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:8dd28b3e155b80f4d54beb40a441d366adcfe740969820caf156c019fb5c7ec4", size = 229894, upload-time = "2024-11-27T22:38:02.094Z" }, - { url = "https://files.pythonhosted.org/packages/55/18/5d8bc5b0a0362311ce4d18830a5d28943667599a60d20118074ea1b01bb7/tomli-2.2.1-cp311-cp311-musllinux_1_2_i686.whl", hash = "sha256:e59e304978767a54663af13c07b3d1af22ddee3bb2fb0618ca1593e4f593a106", size = 245319, upload-time = "2024-11-27T22:38:03.206Z" }, - { url = "https://files.pythonhosted.org/packages/92/a3/7ade0576d17f3cdf5ff44d61390d4b3febb8a9fc2b480c75c47ea048c646/tomli-2.2.1-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:33580bccab0338d00994d7f16f4c4ec25b776af3ffaac1ed74e0b3fc95e885a8", size = 238273, upload-time = "2024-11-27T22:38:04.217Z" }, - { url = "https://files.pythonhosted.org/packages/72/6f/fa64ef058ac1446a1e51110c375339b3ec6be245af9d14c87c4a6412dd32/tomli-2.2.1-cp311-cp311-win32.whl", hash = "sha256:465af0e0875402f1d226519c9904f37254b3045fc5084697cefb9bdde1ff99ff", size = 98310, upload-time = "2024-11-27T22:38:05.908Z" }, - { url = "https://files.pythonhosted.org/packages/6a/1c/4a2dcde4a51b81be3530565e92eda625d94dafb46dbeb15069df4caffc34/tomli-2.2.1-cp311-cp311-win_amd64.whl", hash = "sha256:2d0f2fdd22b02c6d81637a3c95f8cd77f995846af7414c5c4b8d0545afa1bc4b", size = 108309, upload-time = "2024-11-27T22:38:06.812Z" }, - { url = "https://files.pythonhosted.org/packages/52/e1/f8af4c2fcde17500422858155aeb0d7e93477a0d59a98e56cbfe75070fd0/tomli-2.2.1-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:4a8f6e44de52d5e6c657c9fe83b562f5f4256d8ebbfe4ff922c495620a7f6cea", size = 132762, upload-time = "2024-11-27T22:38:07.731Z" }, - { url = "https://files.pythonhosted.org/packages/03/b8/152c68bb84fc00396b83e7bbddd5ec0bd3dd409db4195e2a9b3e398ad2e3/tomli-2.2.1-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:8d57ca8095a641b8237d5b079147646153d22552f1c637fd3ba7f4b0b29167a8", size = 123453, upload-time = "2024-11-27T22:38:09.384Z" }, - { url = "https://files.pythonhosted.org/packages/c8/d6/fc9267af9166f79ac528ff7e8c55c8181ded34eb4b0e93daa767b8841573/tomli-2.2.1-cp312-cp312-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:4e340144ad7ae1533cb897d406382b4b6fede8890a03738ff1683af800d54192", size = 233486, upload-time = "2024-11-27T22:38:10.329Z" }, - { url = "https://files.pythonhosted.org/packages/5c/51/51c3f2884d7bab89af25f678447ea7d297b53b5a3b5730a7cb2ef6069f07/tomli-2.2.1-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:db2b95f9de79181805df90bedc5a5ab4c165e6ec3fe99f970d0e302f384ad222", size = 242349, upload-time = "2024-11-27T22:38:11.443Z" }, - { url = "https://files.pythonhosted.org/packages/ab/df/bfa89627d13a5cc22402e441e8a931ef2108403db390ff3345c05253935e/tomli-2.2.1-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:40741994320b232529c802f8bc86da4e1aa9f413db394617b9a256ae0f9a7f77", size = 252159, upload-time = "2024-11-27T22:38:13.099Z" }, - { url = "https://files.pythonhosted.org/packages/9e/6e/fa2b916dced65763a5168c6ccb91066f7639bdc88b48adda990db10c8c0b/tomli-2.2.1-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:400e720fe168c0f8521520190686ef8ef033fb19fc493da09779e592861b78c6", size = 237243, upload-time = "2024-11-27T22:38:14.766Z" }, - { url = "https://files.pythonhosted.org/packages/b4/04/885d3b1f650e1153cbb93a6a9782c58a972b94ea4483ae4ac5cedd5e4a09/tomli-2.2.1-cp312-cp312-musllinux_1_2_i686.whl", hash = "sha256:02abe224de6ae62c19f090f68da4e27b10af2b93213d36cf44e6e1c5abd19fdd", size = 259645, upload-time = "2024-11-27T22:38:15.843Z" }, - { url = "https://files.pythonhosted.org/packages/9c/de/6b432d66e986e501586da298e28ebeefd3edc2c780f3ad73d22566034239/tomli-2.2.1-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:b82ebccc8c8a36f2094e969560a1b836758481f3dc360ce9a3277c65f374285e", size = 244584, upload-time = "2024-11-27T22:38:17.645Z" }, - { url = "https://files.pythonhosted.org/packages/1c/9a/47c0449b98e6e7d1be6cbac02f93dd79003234ddc4aaab6ba07a9a7482e2/tomli-2.2.1-cp312-cp312-win32.whl", hash = "sha256:889f80ef92701b9dbb224e49ec87c645ce5df3fa2cc548664eb8a25e03127a98", size = 98875, upload-time = "2024-11-27T22:38:19.159Z" }, - { url = "https://files.pythonhosted.org/packages/ef/60/9b9638f081c6f1261e2688bd487625cd1e660d0a85bd469e91d8db969734/tomli-2.2.1-cp312-cp312-win_amd64.whl", hash = "sha256:7fc04e92e1d624a4a63c76474610238576942d6b8950a2d7f908a340494e67e4", size = 109418, upload-time = "2024-11-27T22:38:20.064Z" }, - { url = "https://files.pythonhosted.org/packages/04/90/2ee5f2e0362cb8a0b6499dc44f4d7d48f8fff06d28ba46e6f1eaa61a1388/tomli-2.2.1-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:f4039b9cbc3048b2416cc57ab3bda989a6fcf9b36cf8937f01a6e731b64f80d7", size = 132708, upload-time = "2024-11-27T22:38:21.659Z" }, - { url = "https://files.pythonhosted.org/packages/c0/ec/46b4108816de6b385141f082ba99e315501ccd0a2ea23db4a100dd3990ea/tomli-2.2.1-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:286f0ca2ffeeb5b9bd4fcc8d6c330534323ec51b2f52da063b11c502da16f30c", size = 123582, upload-time = "2024-11-27T22:38:22.693Z" }, - { url = "https://files.pythonhosted.org/packages/a0/bd/b470466d0137b37b68d24556c38a0cc819e8febe392d5b199dcd7f578365/tomli-2.2.1-cp313-cp313-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:a92ef1a44547e894e2a17d24e7557a5e85a9e1d0048b0b5e7541f76c5032cb13", size = 232543, upload-time = "2024-11-27T22:38:24.367Z" }, - { url = "https://files.pythonhosted.org/packages/d9/e5/82e80ff3b751373f7cead2815bcbe2d51c895b3c990686741a8e56ec42ab/tomli-2.2.1-cp313-cp313-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9316dc65bed1684c9a98ee68759ceaed29d229e985297003e494aa825ebb0281", size = 241691, upload-time = "2024-11-27T22:38:26.081Z" }, - { url = "https://files.pythonhosted.org/packages/05/7e/2a110bc2713557d6a1bfb06af23dd01e7dde52b6ee7dadc589868f9abfac/tomli-2.2.1-cp313-cp313-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:e85e99945e688e32d5a35c1ff38ed0b3f41f43fad8df0bdf79f72b2ba7bc5272", size = 251170, upload-time = "2024-11-27T22:38:27.921Z" }, - { url = "https://files.pythonhosted.org/packages/64/7b/22d713946efe00e0adbcdfd6d1aa119ae03fd0b60ebed51ebb3fa9f5a2e5/tomli-2.2.1-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:ac065718db92ca818f8d6141b5f66369833d4a80a9d74435a268c52bdfa73140", size = 236530, upload-time = "2024-11-27T22:38:29.591Z" }, - { url = "https://files.pythonhosted.org/packages/38/31/3a76f67da4b0cf37b742ca76beaf819dca0ebef26d78fc794a576e08accf/tomli-2.2.1-cp313-cp313-musllinux_1_2_i686.whl", hash = "sha256:d920f33822747519673ee656a4b6ac33e382eca9d331c87770faa3eef562aeb2", size = 258666, upload-time = "2024-11-27T22:38:30.639Z" }, - { url = "https://files.pythonhosted.org/packages/07/10/5af1293da642aded87e8a988753945d0cf7e00a9452d3911dd3bb354c9e2/tomli-2.2.1-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:a198f10c4d1b1375d7687bc25294306e551bf1abfa4eace6650070a5c1ae2744", size = 243954, upload-time = "2024-11-27T22:38:31.702Z" }, - { url = "https://files.pythonhosted.org/packages/5b/b9/1ed31d167be802da0fc95020d04cd27b7d7065cc6fbefdd2f9186f60d7bd/tomli-2.2.1-cp313-cp313-win32.whl", hash = "sha256:d3f5614314d758649ab2ab3a62d4f2004c825922f9e370b29416484086b264ec", size = 98724, upload-time = "2024-11-27T22:38:32.837Z" }, - { url = "https://files.pythonhosted.org/packages/c7/32/b0963458706accd9afcfeb867c0f9175a741bf7b19cd424230714d722198/tomli-2.2.1-cp313-cp313-win_amd64.whl", hash = "sha256:a38aa0308e754b0e3c67e344754dff64999ff9b513e691d0e786265c93583c69", size = 109383, upload-time = "2024-11-27T22:38:34.455Z" }, - { url = "https://files.pythonhosted.org/packages/6e/c2/61d3e0f47e2b74ef40a68b9e6ad5984f6241a942f7cd3bbfbdbd03861ea9/tomli-2.2.1-py3-none-any.whl", hash = "sha256:cb55c73c5f4408779d0cf3eef9f762b9c9f147a77de7b258bef0a5628adc85cc", size = 14257, upload-time = "2024-11-27T22:38:35.385Z" }, +version = "2.4.0" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/82/30/31573e9457673ab10aa432461bee537ce6cef177667deca369efb79df071/tomli-2.4.0.tar.gz", hash = "sha256:aa89c3f6c277dd275d8e243ad24f3b5e701491a860d5121f2cdd399fbb31fc9c", size = 17477, upload-time = "2026-01-11T11:22:38.165Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/3c/d9/3dc2289e1f3b32eb19b9785b6a006b28ee99acb37d1d47f78d4c10e28bf8/tomli-2.4.0-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:b5ef256a3fd497d4973c11bf142e9ed78b150d36f5773f1ca6088c230ffc5867", size = 153663, upload-time = "2026-01-11T11:21:45.27Z" }, + { url = "https://files.pythonhosted.org/packages/51/32/ef9f6845e6b9ca392cd3f64f9ec185cc6f09f0a2df3db08cbe8809d1d435/tomli-2.4.0-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:5572e41282d5268eb09a697c89a7bee84fae66511f87533a6f88bd2f7b652da9", size = 148469, upload-time = "2026-01-11T11:21:46.873Z" }, + { url = "https://files.pythonhosted.org/packages/d6/c2/506e44cce89a8b1b1e047d64bd495c22c9f71f21e05f380f1a950dd9c217/tomli-2.4.0-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:551e321c6ba03b55676970b47cb1b73f14a0a4dce6a3e1a9458fd6d921d72e95", size = 236039, upload-time = "2026-01-11T11:21:48.503Z" }, + { url = "https://files.pythonhosted.org/packages/b3/40/e1b65986dbc861b7e986e8ec394598187fa8aee85b1650b01dd925ca0be8/tomli-2.4.0-cp311-cp311-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:5e3f639a7a8f10069d0e15408c0b96a2a828cfdec6fca05296ebcdcc28ca7c76", size = 243007, upload-time = "2026-01-11T11:21:49.456Z" }, + { url = "https://files.pythonhosted.org/packages/9c/6f/6e39ce66b58a5b7ae572a0f4352ff40c71e8573633deda43f6a379d56b3e/tomli-2.4.0-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:1b168f2731796b045128c45982d3a4874057626da0e2ef1fdd722848b741361d", size = 240875, upload-time = "2026-01-11T11:21:50.755Z" }, + { url = "https://files.pythonhosted.org/packages/aa/ad/cb089cb190487caa80204d503c7fd0f4d443f90b95cf4ef5cf5aa0f439b0/tomli-2.4.0-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:133e93646ec4300d651839d382d63edff11d8978be23da4cc106f5a18b7d0576", size = 246271, upload-time = "2026-01-11T11:21:51.81Z" }, + { url = "https://files.pythonhosted.org/packages/0b/63/69125220e47fd7a3a27fd0de0c6398c89432fec41bc739823bcc66506af6/tomli-2.4.0-cp311-cp311-win32.whl", hash = "sha256:b6c78bdf37764092d369722d9946cb65b8767bfa4110f902a1b2542d8d173c8a", size = 96770, upload-time = "2026-01-11T11:21:52.647Z" }, + { url = "https://files.pythonhosted.org/packages/1e/0d/a22bb6c83f83386b0008425a6cd1fa1c14b5f3dd4bad05e98cf3dbbf4a64/tomli-2.4.0-cp311-cp311-win_amd64.whl", hash = "sha256:d3d1654e11d724760cdb37a3d7691f0be9db5fbdaef59c9f532aabf87006dbaa", size = 107626, upload-time = "2026-01-11T11:21:53.459Z" }, + { url = "https://files.pythonhosted.org/packages/2f/6d/77be674a3485e75cacbf2ddba2b146911477bd887dda9d8c9dfb2f15e871/tomli-2.4.0-cp311-cp311-win_arm64.whl", hash = "sha256:cae9c19ed12d4e8f3ebf46d1a75090e4c0dc16271c5bce1c833ac168f08fb614", size = 94842, upload-time = "2026-01-11T11:21:54.831Z" }, + { url = "https://files.pythonhosted.org/packages/3c/43/7389a1869f2f26dba52404e1ef13b4784b6b37dac93bac53457e3ff24ca3/tomli-2.4.0-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:920b1de295e72887bafa3ad9f7a792f811847d57ea6b1215154030cf131f16b1", size = 154894, upload-time = "2026-01-11T11:21:56.07Z" }, + { url = "https://files.pythonhosted.org/packages/e9/05/2f9bf110b5294132b2edf13fe6ca6ae456204f3d749f623307cbb7a946f2/tomli-2.4.0-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:7d6d9a4aee98fac3eab4952ad1d73aee87359452d1c086b5ceb43ed02ddb16b8", size = 149053, upload-time = "2026-01-11T11:21:57.467Z" }, + { url = "https://files.pythonhosted.org/packages/e8/41/1eda3ca1abc6f6154a8db4d714a4d35c4ad90adc0bcf700657291593fbf3/tomli-2.4.0-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:36b9d05b51e65b254ea6c2585b59d2c4cb91c8a3d91d0ed0f17591a29aaea54a", size = 243481, upload-time = "2026-01-11T11:21:58.661Z" }, + { url = "https://files.pythonhosted.org/packages/d2/6d/02ff5ab6c8868b41e7d4b987ce2b5f6a51d3335a70aa144edd999e055a01/tomli-2.4.0-cp312-cp312-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1c8a885b370751837c029ef9bc014f27d80840e48bac415f3412e6593bbc18c1", size = 251720, upload-time = "2026-01-11T11:22:00.178Z" }, + { url = "https://files.pythonhosted.org/packages/7b/57/0405c59a909c45d5b6f146107c6d997825aa87568b042042f7a9c0afed34/tomli-2.4.0-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:8768715ffc41f0008abe25d808c20c3d990f42b6e2e58305d5da280ae7d1fa3b", size = 247014, upload-time = "2026-01-11T11:22:01.238Z" }, + { url = "https://files.pythonhosted.org/packages/2c/0e/2e37568edd944b4165735687cbaf2fe3648129e440c26d02223672ee0630/tomli-2.4.0-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7b438885858efd5be02a9a133caf5812b8776ee0c969fea02c45e8e3f296ba51", size = 251820, upload-time = "2026-01-11T11:22:02.727Z" }, + { url = "https://files.pythonhosted.org/packages/5a/1c/ee3b707fdac82aeeb92d1a113f803cf6d0f37bdca0849cb489553e1f417a/tomli-2.4.0-cp312-cp312-win32.whl", hash = "sha256:0408e3de5ec77cc7f81960c362543cbbd91ef883e3138e81b729fc3eea5b9729", size = 97712, upload-time = "2026-01-11T11:22:03.777Z" }, + { url = "https://files.pythonhosted.org/packages/69/13/c07a9177d0b3bab7913299b9278845fc6eaaca14a02667c6be0b0a2270c8/tomli-2.4.0-cp312-cp312-win_amd64.whl", hash = "sha256:685306e2cc7da35be4ee914fd34ab801a6acacb061b6a7abca922aaf9ad368da", size = 108296, upload-time = "2026-01-11T11:22:04.86Z" }, + { url = "https://files.pythonhosted.org/packages/18/27/e267a60bbeeee343bcc279bb9e8fbed0cbe224bc7b2a3dc2975f22809a09/tomli-2.4.0-cp312-cp312-win_arm64.whl", hash = "sha256:5aa48d7c2356055feef06a43611fc401a07337d5b006be13a30f6c58f869e3c3", size = 94553, upload-time = "2026-01-11T11:22:05.854Z" }, + { url = "https://files.pythonhosted.org/packages/34/91/7f65f9809f2936e1f4ce6268ae1903074563603b2a2bd969ebbda802744f/tomli-2.4.0-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:84d081fbc252d1b6a982e1870660e7330fb8f90f676f6e78b052ad4e64714bf0", size = 154915, upload-time = "2026-01-11T11:22:06.703Z" }, + { url = "https://files.pythonhosted.org/packages/20/aa/64dd73a5a849c2e8f216b755599c511badde80e91e9bc2271baa7b2cdbb1/tomli-2.4.0-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:9a08144fa4cba33db5255f9b74f0b89888622109bd2776148f2597447f92a94e", size = 149038, upload-time = "2026-01-11T11:22:07.56Z" }, + { url = "https://files.pythonhosted.org/packages/9e/8a/6d38870bd3d52c8d1505ce054469a73f73a0fe62c0eaf5dddf61447e32fa/tomli-2.4.0-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:c73add4bb52a206fd0c0723432db123c0c75c280cbd67174dd9d2db228ebb1b4", size = 242245, upload-time = "2026-01-11T11:22:08.344Z" }, + { url = "https://files.pythonhosted.org/packages/59/bb/8002fadefb64ab2669e5b977df3f5e444febea60e717e755b38bb7c41029/tomli-2.4.0-cp313-cp313-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:1fb2945cbe303b1419e2706e711b7113da57b7db31ee378d08712d678a34e51e", size = 250335, upload-time = "2026-01-11T11:22:09.951Z" }, + { url = "https://files.pythonhosted.org/packages/a5/3d/4cdb6f791682b2ea916af2de96121b3cb1284d7c203d97d92d6003e91c8d/tomli-2.4.0-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:bbb1b10aa643d973366dc2cb1ad94f99c1726a02343d43cbc011edbfac579e7c", size = 245962, upload-time = "2026-01-11T11:22:11.27Z" }, + { url = "https://files.pythonhosted.org/packages/f2/4a/5f25789f9a460bd858ba9756ff52d0830d825b458e13f754952dd15fb7bb/tomli-2.4.0-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:4cbcb367d44a1f0c2be408758b43e1ffb5308abe0ea222897d6bfc8e8281ef2f", size = 250396, upload-time = "2026-01-11T11:22:12.325Z" }, + { url = "https://files.pythonhosted.org/packages/aa/2f/b73a36fea58dfa08e8b3a268750e6853a6aac2a349241a905ebd86f3047a/tomli-2.4.0-cp313-cp313-win32.whl", hash = "sha256:7d49c66a7d5e56ac959cb6fc583aff0651094ec071ba9ad43df785abc2320d86", size = 97530, upload-time = "2026-01-11T11:22:13.865Z" }, + { url = "https://files.pythonhosted.org/packages/3b/af/ca18c134b5d75de7e8dc551c5234eaba2e8e951f6b30139599b53de9c187/tomli-2.4.0-cp313-cp313-win_amd64.whl", hash = "sha256:3cf226acb51d8f1c394c1b310e0e0e61fecdd7adcb78d01e294ac297dd2e7f87", size = 108227, upload-time = "2026-01-11T11:22:15.224Z" }, + { url = "https://files.pythonhosted.org/packages/22/c3/b386b832f209fee8073c8138ec50f27b4460db2fdae9ffe022df89a57f9b/tomli-2.4.0-cp313-cp313-win_arm64.whl", hash = "sha256:d20b797a5c1ad80c516e41bc1fb0443ddb5006e9aaa7bda2d71978346aeb9132", size = 94748, upload-time = "2026-01-11T11:22:16.009Z" }, + { url = "https://files.pythonhosted.org/packages/f3/c4/84047a97eb1004418bc10bdbcfebda209fca6338002eba2dc27cc6d13563/tomli-2.4.0-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:26ab906a1eb794cd4e103691daa23d95c6919cc2fa9160000ac02370cc9dd3f6", size = 154725, upload-time = "2026-01-11T11:22:17.269Z" }, + { url = "https://files.pythonhosted.org/packages/a8/5d/d39038e646060b9d76274078cddf146ced86dc2b9e8bbf737ad5983609a0/tomli-2.4.0-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:20cedb4ee43278bc4f2fee6cb50daec836959aadaf948db5172e776dd3d993fc", size = 148901, upload-time = "2026-01-11T11:22:18.287Z" }, + { url = "https://files.pythonhosted.org/packages/73/e5/383be1724cb30f4ce44983d249645684a48c435e1cd4f8b5cded8a816d3c/tomli-2.4.0-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:39b0b5d1b6dd03684b3fb276407ebed7090bbec989fa55838c98560c01113b66", size = 243375, upload-time = "2026-01-11T11:22:19.154Z" }, + { url = "https://files.pythonhosted.org/packages/31/f0/bea80c17971c8d16d3cc109dc3585b0f2ce1036b5f4a8a183789023574f2/tomli-2.4.0-cp314-cp314-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:a26d7ff68dfdb9f87a016ecfd1e1c2bacbe3108f4e0f8bcd2228ef9a766c787d", size = 250639, upload-time = "2026-01-11T11:22:20.168Z" }, + { url = "https://files.pythonhosted.org/packages/2c/8f/2853c36abbb7608e3f945d8a74e32ed3a74ee3a1f468f1ffc7d1cb3abba6/tomli-2.4.0-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:20ffd184fb1df76a66e34bd1b36b4a4641bd2b82954befa32fe8163e79f1a702", size = 246897, upload-time = "2026-01-11T11:22:21.544Z" }, + { url = "https://files.pythonhosted.org/packages/49/f0/6c05e3196ed5337b9fe7ea003e95fd3819a840b7a0f2bf5a408ef1dad8ed/tomli-2.4.0-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:75c2f8bbddf170e8effc98f5e9084a8751f8174ea6ccf4fca5398436e0320bc8", size = 254697, upload-time = "2026-01-11T11:22:23.058Z" }, + { url = "https://files.pythonhosted.org/packages/f3/f5/2922ef29c9f2951883525def7429967fc4d8208494e5ab524234f06b688b/tomli-2.4.0-cp314-cp314-win32.whl", hash = "sha256:31d556d079d72db7c584c0627ff3a24c5d3fb4f730221d3444f3efb1b2514776", size = 98567, upload-time = "2026-01-11T11:22:24.033Z" }, + { url = "https://files.pythonhosted.org/packages/7b/31/22b52e2e06dd2a5fdbc3ee73226d763b184ff21fc24e20316a44ccc4d96b/tomli-2.4.0-cp314-cp314-win_amd64.whl", hash = "sha256:43e685b9b2341681907759cf3a04e14d7104b3580f808cfde1dfdb60ada85475", size = 108556, upload-time = "2026-01-11T11:22:25.378Z" }, + { url = "https://files.pythonhosted.org/packages/48/3d/5058dff3255a3d01b705413f64f4306a141a8fd7a251e5a495e3f192a998/tomli-2.4.0-cp314-cp314-win_arm64.whl", hash = "sha256:3d895d56bd3f82ddd6faaff993c275efc2ff38e52322ea264122d72729dca2b2", size = 96014, upload-time = "2026-01-11T11:22:26.138Z" }, + { url = "https://files.pythonhosted.org/packages/b8/4e/75dab8586e268424202d3a1997ef6014919c941b50642a1682df43204c22/tomli-2.4.0-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:5b5807f3999fb66776dbce568cc9a828544244a8eb84b84b9bafc080c99597b9", size = 163339, upload-time = "2026-01-11T11:22:27.143Z" }, + { url = "https://files.pythonhosted.org/packages/06/e3/b904d9ab1016829a776d97f163f183a48be6a4deb87304d1e0116a349519/tomli-2.4.0-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c084ad935abe686bd9c898e62a02a19abfc9760b5a79bc29644463eaf2840cb0", size = 159490, upload-time = "2026-01-11T11:22:28.399Z" }, + { url = "https://files.pythonhosted.org/packages/e3/5a/fc3622c8b1ad823e8ea98a35e3c632ee316d48f66f80f9708ceb4f2a0322/tomli-2.4.0-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:0f2e3955efea4d1cfbcb87bc321e00dc08d2bcb737fd1d5e398af111d86db5df", size = 269398, upload-time = "2026-01-11T11:22:29.345Z" }, + { url = "https://files.pythonhosted.org/packages/fd/33/62bd6152c8bdd4c305ad9faca48f51d3acb2df1f8791b1477d46ff86e7f8/tomli-2.4.0-cp314-cp314t-manylinux2014_x86_64.manylinux_2_17_x86_64.manylinux_2_28_x86_64.whl", hash = "sha256:0e0fe8a0b8312acf3a88077a0802565cb09ee34107813bba1c7cd591fa6cfc8d", size = 276515, upload-time = "2026-01-11T11:22:30.327Z" }, + { url = "https://files.pythonhosted.org/packages/4b/ff/ae53619499f5235ee4211e62a8d7982ba9e439a0fb4f2f351a93d67c1dd2/tomli-2.4.0-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:413540dce94673591859c4c6f794dfeaa845e98bf35d72ed59636f869ef9f86f", size = 273806, upload-time = "2026-01-11T11:22:32.56Z" }, + { url = "https://files.pythonhosted.org/packages/47/71/cbca7787fa68d4d0a9f7072821980b39fbb1b6faeb5f5cf02f4a5559fa28/tomli-2.4.0-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:0dc56fef0e2c1c470aeac5b6ca8cc7b640bb93e92d9803ddaf9ea03e198f5b0b", size = 281340, upload-time = "2026-01-11T11:22:33.505Z" }, + { url = "https://files.pythonhosted.org/packages/f5/00/d595c120963ad42474cf6ee7771ad0d0e8a49d0f01e29576ee9195d9ecdf/tomli-2.4.0-cp314-cp314t-win32.whl", hash = "sha256:d878f2a6707cc9d53a1be1414bbb419e629c3d6e67f69230217bb663e76b5087", size = 108106, upload-time = "2026-01-11T11:22:34.451Z" }, + { url = "https://files.pythonhosted.org/packages/de/69/9aa0c6a505c2f80e519b43764f8b4ba93b5a0bbd2d9a9de6e2b24271b9a5/tomli-2.4.0-cp314-cp314t-win_amd64.whl", hash = "sha256:2add28aacc7425117ff6364fe9e06a183bb0251b03f986df0e78e974047571fd", size = 120504, upload-time = "2026-01-11T11:22:35.764Z" }, + { url = "https://files.pythonhosted.org/packages/b3/9f/f1668c281c58cfae01482f7114a4b88d345e4c140386241a1a24dcc9e7bc/tomli-2.4.0-cp314-cp314t-win_arm64.whl", hash = "sha256:2b1e3b80e1d5e52e40e9b924ec43d81570f0e7d09d11081b797bc4692765a3d4", size = 99561, upload-time = "2026-01-11T11:22:36.624Z" }, + { url = "https://files.pythonhosted.org/packages/23/d1/136eb2cb77520a31e1f64cbae9d33ec6df0d78bdf4160398e86eec8a8754/tomli-2.4.0-py3-none-any.whl", hash = "sha256:1f776e7d669ebceb01dee46484485f43a4048746235e683bcdffacdf1fb4785a", size = 14477, upload-time = "2026-01-11T11:22:37.446Z" }, ] [[package]] name = "tomlkit" -version = "0.13.3" +version = "0.14.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/cc/18/0bbf3884e9eaa38819ebe46a7bd25dcd56b67434402b66a58c4b8e552575/tomlkit-0.13.3.tar.gz", hash = "sha256:430cf247ee57df2b94ee3fbe588e71d362a941ebb545dec29b53961d61add2a1", size = 185207, upload-time = "2025-06-05T07:13:44.947Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c3/af/14b24e41977adb296d6bd1fb59402cf7d60ce364f90c890bd2ec65c43b5a/tomlkit-0.14.0.tar.gz", hash = "sha256:cf00efca415dbd57575befb1f6634c4f42d2d87dbba376128adb42c121b87064", size = 187167, upload-time = "2026-01-13T01:14:53.304Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/bd/75/8539d011f6be8e29f339c42e633aae3cb73bffa95dd0f9adec09b9c58e85/tomlkit-0.13.3-py3-none-any.whl", hash = "sha256:c89c649d79ee40629a9fda55f8ace8c6a1b42deb912b2a8fd8d942ddadb606b0", size = 38901, upload-time = "2025-06-05T07:13:43.546Z" }, + { url = "https://files.pythonhosted.org/packages/b5/11/87d6d29fb5d237229d67973a6c9e06e048f01cf4994dee194ab0ea841814/tomlkit-0.14.0-py3-none-any.whl", hash = "sha256:592064ed85b40fa213469f81ac584f67a4f2992509a7c3ea2d632208623a3680", size = 39310, upload-time = "2026-01-13T01:14:51.965Z" }, ] [[package]] @@ -2354,23 +2440,11 @@ wheels = [ [[package]] name = "trove-classifiers" -version = "2025.5.9.12" +version = "2026.1.14.14" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/38/04/1cd43f72c241fedcf0d9a18d0783953ee301eac9e5d9db1df0f0f089d9af/trove_classifiers-2025.5.9.12.tar.gz", hash = "sha256:7ca7c8a7a76e2cd314468c677c69d12cc2357711fcab4a60f87994c1589e5cb5", size = 16940, upload-time = "2025-05-09T12:04:48.829Z" } +sdist = { url = "https://files.pythonhosted.org/packages/d8/43/7935f8ea93fcb6680bc10a6fdbf534075c198eeead59150dd5ed68449642/trove_classifiers-2026.1.14.14.tar.gz", hash = "sha256:00492545a1402b09d4858605ba190ea33243d361e2b01c9c296ce06b5c3325f3", size = 16997, upload-time = "2026-01-14T14:54:50.526Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/92/ef/c6deb083748be3bcad6f471b6ae983950c161890bf5ae1b2af80cc56c530/trove_classifiers-2025.5.9.12-py3-none-any.whl", hash = "sha256:e381c05537adac78881c8fa345fd0e9970159f4e4a04fcc42cfd3129cca640ce", size = 14119, upload-time = "2025-05-09T12:04:46.38Z" }, -] - -[[package]] -name = "typeguard" -version = "4.4.4" -source = { registry = "https://pypi.org/simple" } -dependencies = [ - { name = "typing-extensions" }, -] -sdist = { url = "https://files.pythonhosted.org/packages/c7/68/71c1a15b5f65f40e91b65da23b8224dad41349894535a97f63a52e462196/typeguard-4.4.4.tar.gz", hash = "sha256:3a7fd2dffb705d4d0efaed4306a704c89b9dee850b688f060a8b1615a79e5f74", size = 75203, upload-time = "2025-06-18T09:56:07.624Z" } -wheels = [ - { url = "https://files.pythonhosted.org/packages/1b/a9/e3aee762739c1d7528da1c3e06d518503f8b6c439c35549b53735ba52ead/typeguard-4.4.4-py3-none-any.whl", hash = "sha256:b5f562281b6bfa1f5492470464730ef001646128b180769880468bd84b68b09e", size = 34874, upload-time = "2025-06-18T09:56:05.999Z" }, + { url = "https://files.pythonhosted.org/packages/bb/4a/2e5583e544bc437d5e8e54b47db87430df9031b29b48d17f26d129fa60c0/trove_classifiers-2026.1.14.14-py3-none-any.whl", hash = "sha256:1f9553927f18d0513d8e5ff80ab8980b8202ce37ecae0e3274ed2ef11880e74d", size = 14197, upload-time = "2026-01-14T14:54:49.067Z" }, ] [[package]] @@ -2384,23 +2458,23 @@ wheels = [ [[package]] name = "types-requests" -version = "2.33.0.20260327" +version = "2.32.4.20260107" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "urllib3" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/02/5f/2e3dbae6e21be6ae026563bad96cbf76602d73aa85ea09f13419ddbdabb4/types_requests-2.33.0.20260327.tar.gz", hash = "sha256:f4f74f0b44f059e3db420ff17bd1966e3587cdd34062fe38a23cda97868f8dd8", size = 23804, upload-time = "2026-03-27T04:23:38.737Z" } +sdist = { url = "https://files.pythonhosted.org/packages/0f/f3/a0663907082280664d745929205a89d41dffb29e89a50f753af7d57d0a96/types_requests-2.32.4.20260107.tar.gz", hash = "sha256:018a11ac158f801bfa84857ddec1650750e393df8a004a8a9ae2a9bec6fcb24f", size = 23165, upload-time = "2026-01-07T03:20:54.091Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/8c/55/951e733616c92cb96b57554746d2f65f4464d080cc2cc093605f897aba89/types_requests-2.33.0.20260327-py3-none-any.whl", hash = "sha256:fde0712be6d7c9a4d490042d6323115baf872d9a71a22900809d0432de15776e", size = 20737, upload-time = "2026-03-27T04:23:37.813Z" }, + { url = "https://files.pythonhosted.org/packages/1c/12/709ea261f2bf91ef0a26a9eed20f2623227a8ed85610c1e54c5805692ecb/types_requests-2.32.4.20260107-py3-none-any.whl", hash = "sha256:b703fe72f8ce5b31ef031264fe9395cac8f46a04661a79f7ed31a80fb308730d", size = 20676, upload-time = "2026-01-07T03:20:52.929Z" }, ] [[package]] name = "typing-extensions" -version = "4.14.1" +version = "4.15.0" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/98/5a/da40306b885cc8c09109dc2e1abd358d5684b1425678151cdaed4731c822/typing_extensions-4.14.1.tar.gz", hash = "sha256:38b39f4aeeab64884ce9f74c94263ef78f3c22467c8724005483154c26648d36", size = 107673, upload-time = "2025-07-04T13:28:34.16Z" } +sdist = { url = "https://files.pythonhosted.org/packages/72/94/1a15dd82efb362ac84269196e94cf00f187f7ed21c242792a923cdb1c61f/typing_extensions-4.15.0.tar.gz", hash = "sha256:0cea48d173cc12fa28ecabc3b837ea3cf6f38c6d1136f85cbaaf598984861466", size = 109391, upload-time = "2025-08-25T13:49:26.313Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/b5/00/d631e67a838026495268c2f6884f3711a15a9a2a96cd244fdaea53b823fb/typing_extensions-4.14.1-py3-none-any.whl", hash = "sha256:d1e1e3b58374dc93031d6eda2420a48ea44a36c2b4766a4fdeb3710755731d76", size = 43906, upload-time = "2025-07-04T13:28:32.743Z" }, + { url = "https://files.pythonhosted.org/packages/18/67/36e9267722cc04a6b9f15c7f3441c2363321a3ea07da7ae0c0707beb2a9c/typing_extensions-4.15.0-py3-none-any.whl", hash = "sha256:f0fa19c6845758ab08074a0cfa8b7aecb71c999ca73d62883bc25cc018c4e548", size = 44614, upload-time = "2025-08-25T13:49:24.86Z" }, ] [[package]] @@ -2417,16 +2491,16 @@ wheels = [ [[package]] name = "urllib3" -version = "2.5.0" +version = "2.6.3" source = { registry = "https://pypi.org/simple" } -sdist = { url = "https://files.pythonhosted.org/packages/15/22/9ee70a2574a4f4599c47dd506532914ce044817c7752a79b6a51286319bc/urllib3-2.5.0.tar.gz", hash = "sha256:3fc47733c7e419d4bc3f6b3dc2b4f890bb743906a30d56ba4a5bfa4bbff92760", size = 393185, upload-time = "2025-06-18T14:07:41.644Z" } +sdist = { url = "https://files.pythonhosted.org/packages/c7/24/5f1b3bdffd70275f6661c76461e25f024d5a38a46f04aaca912426a2b1d3/urllib3-2.6.3.tar.gz", hash = "sha256:1b62b6884944a57dbe321509ab94fd4d3b307075e0c2eae991ac71ee15ad38ed", size = 435556, upload-time = "2026-01-07T16:24:43.925Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/a7/c2/fe1e52489ae3122415c51f387e221dd0773709bad6c6cdaa599e8a2c5185/urllib3-2.5.0-py3-none-any.whl", hash = "sha256:e6b01673c0fa6a13e374b50871808eb3bf7046c4b125b216f6bf1cc604cff0dc", size = 129795, upload-time = "2025-06-18T14:07:40.39Z" }, + { url = "https://files.pythonhosted.org/packages/39/08/aaaad47bc4e9dc8c725e68f9d04865dbcb2052843ff09c97b08904852d84/urllib3-2.6.3-py3-none-any.whl", hash = "sha256:bf272323e553dfb2e87d9bfd225ca7b0f467b919d7bbd355436d3fd37cb0acd4", size = 131584, upload-time = "2026-01-07T16:24:42.685Z" }, ] [[package]] name = "uv-dynamic-versioning" -version = "0.14.0" +version = "0.13.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "dunamai" }, @@ -2434,9 +2508,9 @@ dependencies = [ { name = "jinja2" }, { name = "tomlkit" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/15/ef/63270118de5af8f45ba417946290b63f86b0b2a7d07d739d5dc619462711/uv_dynamic_versioning-0.14.0.tar.gz", hash = "sha256:574fbc07e87ace45c01d55967ad3b864871257b98ff5b8ac87c261227ac8db5b", size = 47203, upload-time = "2026-03-22T04:53:36.374Z" } +sdist = { url = "https://files.pythonhosted.org/packages/24/b7/46e3106071b85016237f6de589e99f614565d10a16af17b374d003272076/uv_dynamic_versioning-0.13.0.tar.gz", hash = "sha256:3220cbf10987d862d78e9931957782a274fa438d33efb1fa26b8155353749e06", size = 38797, upload-time = "2026-01-19T09:45:33.366Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/89/39/35773a629ac27d8803ff5ed86bde89d06f77041d7afa0a06cdc584ee8c6f/uv_dynamic_versioning-0.14.0-py3-none-any.whl", hash = "sha256:e087c346a786e98d41292ac2315180fb700cedfb30565fc973d64ce11a112387", size = 12172, upload-time = "2026-03-22T04:53:35.063Z" }, + { url = "https://files.pythonhosted.org/packages/28/4f/15d9ec8aaed4a78aca1b8f0368f0cdd3cca8a04a81edbf03bc9e12c1a188/uv_dynamic_versioning-0.13.0-py3-none-any.whl", hash = "sha256:86d37b89fa2b6836a515301f74ea2d56a1bc59a46a74d66a24c869d1fc8f7585", size = 11480, upload-time = "2026-01-19T09:45:32.002Z" }, ] [[package]] @@ -2455,17 +2529,18 @@ wheels = [ [[package]] name = "virtualenv" -version = "20.36.1" +version = "21.2.0" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "distlib" }, { name = "filelock" }, { name = "platformdirs" }, + { name = "python-discovery" }, { name = "typing-extensions", marker = "python_full_version < '3.11'" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/aa/a3/4d310fa5f00863544e1d0f4de93bddec248499ccf97d4791bc3122c9d4f3/virtualenv-20.36.1.tar.gz", hash = "sha256:8befb5c81842c641f8ee658481e42641c68b5eab3521d8e092d18320902466ba", size = 6032239, upload-time = "2026-01-09T18:21:01.296Z" } +sdist = { url = "https://files.pythonhosted.org/packages/aa/92/58199fe10049f9703c2666e809c4f686c54ef0a68b0f6afccf518c0b1eb9/virtualenv-21.2.0.tar.gz", hash = "sha256:1720dc3a62ef5b443092e3f499228599045d7fea4c79199770499df8becf9098", size = 5840618, upload-time = "2026-03-09T17:24:38.013Z" } wheels = [ - { url = "https://files.pythonhosted.org/packages/6a/2a/dc2228b2888f51192c7dc766106cd475f1b768c10caaf9727659726f7391/virtualenv-20.36.1-py3-none-any.whl", hash = "sha256:575a8d6b124ef88f6f51d56d656132389f961062a9177016a50e4f507bbcc19f", size = 6008258, upload-time = "2026-01-09T18:20:59.425Z" }, + { url = "https://files.pythonhosted.org/packages/c6/59/7d02447a55b2e55755011a647479041bc92a82e143f96a8195cb33bd0a1c/virtualenv-21.2.0-py3-none-any.whl", hash = "sha256:1bd755b504931164a5a496d217c014d098426cddc79363ad66ac78125f9d908f", size = 5825084, upload-time = "2026-03-09T17:24:35.378Z" }, ] [[package]] @@ -2536,6 +2611,92 @@ wheels = [ { url = "https://files.pythonhosted.org/packages/6f/28/258ebab549c2bf3e64d2b0217b973467394a9cea8c42f70418ca2c5d0d2e/websockets-16.0-py3-none-any.whl", hash = "sha256:1637db62fad1dc833276dded54215f2c7fa46912301a24bd94d45d46a011ceec", size = 171598, upload-time = "2026-01-10T09:23:45.395Z" }, ] +[[package]] +name = "wrapt" +version = "2.1.2" +source = { registry = "https://pypi.org/simple" } +sdist = { url = "https://files.pythonhosted.org/packages/2e/64/925f213fdcbb9baeb1530449ac71a4d57fc361c053d06bf78d0c5c7cd80c/wrapt-2.1.2.tar.gz", hash = "sha256:3996a67eecc2c68fd47b4e3c564405a5777367adfd9b8abb58387b63ee83b21e", size = 81678, upload-time = "2026-03-06T02:53:25.134Z" } +wheels = [ + { url = "https://files.pythonhosted.org/packages/da/d2/387594fb592d027366645f3d7cc9b4d7ca7be93845fbaba6d835a912ef3c/wrapt-2.1.2-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:4b7a86d99a14f76facb269dc148590c01aaf47584071809a70da30555228158c", size = 60669, upload-time = "2026-03-06T02:52:40.671Z" }, + { url = "https://files.pythonhosted.org/packages/c9/18/3f373935bc5509e7ac444c8026a56762e50c1183e7061797437ca96c12ce/wrapt-2.1.2-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a819e39017f95bf7aede768f75915635aa8f671f2993c036991b8d3bfe8dbb6f", size = 61603, upload-time = "2026-03-06T02:54:21.032Z" }, + { url = "https://files.pythonhosted.org/packages/c2/7a/32758ca2853b07a887a4574b74e28843919103194bb47001a304e24af62f/wrapt-2.1.2-cp310-cp310-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:5681123e60aed0e64c7d44f72bbf8b4ce45f79d81467e2c4c728629f5baf06eb", size = 113632, upload-time = "2026-03-06T02:53:54.121Z" }, + { url = "https://files.pythonhosted.org/packages/1d/d5/eeaa38f670d462e97d978b3b0d9ce06d5b91e54bebac6fbed867809216e7/wrapt-2.1.2-cp310-cp310-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:2b8b28e97a44d21836259739ae76284e180b18abbb4dcfdff07a415cf1016c3e", size = 115644, upload-time = "2026-03-06T02:54:53.33Z" }, + { url = "https://files.pythonhosted.org/packages/e3/09/2a41506cb17affb0bdf9d5e2129c8c19e192b388c4c01d05e1b14db23c00/wrapt-2.1.2-cp310-cp310-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:cef91c95a50596fcdc31397eb6955476f82ae8a3f5a8eabdc13611b60ee380ba", size = 112016, upload-time = "2026-03-06T02:54:43.274Z" }, + { url = "https://files.pythonhosted.org/packages/64/15/0e6c3f5e87caadc43db279724ee36979246d5194fa32fed489c73643ba59/wrapt-2.1.2-cp310-cp310-musllinux_1_2_aarch64.whl", hash = "sha256:dad63212b168de8569b1c512f4eac4b57f2c6934b30df32d6ee9534a79f1493f", size = 114823, upload-time = "2026-03-06T02:54:29.392Z" }, + { url = "https://files.pythonhosted.org/packages/56/b2/0ad17c8248f4e57bedf44938c26ec3ee194715f812d2dbbd9d7ff4be6c06/wrapt-2.1.2-cp310-cp310-musllinux_1_2_riscv64.whl", hash = "sha256:d307aa6888d5efab2c1cde09843d48c843990be13069003184b67d426d145394", size = 111244, upload-time = "2026-03-06T02:54:02.149Z" }, + { url = "https://files.pythonhosted.org/packages/ff/04/bcdba98c26f2c6522c7c09a726d5d9229120163493620205b2f76bd13c01/wrapt-2.1.2-cp310-cp310-musllinux_1_2_x86_64.whl", hash = "sha256:c87cf3f0c85e27b3ac7d9ad95da166bf8739ca215a8b171e8404a2d739897a45", size = 113307, upload-time = "2026-03-06T02:54:12.428Z" }, + { url = "https://files.pythonhosted.org/packages/0e/1b/5e2883c6bc14143924e465a6fc5a92d09eeabe35310842a481fb0581f832/wrapt-2.1.2-cp310-cp310-win32.whl", hash = "sha256:d1c5fea4f9fe3762e2b905fdd67df51e4be7a73b7674957af2d2ade71a5c075d", size = 57986, upload-time = "2026-03-06T02:54:26.823Z" }, + { url = "https://files.pythonhosted.org/packages/42/5a/4efc997bccadd3af5749c250b49412793bc41e13a83a486b2b54a33e240c/wrapt-2.1.2-cp310-cp310-win_amd64.whl", hash = "sha256:d8f7740e1af13dff2684e4d56fe604a7e04d6c94e737a60568d8d4238b9a0c71", size = 60336, upload-time = "2026-03-06T02:54:18Z" }, + { url = "https://files.pythonhosted.org/packages/c1/f5/a2bb833e20181b937e87c242645ed5d5aa9c373006b0467bfe1a35c727d0/wrapt-2.1.2-cp310-cp310-win_arm64.whl", hash = "sha256:1c6cc827c00dc839350155f316f1f8b4b0c370f52b6a19e782e2bda89600c7dc", size = 58757, upload-time = "2026-03-06T02:53:51.545Z" }, + { url = "https://files.pythonhosted.org/packages/c7/81/60c4471fce95afa5922ca09b88a25f03c93343f759aae0f31fb4412a85c7/wrapt-2.1.2-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:96159a0ee2b0277d44201c3b5be479a9979cf154e8c82fa5df49586a8e7679bb", size = 60666, upload-time = "2026-03-06T02:52:58.934Z" }, + { url = "https://files.pythonhosted.org/packages/6b/be/80e80e39e7cb90b006a0eaf11c73ac3a62bbfb3068469aec15cc0bc795de/wrapt-2.1.2-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:98ba61833a77b747901e9012072f038795de7fc77849f1faa965464f3f87ff2d", size = 61601, upload-time = "2026-03-06T02:53:00.487Z" }, + { url = "https://files.pythonhosted.org/packages/b0/be/d7c88cd9293c859fc74b232abdc65a229bb953997995d6912fc85af18323/wrapt-2.1.2-cp311-cp311-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:767c0dbbe76cae2a60dd2b235ac0c87c9cccf4898aef8062e57bead46b5f6894", size = 114057, upload-time = "2026-03-06T02:52:44.08Z" }, + { url = "https://files.pythonhosted.org/packages/ea/25/36c04602831a4d685d45a93b3abea61eca7fe35dab6c842d6f5d570ef94a/wrapt-2.1.2-cp311-cp311-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:9c691a6bc752c0cc4711cc0c00896fcd0f116abc253609ef64ef930032821842", size = 116099, upload-time = "2026-03-06T02:54:56.74Z" }, + { url = "https://files.pythonhosted.org/packages/5c/4e/98a6eb417ef551dc277bec1253d5246b25003cf36fdf3913b65cb7657a56/wrapt-2.1.2-cp311-cp311-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f3b7d73012ea75aee5844de58c88f44cf62d0d62711e39da5a82824a7c4626a8", size = 112457, upload-time = "2026-03-06T02:53:52.842Z" }, + { url = "https://files.pythonhosted.org/packages/cb/a6/a6f7186a5297cad8ec53fd7578533b28f795fdf5372368c74bd7e6e9841c/wrapt-2.1.2-cp311-cp311-musllinux_1_2_aarch64.whl", hash = "sha256:577dff354e7acd9d411eaf4bfe76b724c89c89c8fc9b7e127ee28c5f7bcb25b6", size = 115351, upload-time = "2026-03-06T02:53:32.684Z" }, + { url = "https://files.pythonhosted.org/packages/97/6f/06e66189e721dbebd5cf20e138acc4d1150288ce118462f2fcbff92d38db/wrapt-2.1.2-cp311-cp311-musllinux_1_2_riscv64.whl", hash = "sha256:3d7b6fd105f8b24e5bd23ccf41cb1d1099796524bcc6f7fbb8fe576c44befbc9", size = 111748, upload-time = "2026-03-06T02:53:08.455Z" }, + { url = "https://files.pythonhosted.org/packages/ef/43/4808b86f499a51370fbdbdfa6cb91e9b9169e762716456471b619fca7a70/wrapt-2.1.2-cp311-cp311-musllinux_1_2_x86_64.whl", hash = "sha256:866abdbf4612e0b34764922ef8b1c5668867610a718d3053d59e24a5e5fcfc15", size = 113783, upload-time = "2026-03-06T02:53:02.02Z" }, + { url = "https://files.pythonhosted.org/packages/91/2c/a3f28b8fa7ac2cefa01cfcaca3471f9b0460608d012b693998cd61ef43df/wrapt-2.1.2-cp311-cp311-win32.whl", hash = "sha256:5a0a0a3a882393095573344075189eb2d566e0fd205a2b6414e9997b1b800a8b", size = 57977, upload-time = "2026-03-06T02:53:27.844Z" }, + { url = "https://files.pythonhosted.org/packages/3f/c3/2b1c7bd07a27b1db885a2fab469b707bdd35bddf30a113b4917a7e2139d2/wrapt-2.1.2-cp311-cp311-win_amd64.whl", hash = "sha256:64a07a71d2730ba56f11d1a4b91f7817dc79bc134c11516b75d1921a7c6fcda1", size = 60336, upload-time = "2026-03-06T02:54:28.104Z" }, + { url = "https://files.pythonhosted.org/packages/ec/5c/76ece7b401b088daa6503d6264dd80f9a727df3e6042802de9a223084ea2/wrapt-2.1.2-cp311-cp311-win_arm64.whl", hash = "sha256:b89f095fe98bc12107f82a9f7d570dc83a0870291aeb6b1d7a7d35575f55d98a", size = 58756, upload-time = "2026-03-06T02:53:16.319Z" }, + { url = "https://files.pythonhosted.org/packages/4c/b6/1db817582c49c7fcbb7df6809d0f515af29d7c2fbf57eb44c36e98fb1492/wrapt-2.1.2-cp312-cp312-macosx_10_13_x86_64.whl", hash = "sha256:ff2aad9c4cda28a8f0653fc2d487596458c2a3f475e56ba02909e950a9efa6a9", size = 61255, upload-time = "2026-03-06T02:52:45.663Z" }, + { url = "https://files.pythonhosted.org/packages/a2/16/9b02a6b99c09227c93cd4b73acc3678114154ec38da53043c0ddc1fba0dc/wrapt-2.1.2-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:6433ea84e1cfacf32021d2a4ee909554ade7fd392caa6f7c13f1f4bf7b8e8748", size = 61848, upload-time = "2026-03-06T02:53:48.728Z" }, + { url = "https://files.pythonhosted.org/packages/af/aa/ead46a88f9ec3a432a4832dfedb84092fc35af2d0ba40cd04aea3889f247/wrapt-2.1.2-cp312-cp312-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:c20b757c268d30d6215916a5fa8461048d023865d888e437fab451139cad6c8e", size = 121433, upload-time = "2026-03-06T02:54:40.328Z" }, + { url = "https://files.pythonhosted.org/packages/3a/9f/742c7c7cdf58b59085a1ee4b6c37b013f66ac33673a7ef4aaed5e992bc33/wrapt-2.1.2-cp312-cp312-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:79847b83eb38e70d93dc392c7c5b587efe65b3e7afcc167aa8abd5d60e8761c8", size = 123013, upload-time = "2026-03-06T02:53:26.58Z" }, + { url = "https://files.pythonhosted.org/packages/e8/44/2c3dd45d53236b7ed7c646fcf212251dc19e48e599debd3926b52310fafb/wrapt-2.1.2-cp312-cp312-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:f8fba1bae256186a83d1875b2b1f4e2d1242e8fac0f58ec0d7e41b26967b965c", size = 117326, upload-time = "2026-03-06T02:53:11.547Z" }, + { url = "https://files.pythonhosted.org/packages/74/e2/b17d66abc26bd96f89dec0ecd0ef03da4a1286e6ff793839ec431b9fae57/wrapt-2.1.2-cp312-cp312-musllinux_1_2_aarch64.whl", hash = "sha256:e3d3b35eedcf5f7d022291ecd7533321c4775f7b9cd0050a31a68499ba45757c", size = 121444, upload-time = "2026-03-06T02:54:09.5Z" }, + { url = "https://files.pythonhosted.org/packages/3c/62/e2977843fdf9f03daf1586a0ff49060b1b2fc7ff85a7ea82b6217c1ae36e/wrapt-2.1.2-cp312-cp312-musllinux_1_2_riscv64.whl", hash = "sha256:6f2c5390460de57fa9582bc8a1b7a6c86e1a41dfad74c5225fc07044c15cc8d1", size = 116237, upload-time = "2026-03-06T02:54:03.884Z" }, + { url = "https://files.pythonhosted.org/packages/88/dd/27fc67914e68d740bce512f11734aec08696e6b17641fef8867c00c949fc/wrapt-2.1.2-cp312-cp312-musllinux_1_2_x86_64.whl", hash = "sha256:7dfa9f2cf65d027b951d05c662cc99ee3bd01f6e4691ed39848a7a5fffc902b2", size = 120563, upload-time = "2026-03-06T02:53:20.412Z" }, + { url = "https://files.pythonhosted.org/packages/ec/9f/b750b3692ed2ef4705cb305bd68858e73010492b80e43d2a4faa5573cbe7/wrapt-2.1.2-cp312-cp312-win32.whl", hash = "sha256:eba8155747eb2cae4a0b913d9ebd12a1db4d860fc4c829d7578c7b989bd3f2f0", size = 58198, upload-time = "2026-03-06T02:53:37.732Z" }, + { url = "https://files.pythonhosted.org/packages/8e/b2/feecfe29f28483d888d76a48f03c4c4d8afea944dbee2b0cd3380f9df032/wrapt-2.1.2-cp312-cp312-win_amd64.whl", hash = "sha256:1c51c738d7d9faa0b3601708e7e2eda9bf779e1b601dce6c77411f2a1b324a63", size = 60441, upload-time = "2026-03-06T02:52:47.138Z" }, + { url = "https://files.pythonhosted.org/packages/44/e1/e328f605d6e208547ea9fd120804fcdec68536ac748987a68c47c606eea8/wrapt-2.1.2-cp312-cp312-win_arm64.whl", hash = "sha256:c8e46ae8e4032792eb2f677dbd0d557170a8e5524d22acc55199f43efedd39bf", size = 58836, upload-time = "2026-03-06T02:53:22.053Z" }, + { url = "https://files.pythonhosted.org/packages/4c/7a/d936840735c828b38d26a854e85d5338894cda544cb7a85a9d5b8b9c4df7/wrapt-2.1.2-cp313-cp313-macosx_10_13_x86_64.whl", hash = "sha256:787fd6f4d67befa6fe2abdffcbd3de2d82dfc6fb8a6d850407c53332709d030b", size = 61259, upload-time = "2026-03-06T02:53:41.922Z" }, + { url = "https://files.pythonhosted.org/packages/5e/88/9a9b9a90ac8ca11c2fdb6a286cb3a1fc7dd774c00ed70929a6434f6bc634/wrapt-2.1.2-cp313-cp313-macosx_11_0_arm64.whl", hash = "sha256:4bdf26e03e6d0da3f0e9422fd36bcebf7bc0eeb55fdf9c727a09abc6b9fe472e", size = 61851, upload-time = "2026-03-06T02:52:48.672Z" }, + { url = "https://files.pythonhosted.org/packages/03/a9/5b7d6a16fd6533fed2756900fc8fc923f678179aea62ada6d65c92718c00/wrapt-2.1.2-cp313-cp313-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:bbac24d879aa22998e87f6b3f481a5216311e7d53c7db87f189a7a0266dafffb", size = 121446, upload-time = "2026-03-06T02:54:14.013Z" }, + { url = "https://files.pythonhosted.org/packages/45/bb/34c443690c847835cfe9f892be78c533d4f32366ad2888972c094a897e39/wrapt-2.1.2-cp313-cp313-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:16997dfb9d67addc2e3f41b62a104341e80cac52f91110dece393923c0ebd5ca", size = 123056, upload-time = "2026-03-06T02:54:10.829Z" }, + { url = "https://files.pythonhosted.org/packages/93/b9/ff205f391cb708f67f41ea148545f2b53ff543a7ac293b30d178af4d2271/wrapt-2.1.2-cp313-cp313-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:162e4e2ba7542da9027821cb6e7c5e068d64f9a10b5f15512ea28e954893a267", size = 117359, upload-time = "2026-03-06T02:53:03.623Z" }, + { url = "https://files.pythonhosted.org/packages/1f/3d/1ea04d7747825119c3c9a5e0874a40b33594ada92e5649347c457d982805/wrapt-2.1.2-cp313-cp313-musllinux_1_2_aarch64.whl", hash = "sha256:f29c827a8d9936ac320746747a016c4bc66ef639f5cd0d32df24f5eacbf9c69f", size = 121479, upload-time = "2026-03-06T02:53:45.844Z" }, + { url = "https://files.pythonhosted.org/packages/78/cc/ee3a011920c7a023b25e8df26f306b2484a531ab84ca5c96260a73de76c0/wrapt-2.1.2-cp313-cp313-musllinux_1_2_riscv64.whl", hash = "sha256:a9dd9813825f7ecb018c17fd147a01845eb330254dff86d3b5816f20f4d6aaf8", size = 116271, upload-time = "2026-03-06T02:54:46.356Z" }, + { url = "https://files.pythonhosted.org/packages/98/fd/e5ff7ded41b76d802cf1191288473e850d24ba2e39a6ec540f21ae3b57cb/wrapt-2.1.2-cp313-cp313-musllinux_1_2_x86_64.whl", hash = "sha256:6f8dbdd3719e534860d6a78526aafc220e0241f981367018c2875178cf83a413", size = 120573, upload-time = "2026-03-06T02:52:50.163Z" }, + { url = "https://files.pythonhosted.org/packages/47/c5/242cae3b5b080cd09bacef0591691ba1879739050cc7c801ff35c8886b66/wrapt-2.1.2-cp313-cp313-win32.whl", hash = "sha256:5c35b5d82b16a3bc6e0a04349b606a0582bc29f573786aebe98e0c159bc48db6", size = 58205, upload-time = "2026-03-06T02:53:47.494Z" }, + { url = "https://files.pythonhosted.org/packages/12/69/c358c61e7a50f290958809b3c61ebe8b3838ea3e070d7aac9814f95a0528/wrapt-2.1.2-cp313-cp313-win_amd64.whl", hash = "sha256:f8bc1c264d8d1cf5b3560a87bbdd31131573eb25f9f9447bb6252b8d4c44a3a1", size = 60452, upload-time = "2026-03-06T02:53:30.038Z" }, + { url = "https://files.pythonhosted.org/packages/8e/66/c8a6fcfe321295fd8c0ab1bd685b5a01462a9b3aa2f597254462fc2bc975/wrapt-2.1.2-cp313-cp313-win_arm64.whl", hash = "sha256:3beb22f674550d5634642c645aba4c72a2c66fb185ae1aebe1e955fae5a13baf", size = 58842, upload-time = "2026-03-06T02:52:52.114Z" }, + { url = "https://files.pythonhosted.org/packages/da/55/9c7052c349106e0b3f17ae8db4b23a691a963c334de7f9dbd60f8f74a831/wrapt-2.1.2-cp313-cp313t-macosx_10_13_x86_64.whl", hash = "sha256:0fc04bc8664a8bc4c8e00b37b5355cffca2535209fba1abb09ae2b7c76ddf82b", size = 63075, upload-time = "2026-03-06T02:53:19.108Z" }, + { url = "https://files.pythonhosted.org/packages/09/a8/ce7b4006f7218248dd71b7b2b732d0710845a0e49213b18faef64811ffef/wrapt-2.1.2-cp313-cp313t-macosx_11_0_arm64.whl", hash = "sha256:a9b9d50c9af998875a1482a038eb05755dfd6fe303a313f6a940bb53a83c3f18", size = 63719, upload-time = "2026-03-06T02:54:33.452Z" }, + { url = "https://files.pythonhosted.org/packages/e4/e5/2ca472e80b9e2b7a17f106bb8f9df1db11e62101652ce210f66935c6af67/wrapt-2.1.2-cp313-cp313t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:2d3ff4f0024dd224290c0eabf0240f1bfc1f26363431505fb1b0283d3b08f11d", size = 152643, upload-time = "2026-03-06T02:52:42.721Z" }, + { url = "https://files.pythonhosted.org/packages/36/42/30f0f2cefca9d9cbf6835f544d825064570203c3e70aa873d8ae12e23791/wrapt-2.1.2-cp313-cp313t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:3278c471f4468ad544a691b31bb856374fbdefb7fee1a152153e64019379f015", size = 158805, upload-time = "2026-03-06T02:54:25.441Z" }, + { url = "https://files.pythonhosted.org/packages/bb/67/d08672f801f604889dcf58f1a0b424fe3808860ede9e03affc1876b295af/wrapt-2.1.2-cp313-cp313t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:a8914c754d3134a3032601c6984db1c576e6abaf3fc68094bb8ab1379d75ff92", size = 145990, upload-time = "2026-03-06T02:53:57.456Z" }, + { url = "https://files.pythonhosted.org/packages/68/a7/fd371b02e73babec1de6ade596e8cd9691051058cfdadbfd62a5898f3295/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_aarch64.whl", hash = "sha256:ff95d4264e55839be37bafe1536db2ab2de19da6b65f9244f01f332b5286cfbf", size = 155670, upload-time = "2026-03-06T02:54:55.309Z" }, + { url = "https://files.pythonhosted.org/packages/86/2d/9fe0095dfdb621009f40117dcebf41d7396c2c22dca6eac779f4c007b86c/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_riscv64.whl", hash = "sha256:76405518ca4e1b76fbb1b9f686cff93aebae03920cc55ceeec48ff9f719c5f67", size = 144357, upload-time = "2026-03-06T02:54:24.092Z" }, + { url = "https://files.pythonhosted.org/packages/0e/b6/ec7b4a254abbe4cde9fa15c5d2cca4518f6b07d0f1b77d4ee9655e30280e/wrapt-2.1.2-cp313-cp313t-musllinux_1_2_x86_64.whl", hash = "sha256:c0be8b5a74c5824e9359b53e7e58bef71a729bacc82e16587db1c4ebc91f7c5a", size = 150269, upload-time = "2026-03-06T02:53:31.268Z" }, + { url = "https://files.pythonhosted.org/packages/6e/6b/2fabe8ebf148f4ee3c782aae86a795cc68ffe7d432ef550f234025ce0cfa/wrapt-2.1.2-cp313-cp313t-win32.whl", hash = "sha256:f01277d9a5fc1862f26f7626da9cf443bebc0abd2f303f41c5e995b15887dabd", size = 59894, upload-time = "2026-03-06T02:54:15.391Z" }, + { url = "https://files.pythonhosted.org/packages/ca/fb/9ba66fc2dedc936de5f8073c0217b5d4484e966d87723415cc8262c5d9c2/wrapt-2.1.2-cp313-cp313t-win_amd64.whl", hash = "sha256:84ce8f1c2104d2f6daa912b1b5b039f331febfeee74f8042ad4e04992bd95c8f", size = 63197, upload-time = "2026-03-06T02:54:41.943Z" }, + { url = "https://files.pythonhosted.org/packages/c0/1c/012d7423c95d0e337117723eb8ecf73c622ce15a97847e84cf3f8f26cd7e/wrapt-2.1.2-cp313-cp313t-win_arm64.whl", hash = "sha256:a93cd767e37faeddbe07d8fc4212d5cba660af59bdb0f6372c93faaa13e6e679", size = 60363, upload-time = "2026-03-06T02:54:48.093Z" }, + { url = "https://files.pythonhosted.org/packages/39/25/e7ea0b417db02bb796182a5316398a75792cd9a22528783d868755e1f669/wrapt-2.1.2-cp314-cp314-macosx_10_15_x86_64.whl", hash = "sha256:1370e516598854e5b4366e09ce81e08bfe94d42b0fd569b88ec46cc56d9164a9", size = 61418, upload-time = "2026-03-06T02:53:55.706Z" }, + { url = "https://files.pythonhosted.org/packages/ec/0f/fa539e2f6a770249907757eaeb9a5ff4deb41c026f8466c1c6d799088a9b/wrapt-2.1.2-cp314-cp314-macosx_11_0_arm64.whl", hash = "sha256:6de1a3851c27e0bd6a04ca993ea6f80fc53e6c742ee1601f486c08e9f9b900a9", size = 61914, upload-time = "2026-03-06T02:52:53.37Z" }, + { url = "https://files.pythonhosted.org/packages/53/37/02af1867f5b1441aaeda9c82deed061b7cd1372572ddcd717f6df90b5e93/wrapt-2.1.2-cp314-cp314-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:de9f1a2bbc5ac7f6012ec24525bdd444765a2ff64b5985ac6e0692144838542e", size = 120417, upload-time = "2026-03-06T02:54:30.74Z" }, + { url = "https://files.pythonhosted.org/packages/c3/b7/0138a6238c8ba7476c77cf786a807f871672b37f37a422970342308276e7/wrapt-2.1.2-cp314-cp314-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:970d57ed83fa040d8b20c52fe74a6ae7e3775ae8cff5efd6a81e06b19078484c", size = 122797, upload-time = "2026-03-06T02:54:51.539Z" }, + { url = "https://files.pythonhosted.org/packages/e1/ad/819ae558036d6a15b7ed290d5b14e209ca795dd4da9c58e50c067d5927b0/wrapt-2.1.2-cp314-cp314-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:3969c56e4563c375861c8df14fa55146e81ac11c8db49ea6fb7f2ba58bc1ff9a", size = 117350, upload-time = "2026-03-06T02:54:37.651Z" }, + { url = "https://files.pythonhosted.org/packages/8b/2d/afc18dc57a4600a6e594f77a9ae09db54f55ba455440a54886694a84c71b/wrapt-2.1.2-cp314-cp314-musllinux_1_2_aarch64.whl", hash = "sha256:57d7c0c980abdc5f1d98b11a2aa3bb159790add80258c717fa49a99921456d90", size = 121223, upload-time = "2026-03-06T02:54:35.221Z" }, + { url = "https://files.pythonhosted.org/packages/b9/5b/5ec189b22205697bc56eb3b62aed87a1e0423e9c8285d0781c7a83170d15/wrapt-2.1.2-cp314-cp314-musllinux_1_2_riscv64.whl", hash = "sha256:776867878e83130c7a04237010463372e877c1c994d449ca6aaafeab6aab2586", size = 116287, upload-time = "2026-03-06T02:54:19.654Z" }, + { url = "https://files.pythonhosted.org/packages/f7/2d/f84939a7c9b5e6cdd8a8d0f6a26cabf36a0f7e468b967720e8b0cd2bdf69/wrapt-2.1.2-cp314-cp314-musllinux_1_2_x86_64.whl", hash = "sha256:fab036efe5464ec3291411fabb80a7a39e2dd80bae9bcbeeca5087fdfa891e19", size = 119593, upload-time = "2026-03-06T02:54:16.697Z" }, + { url = "https://files.pythonhosted.org/packages/0b/fe/ccd22a1263159c4ac811ab9374c061bcb4a702773f6e06e38de5f81a1bdc/wrapt-2.1.2-cp314-cp314-win32.whl", hash = "sha256:e6ed62c82ddf58d001096ae84ce7f833db97ae2263bff31c9b336ba8cfe3f508", size = 58631, upload-time = "2026-03-06T02:53:06.498Z" }, + { url = "https://files.pythonhosted.org/packages/65/0a/6bd83be7bff2e7efaac7b4ac9748da9d75a34634bbbbc8ad077d527146df/wrapt-2.1.2-cp314-cp314-win_amd64.whl", hash = "sha256:467e7c76315390331c67073073d00662015bb730c566820c9ca9b54e4d67fd04", size = 60875, upload-time = "2026-03-06T02:53:50.252Z" }, + { url = "https://files.pythonhosted.org/packages/6c/c0/0b3056397fe02ff80e5a5d72d627c11eb885d1ca78e71b1a5c1e8c7d45de/wrapt-2.1.2-cp314-cp314-win_arm64.whl", hash = "sha256:da1f00a557c66225d53b095a97eace0fc5349e3bfda28fa34ffae238978ee575", size = 59164, upload-time = "2026-03-06T02:53:59.128Z" }, + { url = "https://files.pythonhosted.org/packages/71/ed/5d89c798741993b2371396eb9d4634f009ff1ad8a6c78d366fe2883ea7a6/wrapt-2.1.2-cp314-cp314t-macosx_10_15_x86_64.whl", hash = "sha256:62503ffbc2d3a69891cf29beeaccdb4d5e0a126e2b6a851688d4777e01428dbb", size = 63163, upload-time = "2026-03-06T02:52:54.873Z" }, + { url = "https://files.pythonhosted.org/packages/c6/8c/05d277d182bf36b0a13d6bd393ed1dec3468a25b59d01fba2dd70fe4d6ae/wrapt-2.1.2-cp314-cp314t-macosx_11_0_arm64.whl", hash = "sha256:c7e6cd120ef837d5b6f860a6ea3745f8763805c418bb2f12eeb1fa6e25f22d22", size = 63723, upload-time = "2026-03-06T02:52:56.374Z" }, + { url = "https://files.pythonhosted.org/packages/f4/27/6c51ec1eff4413c57e72d6106bb8dec6f0c7cdba6503d78f0fa98767bcc9/wrapt-2.1.2-cp314-cp314t-manylinux1_x86_64.manylinux_2_28_x86_64.manylinux_2_5_x86_64.whl", hash = "sha256:3769a77df8e756d65fbc050333f423c01ae012b4f6731aaf70cf2bef61b34596", size = 152652, upload-time = "2026-03-06T02:53:23.79Z" }, + { url = "https://files.pythonhosted.org/packages/db/4c/d7dd662d6963fc7335bfe29d512b02b71cdfa23eeca7ab3ac74a67505deb/wrapt-2.1.2-cp314-cp314t-manylinux2014_aarch64.manylinux_2_17_aarch64.manylinux_2_28_aarch64.whl", hash = "sha256:a76d61a2e851996150ba0f80582dd92a870643fa481f3b3846f229de88caf044", size = 158807, upload-time = "2026-03-06T02:53:35.742Z" }, + { url = "https://files.pythonhosted.org/packages/b4/4d/1e5eea1a78d539d346765727422976676615814029522c76b87a95f6bcdd/wrapt-2.1.2-cp314-cp314t-manylinux_2_31_riscv64.manylinux_2_39_riscv64.whl", hash = "sha256:6f97edc9842cf215312b75fe737ee7c8adda75a89979f8e11558dfff6343cc4b", size = 146061, upload-time = "2026-03-06T02:52:57.574Z" }, + { url = "https://files.pythonhosted.org/packages/89/bc/62cabea7695cd12a288023251eeefdcb8465056ddaab6227cb78a2de005b/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_aarch64.whl", hash = "sha256:4006c351de6d5007aa33a551f600404ba44228a89e833d2fadc5caa5de8edfbf", size = 155667, upload-time = "2026-03-06T02:53:39.422Z" }, + { url = "https://files.pythonhosted.org/packages/e9/99/6f2888cd68588f24df3a76572c69c2de28287acb9e1972bf0c83ce97dbc1/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_riscv64.whl", hash = "sha256:a9372fc3639a878c8e7d87e1556fa209091b0a66e912c611e3f833e2c4202be2", size = 144392, upload-time = "2026-03-06T02:54:22.41Z" }, + { url = "https://files.pythonhosted.org/packages/40/51/1dfc783a6c57971614c48e361a82ca3b6da9055879952587bc99fe1a7171/wrapt-2.1.2-cp314-cp314t-musllinux_1_2_x86_64.whl", hash = "sha256:3144b027ff30cbd2fca07c0a87e67011adb717eb5f5bd8496325c17e454257a3", size = 150296, upload-time = "2026-03-06T02:54:07.848Z" }, + { url = "https://files.pythonhosted.org/packages/6c/38/cbb8b933a0201076c1f64fc42883b0023002bdc14a4964219154e6ff3350/wrapt-2.1.2-cp314-cp314t-win32.whl", hash = "sha256:3b8d15e52e195813efe5db8cec156eebe339aaf84222f4f4f051a6c01f237ed7", size = 60539, upload-time = "2026-03-06T02:54:00.594Z" }, + { url = "https://files.pythonhosted.org/packages/82/dd/e5176e4b241c9f528402cebb238a36785a628179d7d8b71091154b3e4c9e/wrapt-2.1.2-cp314-cp314t-win_amd64.whl", hash = "sha256:08ffa54146a7559f5b8df4b289b46d963a8e74ed16ba3687f99896101a3990c5", size = 63969, upload-time = "2026-03-06T02:54:39Z" }, + { url = "https://files.pythonhosted.org/packages/5c/99/79f17046cf67e4a95b9987ea129632ba8bcec0bc81f3fb3d19bdb0bd60cd/wrapt-2.1.2-cp314-cp314t-win_arm64.whl", hash = "sha256:72aaa9d0d8e4ed0e2e98019cea47a21f823c9dd4b43c7b77bba6679ffcca6a00", size = 60554, upload-time = "2026-03-06T02:53:14.132Z" }, + { url = "https://files.pythonhosted.org/packages/1a/c7/8528ac2dfa2c1e6708f647df7ae144ead13f0a31146f43c7264b4942bf12/wrapt-2.1.2-py3-none-any.whl", hash = "sha256:b8fd6fa2b2c4e7621808f8c62e8317f4aae56e59721ad933bac5239d913cf0e8", size = 43993, upload-time = "2026-03-06T02:53:12.905Z" }, +] + [[package]] name = "zipp" version = "3.23.0"