diff --git a/.github/renovate.json5 b/.github/renovate.json5 index 4ccbaf68..165aee89 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,22 +1,49 @@ { + // Allow for intellisense in editors + $schema: "https://docs.renovatebot.com/renovate-schema.json", + + // List of rules to apply extends: [ + // Recommended best practices from renovate itself + // See: https://docs.renovatebot.com/upgrade-best-practices/#whats-in-the-configbest-practices-preset + "config:best-practices", + + // Apply our own internal best practices + // See: https://github.com/apollographql/apollo-mcp-server/commits/main/.github/renovate.json5 "github>apollographql/renovate-config-apollo-open-source:default.json5", + + // Update to the latest rust stable version as it releases. + // See: https://github.com/Turbo87/renovate-config/blob/master/rust/updateToolchain.json "github>Turbo87/renovate-config//rust/updateToolchain", ], + + // Globally disable all automatic update PRs from renovate packageRules: [ { enabled: false, matchPackageNames: ["*"], }, ], + // Automating Nix upgrades is currently in beta and opt-in only. // https://docs.renovatebot.com/modules/manager/nix/ nix: { enabled: true, }, + + // Globally enable vulnerability alerts + // + // Note: This needs extra configuration at the repository level, which is described in the link + // below. + // + // See: https://docs.renovatebot.com/configuration-options/#vulnerabilityalerts vulnerabilityAlerts: { enabled: true, }, + + // Disable automatically updating lock files to latest versions once a week. 
+ // + // See: https://docs.renovatebot.com/configuration-options/#lockfilemaintenance lockFileMaintenance: { enabled: false, }, diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 6ba78966..fb42f641 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -28,7 +28,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -55,7 +55,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -81,7 +81,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -107,7 +107,7 @@ jobs: actions: write contents: read steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.event.pull_request.head.sha }} - uses: nixbuild/nix-quick-install-action@v30 @@ -124,3 +124,22 @@ jobs: gc-max-store-size: 5G - name: Run Tests run: 'nix develop --command bash -c "cargo test"' + + coverage: + name: Run Coverage + runs-on: ubuntu-24.04 + permissions: + contents: read + steps: + - uses: actions/checkout@v5 + with: + ref: ${{ github.event.pull_request.head.sha }} + - uses: taiki-e/install-action@cargo-llvm-cov + - name: Generate code coverage + run: cargo llvm-cov --all-features --workspace --codecov --output-path codecov.json + - name: Upload coverage to Codecov + uses: codecov/codecov-action@v5 + with: + token: ${{ secrets.CODECOV_TOKEN }} # not required for public repos + files: codecov.json + fail_ci_if_error: true diff --git a/.github/workflows/prep-release.yml b/.github/workflows/prep-release.yml index cb992199..04dc979c 100644 --- a/.github/workflows/prep-release.yml +++ 
b/.github/workflows/prep-release.yml @@ -47,7 +47,7 @@ jobs: GH_TOKEN: ${{ secrets.GH_PAT }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 @@ -232,7 +232,7 @@ jobs: - name: Open/Update draft PR to main env: - HEAD: ${{ github.ref_name }} + HEAD: release/${{ steps.bump.outputs.new_version }} TITLE: Releasing ${{ steps.bump.outputs.new_version }} shell: bash run: | diff --git a/.github/workflows/release-bins.yml b/.github/workflows/release-bins.yml index 3f808715..c73063b1 100644 --- a/.github/workflows/release-bins.yml +++ b/.github/workflows/release-bins.yml @@ -42,7 +42,7 @@ jobs: attestations: write id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.ref }} - uses: nixbuild/nix-quick-install-action@v30 diff --git a/.github/workflows/release-container.yml b/.github/workflows/release-container.yml index 79d5dd33..64bde965 100644 --- a/.github/workflows/release-container.yml +++ b/.github/workflows/release-container.yml @@ -32,7 +32,7 @@ jobs: attestations: write id-token: write steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: ref: ${{ github.ref }} diff --git a/.github/workflows/sync-develop.yml b/.github/workflows/sync-develop.yml index a1e1be28..79cbc72f 100644 --- a/.github/workflows/sync-develop.yml +++ b/.github/workflows/sync-develop.yml @@ -51,7 +51,7 @@ jobs: GH_TOKEN: ${{ secrets.GH_PAT }} steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@v5 with: fetch-depth: 0 token: ${{ secrets.GH_PAT }} diff --git a/.github/workflows/verify-changeset.yml b/.github/workflows/verify-changeset.yml index bad4a44e..2bac53f0 100644 --- a/.github/workflows/verify-changeset.yml +++ b/.github/workflows/verify-changeset.yml @@ -1,6 +1,7 @@ name: Verify Changeset on: pull_request: + types: [opened, reopened, synchronize, ready_for_review] branches-ignore: - main - release/** @@ -19,7 +20,7 @@ on: jobs: verify-changeset: - if: ${{ 
!contains(github.event.pull_request.labels.*.name, 'skip-changeset') && !startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') }} + if: ${{ !contains(github.event.pull_request.labels.*.name, 'skip-changeset') && !startsWith(github.head_ref, 'sync/') && !startsWith(github.head_ref, 'conflict/') && !github.event.pull_request.draft }} name: Verify runs-on: ubuntu-24.04 permissions: diff --git a/.vscode/launch.json b/.vscode/launch.json index 9363ac0d..66814a53 100644 --- a/.vscode/launch.json +++ b/.vscode/launch.json @@ -1,75 +1,86 @@ { - "version": "0.2.0", - "configurations": [ - { - "type": "node", - "request": "launch", - "name": "Run apollo-mcp-server [Weather][Streamable HTTP]", - "runtimeExecutable": "cargo", - "runtimeArgs": [ - "run", - "--bin", - "apollo-mcp-server", - "--", - "graphql/weather/config.yaml", - ], - "cwd": "${workspaceFolder}", - "console": "integratedTerminal", - "env": { - "RUST_BACKTRACE": "1" - } - }, - { - "type": "lldb", - "request": "launch", - "name": "Debug apollo-mcp-server [Weather][Streamable HTTP]", - "cargo": { - "args": [ - "build", - "--bin=apollo-mcp-server", - "--lib" - ], - "filter": { - "name": "apollo-mcp-server", - "kind": "bin" - } - }, - "args": [ - "graphql/weather/config.yaml", - ], - "cwd": "${workspaceFolder}", - "env": { - "RUST_BACKTRACE": "1" - } - }, - { - "type": "node", - "request": "launch", - "name": "Run apollo-mcp-server [TheSpaceDevs][Streamable HTTP]", - "runtimeExecutable": "cargo", - "runtimeArgs": [ - "run", - "--bin", - "apollo-mcp-server", - "--", - "graphql/TheSpaceDevs/config.yaml", - ], - "cwd": "${workspaceFolder}", - "console": "integratedTerminal", - "env": { - "RUST_BACKTRACE": "1" - } - }, - { - "type": "node", - "request": "launch", - "name": "Run mcp-inspector", - "runtimeExecutable": "npx", - "runtimeArgs": [ - "@modelcontextprotocol/inspector" - ], - "cwd": "${workspaceFolder}", - "console": "integratedTerminal" + "version": "0.2.0", + "configurations": [ + { 
+ "type": "node", + "request": "launch", + "name": "Run apollo-mcp-server [Weather][Streamable HTTP]", + "runtimeExecutable": "cargo", + "runtimeArgs": [ + "run", + "--bin", + "apollo-mcp-server", + "--", + "graphql/weather/config.yaml" + ], + "cwd": "${workspaceFolder}", + "console": "integratedTerminal", + "env": { + "RUST_BACKTRACE": "1" + } + }, + { + "type": "lldb", + "request": "launch", + "name": "Debug apollo-mcp-server [Weather][Streamable HTTP]", + "cargo": { + "args": ["build", "--bin=apollo-mcp-server", "--lib"], + "filter": { + "name": "apollo-mcp-server", + "kind": "bin" } - ] -} \ No newline at end of file + }, + "args": ["graphql/weather/config.yaml"], + "cwd": "${workspaceFolder}", + "env": { + "RUST_BACKTRACE": "1", + "APOLLO_MCP_LOGGING__LEVEL": "debug" + } + }, + { + "type": "node", + "request": "launch", + "name": "Run apollo-mcp-server [TheSpaceDevs][Streamable HTTP]", + "runtimeExecutable": "cargo", + "runtimeArgs": [ + "run", + "--bin", + "apollo-mcp-server", + "--", + "graphql/TheSpaceDevs/config.yaml" + ], + "cwd": "${workspaceFolder}", + "console": "integratedTerminal", + "env": { + "RUST_BACKTRACE": "1" + } + }, + { + "type": "lldb", + "request": "launch", + "name": "Debug apollo-mcp-server [TheSpaceDevs][Streamable HTTP]", + "cargo": { + "args": ["build", "--bin=apollo-mcp-server", "--lib"], + "filter": { + "name": "apollo-mcp-server", + "kind": "bin" + } + }, + "args": ["graphql/TheSpaceDevs/config.yaml"], + "cwd": "${workspaceFolder}", + "env": { + "RUST_BACKTRACE": "1", + "APOLLO_MCP_LOGGING__LEVEL": "debug" + } + }, + { + "type": "node", + "request": "launch", + "name": "Run mcp-inspector", + "runtimeExecutable": "npx", + "runtimeArgs": ["@modelcontextprotocol/inspector"], + "cwd": "${workspaceFolder}", + "console": "integratedTerminal" + } + ] +} diff --git a/CHANGELOG.md b/CHANGELOG.md index 87d061bc..b79dae16 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -4,6 +4,212 @@ All notable changes to this project will be documented in 
this file. This project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html). +# [0.9.0] - 2025-09-24 + +## πŸš€ Features + +### Add CORS support - @DaleSeo PR #362 + +This PR implements comprehensive CORS support for Apollo MCP Server to enable web-based MCP clients to connect without CORS errors. The implementation and configuration draw heavily from the Router's approach. Similar to other features like health checks and telemetry, CORS is supported only for the StreamableHttp transport, making it a top-level configuration. + +### feat: Configuration for disabling authorization token passthrough - @swcollard PR #336 + +A new optional MCP Server configuration parameter, `transport.auth.disable_auth_token_passthrough`, which is `false` by default, that when true, will no longer pass through validated Auth tokens to the GraphQL API. + +### Implement metrics for mcp tool and operation counts and durations - @swcollard PR #297 + +This PR adds metrics to count and measure request duration to events throughout the MCP server. + +* apollo.mcp.operation.duration +* apollo.mcp.operation.count +* apollo.mcp.tool.duration +* apollo.mcp.tool.count +* apollo.mcp.initialize.count +* apollo.mcp.list_tools.count +* apollo.mcp.get_info.count + +### feat: adding config option for trace sampling - @alocay PR #366 + +Adding configuration option to sample traces. Can use the following options: +1. Ratio based samples (ratio >= 1 is always sample) +2. Always on +3. Always off + +Defaults to always on if not provided. + +### Prototype OpenTelemetry Traces in MCP Server - @swcollard PR #274 + +Pulls in new crates and SDKs for prototyping instrumenting the Apollo MCP Server with Open Telemetry Traces. + +* Adds new rust crates to support OTel +* Annotates execute and call_tool functions with trace macro +* Adds Axum and Tower middlewares for OTel tracing +* Refactors Logging so that all the tracing_subscribers are set together in a single module. 
+ +### Telemetry: Trace operations and auth - @swcollard PR #375 + +* Adds traces for the MCP server generating Tools from Operations and performing authorization +* Includes the HTTP status code in the top level HTTP trace + +### feat: Enhance tool descriptions - @DaleSeo PR #350 + +This PR enhances the descriptions of the introspect and search tools to offer clearer guidance for AI models on efficient GraphQL schema exploration patterns. + +### feat: adding ability to omit attributes for traces and metrics - @alocay PR #358 + +Adding ability to configure which attributes are omitted from telemetry traces and metrics. + +1. Using a Rust build script (`build.rs`) to auto-generate telemetry attribute code based on the data found in `telemetry.toml`. +2. Utilizing an enum for attributes so typos in the config file raise an error. +3. Omitting trace attributes by filtering it out in a custom exporter. +4. Omitting metric attributes by indicating which attributes are allowed via a view. +5. Created `telemetry_attributes.rs` to map `TelemetryAttribute` enum to an OTEL `Key`. + +The `telemetry.toml` file includes attributes (both for metrics and traces) as well as a list of metrics gathered. 
An example would look like the following: +``` +[attributes.apollo.mcp] +my_attribute = "Some attribute info" + +[metrics.apollo.mcp] +some.count = "Some metric count info" +``` +This would generate a file that looks like the following: +``` +/// All TelemetryAttribute values +pub const ALL_ATTRS: &[TelemetryAttribute; 1usize] = &[ + TelemetryAttribute::MyAttribute +]; +#[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema,, Clone, Eq, PartialEq, Hash, Copy)] +pub enum TelemetryAttribute { + ///Some attribute info + #[serde(alias = "my_attribute")] + MyAttribute, +} +impl TelemetryAttribute { + /// Supported telemetry attribute (tags) values + pub const fn as_str(&self) -> &'static str { + match self { + TelemetryAttribute::MyAttribute => "apollo.mcp.my_attribute", + } + } +} +#[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema,, Clone, Eq, PartialEq, Hash, Copy)] +pub enum TelemetryMetric { + ///Some metric count info + #[serde(alias = "some.count")] + SomeCount, +} +impl TelemetryMetric { + /// Converts TelemetryMetric to &str + pub const fn as_str(&self) -> &'static str { + match self { + TelemetryMetric::SomeCount => "apollo.mcp.some.count", + } + } +} +``` +An example configuration that omits `tool_name` attribute for metrics and `request_id` for tracing would look like the following: +``` +telemetry: + exporters: + metrics: + otlp: + endpoint: "http://localhost:4317" + protocol: "grpc" + omitted_attributes: + - tool_name + tracing: + otlp: + endpoint: "http://localhost:4317" + protocol: "grpc" + omitted_attributes: + - request_id +``` + +## πŸ› Fixes + +### fix: Include the cargo feature and TraceContextPropagator to send otel headers downstream - @swcollard PR #307 + +Inside the reqwest middleware, if the global text_map_propagator is not set, it will no op and not send the traceparent and tracestate headers to the Router. 
Adding this is needed to correlate traces from the mcp server to the router or other downstream APIs + +### Update SDL handling in sdl_to_api_schema function - @lennyburdette PR #365 + +Loads supergraph schemas using a function that supports various features, including Apollo Connectors. When supergraph loading failed, it would load it as a standard GraphQL schema, which reveals Federation query planning directives when using the `search` and `introspection` tools. + +### Minify: Add support for deprecated directive - @esilverm PR #367 + +Includes any existing `@deprecated` directives in the schema in the minified output of builtin tools. Now operations generated via these tools should take into account deprecated fields when being generated. + +## πŸ“ƒ Configuration + +### fix: Disable statefulness to fix initialize race condition - @swcollard PR #351 + +We've been seeing errors with state and session handling in the MCP Server, whether that is requests being sent before the initialized notification is processed, or running a fleet of MCP Server pods behind a round robin load balancer. A new configuration option under the streamable_http transport, `stateful_mode`, allows disabling session handling, which appears to fix the race condition issue. + +### Add basic config file options to otel telemetry - @swcollard PR #330 + +Adds new Configuration options for setting up configuration beyond the standard OTEL environment variables needed before. + +* Renames trace->telemetry +* Adds OTLP options for metrics and tracing to choose grpc or http upload protocols and setting the endpoints +* This configuration is all optional, so by default nothing will be logged + +## πŸ›  Maintenance + +### Fix version on mcp server tester - @alocay PR #374 + +Add a specific version when calling the mcp-server-tester for e2e tests. The current latest (1.4.1) has an issue, so to avoid problems now and in the future updating the test script to invoke the testing tool via a specific version. 
+ +### Configure Codecov with coverage targets - @DaleSeo PR #337 + +This PR adds `codecov.yml` to set up Codecov with specific coverage targets and quality standards. It helps define clear expectations for code quality. It also includes some documentation about code coverage in `CONTRIBUTING.md` and adds the Codecov badge to `README.md`. + +### Implement Test Coverage Measurement and Reporting - @DaleSeo PR #335 + +This PR adds the bare minimum for code coverage reporting using [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) and integrates with [Codecov](https://www.codecov.io/). It adds a new `coverage` job to the CI workflow that generates and uploads coverage reporting in parallel with existing tests. The setup mirrors that of Router, except it uses `nextest` instead of the built-in test runner and CircleCI instead of GitHub Actions. + +### test: add tests for server event and SupergraphSdlQuery - @DaleSeo PR #347 + +This PR adds tests for some uncovered parts of the codebase to check the Codecov integration. + +### chore: update RMCP dependency ([328](https://github.com/apollographql/apollo-mcp-server/issues/328)) + +Update the RMCP dependency to the latest version, pulling in newer specification changes. + +### ci: Pin stable rust version ([Issue #287](https://github.com/apollographql/apollo-mcp-server/issues/287)) + +Pins the stable version of Rust to the current latest version to ensure backwards compatibility with future versions. + + + +# [0.8.0] - 2025-09-12 + +## πŸš€ Features + +### feat: Configuration for disabling authorization token passthrough - @swcollard PR #336 + +A new optional new MCP Server configuration parameter, `transport.auth.disable_auth_token_passthrough`, which is `false` by default, that when true, will no longer pass through validated Auth tokens to the GraphQL API. 
+ +## πŸ›  Maintenance + +### Configure Codecov with coverage targets - @DaleSeo PR #337 + +This PR adds `codecov.yml` to set up Codecov with specific coverage targets and quality standards. It helps define clear expectations for code quality. It also includes some documentation about code coverage in `CONTRIBUTING.md` and adds the Codecov badge to `README.md`. + +### Implement Test Coverage Measurement and Reporting - @DaleSeo PR #335 + +This PR adds the bare minimum for code coverage reporting using [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) and integrates with [Codecov](https://www.codecov.io/). It adds a new `coverage` job to the CI workflow that generates and uploads coverage reporting in parallel with existing tests. The setup mirrors that of Router, except it uses `nextest` instead of the built-in test runner and CircleCI instead of GitHub Actions. + +### chore: update RMCP dependency ([328](https://github.com/apollographql/apollo-mcp-server/issues/328)) + +Update the RMCP dependency to the latest version, pulling in newer specification changes. + +### ci: Pin stable rust version ([Issue #287](https://github.com/apollographql/apollo-mcp-server/issues/287)) + +Pins the stable version of Rust to the current latest version to ensure backwards compatibility with future versions. + + + # [0.7.5] - 2025-09-03 ## πŸ› Fixes diff --git a/CHANGELOG_SECTION.md b/CHANGELOG_SECTION.md new file mode 100644 index 00000000..057307a6 --- /dev/null +++ b/CHANGELOG_SECTION.md @@ -0,0 +1,176 @@ +# [0.9.0] - 2025-09-24 + +## πŸš€ Features + +### Add CORS support - @DaleSeo PR #362 + +This PR implements comprehensive CORS support for Apollo MCP Server to enable web-based MCP clients to connect without CORS errors. The implementation and configuration draw heavily from the Router's approach. Similar to other features like health checks and telemetry, CORS is supported only for the StreamableHttp transport, making it a top-level configuration. 
+ +### feat: Configuration for disabling authorization token passthrough - @swcollard PR #336 + +A new optional MCP Server configuration parameter, `transport.auth.disable_auth_token_passthrough`, which is `false` by default, that when true, will no longer pass through validated Auth tokens to the GraphQL API. + +### Implement metrics for mcp tool and operation counts and durations - @swcollard PR #297 + +This PR adds metrics to count and measure request duration to events throughout the MCP server. + +* apollo.mcp.operation.duration +* apollo.mcp.operation.count +* apollo.mcp.tool.duration +* apollo.mcp.tool.count +* apollo.mcp.initialize.count +* apollo.mcp.list_tools.count +* apollo.mcp.get_info.count + +### feat: adding config option for trace sampling - @alocay PR #366 + +Adding configuration option to sample traces. Can use the following options: +1. Ratio based samples (ratio >= 1 is always sample) +2. Always on +3. Always off + +Defaults to always on if not provided. + +### Prototype OpenTelemetry Traces in MCP Server - @swcollard PR #274 + +Pulls in new crates and SDKs for prototyping instrumenting the Apollo MCP Server with Open Telemetry Traces. + +* Adds new rust crates to support OTel +* Annotates execute and call_tool functions with trace macro +* Adds Axum and Tower middlewares for OTel tracing +* Refactors Logging so that all the tracing_subscribers are set together in a single module. + +### Telemetry: Trace operations and auth - @swcollard PR #375 + +* Adds traces for the MCP server generating Tools from Operations and performing authorization +* Includes the HTTP status code in the top level HTTP trace + +### feat: Enhance tool descriptions - @DaleSeo PR #350 + +This PR enhances the descriptions of the introspect and search tools to offer clearer guidance for AI models on efficient GraphQL schema exploration patterns. 
+ +### feat: adding ability to omit attributes for traces and metrics - @alocay PR #358 + +Adding ability to configure which attributes are omitted from telemetry traces and metrics. + +1. Using a Rust build script (`build.rs`) to auto-generate telemetry attribute code based on the data found in `telemetry.toml`. +2. Utilizing an enum for attributes so typos in the config file raise an error. +3. Omitting trace attributes by filtering it out in a custom exporter. +4. Omitting metric attributes by indicating which attributes are allowed via a view. +5. Created `telemetry_attributes.rs` to map `TelemetryAttribute` enum to a OTEL `Key`. + +The `telemetry.toml` file includes attributes (both for metrics and traces) as well as list of metrics gathered. An example would look like the following: +``` +[attributes.apollo.mcp] +my_attribute = "Some attribute info" + +[metrics.apollo.mcp] +some.count = "Some metric count info" +``` +This would generate a file that looks like the following: +``` +/// All TelemetryAttribute values +pub const ALL_ATTRS: &[TelemetryAttribute; 1usize] = &[ + TelemetryAttribute::MyAttribute +]; +#[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema,, Clone, Eq, PartialEq, Hash, Copy)] +pub enum TelemetryAttribute { + ///Some attribute info + #[serde(alias = "my_attribute")] + MyAttribute, +} +impl TelemetryAttribute { + /// Supported telemetry attribute (tags) values + pub const fn as_str(&self) -> &'static str { + match self { + TelemetryAttribute::MyAttribute => "apollo.mcp.my_attribute", + } + } +} +#[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema,, Clone, Eq, PartialEq, Hash, Copy)] +pub enum TelemetryMetric { + ///Some metric count info + #[serde(alias = "some.count")] + SomeCount, +} +impl TelemetryMetric { + /// Converts TelemetryMetric to &str + pub const fn as_str(&self) -> &'static str { + match self { + TelemetryMetric::SomeCount => "apollo.mcp.some.count", + } + } +} +``` +An example configuration that omits 
`tool_name` attribute for metrics and `request_id` for tracing would look like the following: +``` +telemetry: + exporters: + metrics: + otlp: + endpoint: "http://localhost:4317" + protocol: "grpc" + omitted_attributes: + - tool_name + tracing: + otlp: + endpoint: "http://localhost:4317" + protocol: "grpc" + omitted_attributes: + - request_id +``` + +## πŸ› Fixes + +### fix: Include the cargo feature and TraceContextPropagator to send otel headers downstream - @swcollard PR #307 + +Inside the reqwest middleware, if the global text_map_propagator is not set, it will no op and not send the traceparent and tracestate headers to the Router. Adding this is needed to correlate traces from the mcp server to the router or other downstream APIs + +### Update SDL handling in sdl_to_api_schema function - @lennyburdette PR #365 + +Loads supergraph schemas using a function that supports various features, including Apollo Connectors. When supergraph loading failed, it would load it as a standard GraphQL schema, which reveals Federation query planning directives in when using the `search` and `introspection` tools. + +### Minify: Add support for deprecated directive - @esilverm PR #367 + +Includes any existing `@deprecated` directives in the schema in the minified output of builtin tools. Now operations generated via these tools should take into account deprecated fields when being generated. + +## πŸ“ƒ Configuration + +### fix: Disable statefulness to fix initialize race condition - @swcollard PR #351 + +We've been seeing errors with state and session handling in the MCP Server. Whether that is requests being sent before the initialized notification is processed. Or running a fleet of MCP Server pods behind a round robin load balancer. A new configuration option under the streamable_http transport `stateful_mode`, allows disabling session handling which appears to fix the race condition issue. 
+ +### Add basic config file options to otel telemetry - @swcollard PR #330 + +Adds new Configuration options for setting up configuration beyond the standard OTEL environment variables needed before. + +* Renames trace->telemetry +* Adds OTLP options for metrics and tracing to choose grpc or http upload protocols and setting the endpoints +* This configuration is all optional, so by default nothing will be logged + +## πŸ›  Maintenance + +### Fix version on mcp server tester - @alocay PR #374 + +Add a specific version when calling the mcp-server-tester for e2e tests. The current latest (1.4.1) as an issue so to avoid problems now and in the future updating the test script to invoke the testing tool via specific version. + +### Configure Codecov with coverage targets - @DaleSeo PR #337 + +This PR adds `codecov.yml` to set up Codecov with specific coverage targets and quality standards. It helps define clear expectations for code quality. It also includes some documentation about code coverage in `CONTRIBUTING.md` and adds the Codecov badge to `README.md`. + +### Implement Test Coverage Measurement and Reporting - @DaleSeo PR #335 + +This PR adds the bare minimum for code coverage reporting using [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) and integrates with [Codecov](https://www.codecov.io/). It adds a new `coverage` job to the CI workflow that generates and uploads coverage reporting in parallel with existing tests. The setup mirrors that of Router, except it uses `nextest` instead of the built-in test runner and CircleCI instead of GitHub Actions. + +### test: add tests for server event and SupergraphSdlQuery - @DaleSeo PR #347 + +This PR adds tests for some uncovered parts of the codebase to check the Codecov integration. + +### chore: update RMCP dependency ([328](https://github.com/apollographql/apollo-mcp-server/issues/328)) + +Update the RMCP dependency to the latest version, pulling in newer specification changes. 
+ +### ci: Pin stable rust version ([Issue #287](https://github.com/apollographql/apollo-mcp-server/issues/287)) + +Pins the stable version of Rust to the current latest version to ensure backwards compatibility with future versions. + diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md index c25c3623..cb3a2152 100644 --- a/CONTRIBUTING.md +++ b/CONTRIBUTING.md @@ -57,4 +57,21 @@ It’s important that every piece of code in Apollo packages is reviewed by at l 2. Simplicity. Is this the simplest way to achieve the intended goal? If there are too many files, redundant functions, or complex lines of code, suggest a simpler way to do the same thing. In particular, avoid implementing an overly general solution when a simple, small, and pragmatic fix will do. 3. Testing. Please make sure that the tests ensure that the code won’t break when other stuff change around it. The error messages in the test should help identify what is broken exactly and how. The tests should test every edge case if possible. Please make sure you get as much coverage as possible. 4. No unnecessary or unrelated changes. PRs shouldn’t come with random formatting changes, especially in unrelated parts of the code. If there is some refactoring that needs to be done, it should be in a separate PR from a bug fix or feature, if possible. -5. Please run `cargo test`, `cargo clippy`, and `cargo fmt` prior to creating a PR. \ No newline at end of file +5. Please run `cargo test`, `cargo clippy`, and `cargo fmt` prior to creating a PR. + +### Code Coverage + +Apollo MCP Server uses comprehensive code coverage reporting to ensure code quality and test effectiveness. +The project uses [cargo-llvm-cov](https://crates.io/crates/cargo-llvm-cov) for generating code coverage reports and [Codecov](https://www.codecov.io/) for coverage analysis and reporting. Coverage is automatically generated and reported on every pull request through GitHub Actions. 
+ +#### Coverage Targets + +The project maintains the following coverage targets, configured in `codecov.yml`: + +- **Project Coverage**: Automatically maintained - should increase overall coverage on each PR +- **Patch Coverage**: 80% - requires 80% coverage on all new/modified code + +These targets help ensure that: + +- The overall codebase coverage doesn't decrease over time +- New code is well-tested before being merged diff --git a/Cargo.lock b/Cargo.lock index a129845a..5e5c947c 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -26,6 +26,7 @@ dependencies = [ "cfg-if", "getrandom 0.3.3", "once_cell", + "serde", "version_check", "zerocopy", ] @@ -45,12 +46,6 @@ version = "0.2.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "683d7910e743518b0e34f1186f92494becacb047c7b6bf616c96772180fef923" -[[package]] -name = "android-tzdata" -version = "0.1.1" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e999941b234f3131b00bc13c22d06e8c5ff726d1b6318ac7eb276997bbb4fef0" - [[package]] name = "android_system_properties" version = "0.1.5" @@ -112,15 +107,15 @@ dependencies = [ [[package]] name = "anyhow" -version = "1.0.99" +version = "1.0.100" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0674a1ddeecb70197781e945de4b3b8ffb61fa939a5597bcf48503737663100" +checksum = "a23eb6b1614318a8071c9b2521f36b424b2c83db5eb3a0fead4a6c0809af6e61" [[package]] name = "apollo-compiler" -version = "1.29.0" +version = "1.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "4369d2ac382b0752cc5ff8cdb020e7a3c74480e7d940fc99f139281f8701fb81" +checksum = "87e4c0116cde9e3e5679806cf91c464d9efb7f1e231abffc505e0f6d4b911260" dependencies = [ "ahash", "apollo-parser", @@ -130,22 +125,24 @@ dependencies = [ "rowan", "serde", "serde_json_bytes", - "thiserror 2.0.14", + "thiserror 2.0.16", "triomphe", "typed-arena", ] [[package]] name = "apollo-federation" -version = "2.5.0" +version = 
"2.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "dbea4f0fcfcd95ec6f03c41a104488f66eafb37d05bd304a219702d1814449c5" +checksum = "1595bfb0fd31882d0b2dd258205ccac93a43c0ae37038a1a6e1cc2834eaf958f" dependencies = [ "apollo-compiler", + "countmap", "derive_more", "either", + "encoding_rs", "form_urlencoded", - "hashbrown 0.15.5", + "hashbrown 0.16.0", "http", "indexmap", "itertools", @@ -166,7 +163,7 @@ dependencies = [ "shape", "strum", "strum_macros", - "thiserror 2.0.14", + "thiserror 2.0.16", "time", "tracing", "url", @@ -174,7 +171,7 @@ dependencies = [ [[package]] name = "apollo-mcp-registry" -version = "0.7.5" +version = "0.9.0" dependencies = [ "derive_more", "educe", @@ -187,7 +184,7 @@ dependencies = [ "serde", "serde_json", "test-log", - "thiserror 2.0.14", + "thiserror 2.0.16", "tokio", "tokio-stream", "tower", @@ -202,41 +199,62 @@ dependencies = [ [[package]] name = "apollo-mcp-server" -version = "0.7.5" +version = "0.9.0" dependencies = [ "anyhow", "apollo-compiler", "apollo-federation", "apollo-mcp-registry", "apollo-schema-index", + "async-trait", "axum", "axum-extra", + "axum-otel-metrics", + "axum-tracing-opentelemetry", "bon", "chrono", "clap", + "cruet", "figment", "futures", "headers", "http", "humantime-serde", "insta", + "jsonschema", "jsonwebtoken", "jwks", "lz-str", "mockito", + "opentelemetry", + "opentelemetry-appender-log", + "opentelemetry-otlp", + "opentelemetry-resource-detectors", + "opentelemetry-semantic-conventions", + "opentelemetry-stdout", + "opentelemetry_sdk", + "prettyplease", + "quote", "regex", "reqwest", + "reqwest-middleware", + "reqwest-tracing", "rmcp", "rstest", - "schemars 1.0.4", + "schemars", "serde", "serde_json", - "thiserror 2.0.14", + "syn 2.0.106", + "thiserror 2.0.16", "tokio", "tokio-util", + "toml", + "tower", "tower-http", "tracing", "tracing-appender", + "tracing-core", + "tracing-opentelemetry", "tracing-subscriber", "tracing-test", "url", @@ -255,7 +273,7 @@ dependencies 
= [ [[package]] name = "apollo-schema-index" -version = "0.7.5" +version = "0.9.0" dependencies = [ "apollo-compiler", "enumset", @@ -263,7 +281,7 @@ dependencies = [ "itertools", "rstest", "tantivy", - "thiserror 2.0.14", + "thiserror 2.0.16", "tracing", ] @@ -296,13 +314,13 @@ dependencies = [ [[package]] name = "async-compression" -version = "0.4.27" +version = "0.4.30" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ddb939d66e4ae03cee6091612804ba446b12878410cfa17f785f4dd67d4014e8" +checksum = "977eb15ea9efd848bb8a4a1a2500347ed7f0bf794edf0dc3ddcf439f43d36b23" dependencies = [ - "flate2", + "compression-codecs", + "compression-core", "futures-core", - "memchr", "pin-project-lite", "tokio", ] @@ -315,7 +333,7 @@ checksum = "9035ad2d096bed7955a320ee7e2230574d28fd3c3a0f186cbea1ff3c7eed5dbb" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -416,6 +434,41 @@ dependencies = [ "tower-service", ] +[[package]] +name = "axum-otel-metrics" +version = "0.12.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d82cf7343b4fc88312e4d7b731152a1a09edcdb6def398d836ef8bccb57f066a" +dependencies = [ + "axum", + "futures-util", + "http", + "http-body", + "opentelemetry", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", + "pin-project-lite", + "tower", +] + +[[package]] +name = "axum-tracing-opentelemetry" +version = "0.29.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dd3e188039e0e9e3dce1ad873358fd6bab72e6496e18b898bc36d72c07af4b26" +dependencies = [ + "axum", + "futures-core", + "futures-util", + "http", + "opentelemetry", + "pin-project-lite", + "tower", + "tracing", + "tracing-opentelemetry", + "tracing-opentelemetry-instrumentation-sdk", +] + [[package]] name = "backtrace" version = "0.3.75" @@ -437,6 +490,21 @@ version = "0.22.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"72b3254f16251a8381aa12e40e3c4d2f0199f8c6508fbecb9d91f575e0fbb8c6" +[[package]] +name = "bit-set" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "08807e080ed7f9d5433fa9b275196cfc35414f66a0c79d864dc51a0d825231a3" +dependencies = [ + "bit-vec", +] + +[[package]] +name = "bit-vec" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5e764a1d40d510daf35e07be9eb06e75770908c27d411ee6c92109c9840eaaf7" + [[package]] name = "bitflags" version = "1.3.2" @@ -445,9 +513,9 @@ checksum = "bef38d45163c2f1dde094a7dfd33ccf595c92905c8f8f4fdc18d06fb1037718a" [[package]] name = "bitflags" -version = "2.9.1" +version = "2.9.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1b8e56985ec62d17e9c1001dc89c88ecd7dc08e47eba5ec7c29c7b5eeecde967" +checksum = "2261d10cca569e4643e526d8dc2e62e433cc8aba21ab764233731f8d369bf394" [[package]] name = "bitpacking" @@ -469,9 +537,9 @@ dependencies = [ [[package]] name = "bon" -version = "3.7.0" +version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "67a0c21249ad725ebcadcb1b1885f8e3d56e8e6b8924f560268aab000982d637" +checksum = "c2529c31017402be841eb45892278a6c21a000c0a17643af326c73a73f83f0fb" dependencies = [ "bon-macros", "rustversion", @@ -479,19 +547,25 @@ dependencies = [ [[package]] name = "bon-macros" -version = "3.7.0" +version = "3.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a660ebdea4d4d3ec7788cfc9c035b66efb66028b9b97bf6cde7023ccc8e77e28" +checksum = "d82020dadcb845a345591863adb65d74fa8dc5c18a0b6d408470e13b7adc7005" dependencies = [ - "darling 0.21.2", + "darling", "ident_case", "prettyplease", "proc-macro2", "quote", "rustversion", - "syn 2.0.105", + "syn 2.0.106", ] +[[package]] +name = "borrow-or-share" +version = "0.2.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"3eeab4423108c5d7c744f4d234de88d18d636100093ae04caf4825134b9c3a32" + [[package]] name = "bstr" version = "1.12.0" @@ -534,10 +608,11 @@ checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a" [[package]] name = "cc" -version = "1.2.32" +version = "1.2.38" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2352e5597e9c544d5e6d9c95190d5d27738ade584fa8db0a16e130e5c2b5296e" +checksum = "80f41ae168f955c12fb8960b057d70d0ca153fb83182b57d86380443527be7e9" dependencies = [ + "find-msvc-tools", "jobserver", "libc", "shlex", @@ -551,30 +626,29 @@ checksum = "4f4c707c6a209cbe82d10abd08e1ea8995e9ea937d2550646e02798948992be0" [[package]] name = "cfg-if" -version = "1.0.1" +version = "1.0.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9555578bc9e57714c812a1f84e4fc5b4d21fcb063490c624de019f7464c91268" +checksum = "2fd1289c04a9ea8cb22300a459a72a385d7c73d3259e2ed7dcb2af674838cfa9" [[package]] name = "chrono" -version = "0.4.41" +version = "0.4.42" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c469d952047f47f91b68d1cba3f10d63c11d73e4636f24f08daf0278abf01c4d" +checksum = "145052bdd345b87320e369255277e3fb5152762ad123a901ef5c262dd38fe8d2" dependencies = [ - "android-tzdata", "iana-time-zone", "js-sys", - "num-traits", + "num-traits 0.2.19", "serde", "wasm-bindgen", - "windows-link", + "windows-link 0.2.0", ] [[package]] name = "clap" -version = "4.5.45" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1fc0e74a703892159f5ae7d3aac52c8e6c392f5ae5f359c70b5881d60aaac318" +checksum = "e2134bb3ea021b78629caa971416385309e0131b351b25e01dc16fb54e1b5fae" dependencies = [ "clap_builder", "clap_derive", @@ -582,9 +656,9 @@ dependencies = [ [[package]] name = "clap_builder" -version = "4.5.44" +version = "4.5.48" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"b3e7f4214277f3c7aa526a59dd3fbe306a370daee1f8b7b8c987069cd8e888a8" +checksum = "c2ba64afa3c0a6df7fa517765e31314e983f51dda798ffba27b988194fb65dc9" dependencies = [ "anstream", "anstyle", @@ -594,14 +668,14 @@ dependencies = [ [[package]] name = "clap_derive" -version = "4.5.45" +version = "4.5.47" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "14cb31bb0a7d536caef2639baa7fad459e15c3144efefa6dbd1c84562c4739f6" +checksum = "bbfd7eae0b0f1a6e63d4b13c9c478de77c2eb546fba158ad50b4203dc24b9f9c" dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -635,6 +709,23 @@ dependencies = [ "memchr", ] +[[package]] +name = "compression-codecs" +version = "0.4.30" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "485abf41ac0c8047c07c87c72c8fb3eb5197f6e9d7ded615dfd1a00ae00a0f64" +dependencies = [ + "compression-core", + "flate2", + "memchr", +] + +[[package]] +name = "compression-core" +version = "0.4.29" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e47641d3deaf41fb1538ac1f54735925e275eaf3bf4d55c81b137fba797e5cbb" + [[package]] name = "concolor" version = "0.1.1" @@ -692,6 +783,15 @@ version = "0.8.7" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "773648b94d0e5d620f64f280777445740e61fe701025087ec8b57f45c791888b" +[[package]] +name = "countmap" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ef2a403c4af585607826502480ab6e453f320c230ef67255eee21f0cc72c0a6" +dependencies = [ + "num-traits 0.1.43", +] + [[package]] name = "countme" version = "3.0.1" @@ -750,6 +850,16 @@ version = "0.8.21" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d0a5c400df2834b80a4c3327b3aad3a4c4cd4de0629063962b03235697506a28" +[[package]] +name = "cruet" +version = "0.15.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = 
"b7a9ae414b9768aada1b316493261653e41af05c9d2ccc9c504a8fc051c6a790" +dependencies = [ + "once_cell", + "regex", +] + [[package]] name = "crunchy" version = "0.2.4" @@ -768,82 +878,47 @@ dependencies = [ [[package]] name = "darling" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc7f46116c46ff9ab3eb1597a45688b6715c6e628b5c133e288e709a29bcb4ee" -dependencies = [ - "darling_core 0.20.11", - "darling_macro 0.20.11", -] - -[[package]] -name = "darling" -version = "0.21.2" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "08440b3dd222c3d0433e63e097463969485f112baff337dfdaca043a0d760570" -dependencies = [ - "darling_core 0.21.2", - "darling_macro 0.21.2", -] - -[[package]] -name = "darling_core" -version = "0.20.11" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0d00b9596d185e565c2207a0b01f8bd1a135483d02d9b7b0a54b11da8d53412e" +checksum = "9cdf337090841a411e2a7f3deb9187445851f91b309c0c0a29e05f74a00a48c0" dependencies = [ - "fnv", - "ident_case", - "proc-macro2", - "quote", - "strsim", - "syn 2.0.105", + "darling_core", + "darling_macro", ] [[package]] name = "darling_core" -version = "0.21.2" +version = "0.21.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d25b7912bc28a04ab1b7715a68ea03aaa15662b43a1a4b2c480531fd19f8bf7e" +checksum = "1247195ecd7e3c85f83c8d2a366e4210d588e802133e1e355180a9870b517ea4" dependencies = [ "fnv", "ident_case", "proc-macro2", "quote", "strsim", - "syn 2.0.105", -] - -[[package]] -name = "darling_macro" -version = "0.20.11" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fc34b93ccb385b40dc71c6fceac4b2ad23662c7eeb248cf10d529b7e055b6ead" -dependencies = [ - "darling_core 0.20.11", - "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "darling_macro" -version = "0.21.2" +version = "0.21.3" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "ce154b9bea7fb0c8e8326e62d00354000c36e79770ff21b8c84e3aa267d9d531" +checksum = "d38308df82d1080de0afee5d069fa14b0326a88c14f15c5ccda35b4a6c414c81" dependencies = [ - "darling_core 0.21.2", + "darling_core", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "deadpool" -version = "0.10.0" +version = "0.12.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fb84100978c1c7b37f09ed3ce3e5f843af02c2a2c431bae5b19230dad2c1b490" +checksum = "0be2b1d1d6ec8d846f05e137292d0b89133caf95ef33695424c09568bdd39b1b" dependencies = [ - "async-trait", "deadpool-runtime", + "lazy_static", "num_cpus", "tokio", ] @@ -856,12 +931,12 @@ checksum = "092966b41edc516079bdf31ec78a2e0588d1d0c08f78b91d8307215928642b2b" [[package]] name = "deranged" -version = "0.4.0" +version = "0.5.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9c9e6a11ca8224451684bc0d7d5a7adbf8f2fd6887261a1cfc3c0432f9d4068e" +checksum = "a41953f86f8a05768a6cda24def994fd2f424b04ec5c719cf89989779f199071" dependencies = [ "powerfmt", - "serde", + "serde_core", ] [[package]] @@ -882,7 +957,7 @@ dependencies = [ "convert_case", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", "unicode-xid", ] @@ -904,14 +979,14 @@ checksum = "97369cbbc041bc366949bc74d34658d6cda5621039731c6310521892a3a20ae0" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "downcast-rs" -version = "2.0.1" +version = "2.0.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ea8a8b81cacc08888170eef4d13b775126db426d0b348bee9d18c2c1eaf123cf" +checksum = "117240f60069e65410b3ae1bb213295bd828f707b5bec6596a1afc8793ce0cbc" [[package]] name = "dyn-clone" @@ -928,7 +1003,7 @@ dependencies = [ "enum-ordinalize", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -937,12 +1012,30 @@ version = "1.15.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "48c757948c5ede0e46177b7add2e67155f70e33c07fea8284df6576da70b3719" +[[package]] +name = "email_address" +version = "0.2.9" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e079f19b08ca6239f47f8ba8509c11cf3ea30095831f7fed61441475edd8c449" +dependencies = [ + "serde", +] + [[package]] name = "encode_unicode" version = "1.0.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "34aa73646ffb006b8f5147f3dc182bd4bcb190227ce861fc4a4844bf8e3cb2c0" +[[package]] +name = "encoding_rs" +version = "0.8.35" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "75030f3c4f45dafd7586dd6780965a8c7e8e285a5ecb86713e63a79c5b2766f3" +dependencies = [ + "cfg-if", +] + [[package]] name = "enum-ordinalize" version = "4.3.0" @@ -960,28 +1053,28 @@ checksum = "0d28318a75d4aead5c4db25382e8ef717932d0346600cacae6357eb5941bc5ff" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "enumset" -version = "1.1.7" +version = "1.1.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d6ee17054f550fd7400e1906e2f9356c7672643ed34008a9e8abe147ccd2d821" +checksum = "25b07a8dfbbbfc0064c0a6bdf9edcf966de6b1c33ce344bdeca3b41615452634" dependencies = [ "enumset_derive", ] [[package]] name = "enumset_derive" -version = "0.12.0" +version = "0.14.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "76d07902c93376f1e96c34abc4d507c0911df3816cef50b01f5a2ff3ad8c370d" +checksum = "f43e744e4ea338060faee68ed933e46e722fb7f3617e722a5772d7e856d8b3ce" dependencies = [ - "darling 0.20.11", + "darling", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -992,12 +1085,23 @@ checksum = "877a4ace8713b0bcf2a4e7eec82529c029f1d0619886d18145fea96c3ffe5c0f" [[package]] name = "errno" -version = "0.3.13" +version = "0.3.14" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "778e2ac28f6c47af28e4907f13ffd1e1ddbd400980a9abd7c8df189bf578a5ad" +checksum = "39cab71617ae0d63f51a36d69f866391735b51691dbda63cf6f96d042b63efeb" dependencies = [ "libc", - "windows-sys 0.60.2", + "windows-sys 0.61.0", +] + +[[package]] +name = "fancy-regex" +version = "0.16.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "998b056554fbe42e03ae0e152895cd1a7e1002aec800fdc6635d20270260c46f" +dependencies = [ + "bit-set", + "regex-automata", + "regex-syntax", ] [[package]] @@ -1028,6 +1132,12 @@ dependencies = [ "version_check", ] +[[package]] +name = "find-msvc-tools" +version = "0.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1ced73b1dacfc750a6db6c0a0c3a3853c8b41997e2e2c563dc90804ae6867959" + [[package]] name = "fixedbitset" version = "0.5.7" @@ -1044,6 +1154,17 @@ dependencies = [ "miniz_oxide", ] +[[package]] +name = "fluent-uri" +version = "0.3.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1918b65d96df47d3591bed19c5cca17e3fa5d0707318e4b5ef2eae01764df7e5" +dependencies = [ + "borrow-or-share", + "ref-cast", + "serde", +] + [[package]] name = "fnv" version = "1.0.7" @@ -1056,6 +1177,12 @@ version = "0.1.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d9c4f5dac5e15c24eb999c26181a6ca40b39fe946cbe4c263c7209467bc83af2" +[[package]] +name = "foldhash" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "77ce24cb58228fbb8aa041425bb1050850ac19177686ea6e0f41a70416f56fdb" + [[package]] name = "foreign-types" version = "0.3.2" @@ -1073,13 +1200,23 @@ checksum = "00b0228411908ca8685dba7fc2cdd70ec9990a6e753e89b6ac91a84c40fbaf4b" [[package]] name = "form_urlencoded" -version = "1.2.1" +version = "1.2.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"e13624c2627564efccf4934284bdd98cbaa14e79b0b5a141218e507b3a823456" +checksum = "cb4cb245038516f5f85277875cdaa4f7d2c9a0fa0468de06ed190163b1581fcf" dependencies = [ "percent-encoding", ] +[[package]] +name = "fraction" +version = "0.15.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0f158e3ff0a1b334408dc9fb811cd99b446986f4d8b741bb08f9df1604085ae7" +dependencies = [ + "lazy_static", + "num", +] + [[package]] name = "fs4" version = "0.8.4" @@ -1156,7 +1293,7 @@ checksum = "162ee34ebcb7c64a8abebc059ce0fee27c2262618d7b60ed8faf72fef13c3650" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -1227,7 +1364,7 @@ dependencies = [ "cfg-if", "libc", "r-efi", - "wasi 0.14.2+wasi-0.2.4", + "wasi 0.14.7+wasi-0.2.4", ] [[package]] @@ -1346,7 +1483,18 @@ checksum = "9229cfe53dfd69f0609a49f65461bd93001ea1ef889cd5529dd176593f5338a1" dependencies = [ "allocator-api2", "equivalent", - "foldhash", + "foldhash 0.1.5", +] + +[[package]] +name = "hashbrown" +version = "0.16.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5419bdc4f6a9207fbeba6d11b604d481addf78ecd10c11ad51e76c2f6482748d" +dependencies = [ + "allocator-api2", + "equivalent", + "foldhash 0.2.0", ] [[package]] @@ -1445,9 +1593,9 @@ checksum = "df3b46402a9d5adb4c86a0cf463f42e19994e3ee891101b1841f30a545cb49a9" [[package]] name = "humantime" -version = "2.2.0" +version = "2.3.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9b112acc8b3adf4b107a8ec20977da0273a8c386765a3ec0229bd500a1443f9f" +checksum = "135b12329e5e3ce057a9f972339ea52bc954fe1e9358ef27f95e89716fbc5424" [[package]] name = "humantime-serde" @@ -1461,13 +1609,14 @@ dependencies = [ [[package]] name = "hyper" -version = "1.6.0" +version = "1.7.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc2b571658e38e0c01b1fdca3bbbe93c00d3d71693ff2770043f8c29bc7d6f80" +checksum = 
"eb3aa54a13a0dfe7fbe3a59e0c76093041720fdc77b110cc0fc260fafb4dc51e" dependencies = [ + "atomic-waker", "bytes", "futures-channel", - "futures-util", + "futures-core", "h2", "http", "http-body", @@ -1475,11 +1624,25 @@ dependencies = [ "httpdate", "itoa", "pin-project-lite", + "pin-utils", "smallvec", "tokio", "want", ] +[[package]] +name = "hyper-timeout" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2b90d566bffbce6a75bd8b09a05aa8c2cb1fabb6cb348f8840c9e4c90a0d83b0" +dependencies = [ + "hyper", + "hyper-util", + "pin-project-lite", + "tokio", + "tower-service", +] + [[package]] name = "hyper-tls" version = "0.6.0" @@ -1498,9 +1661,9 @@ dependencies = [ [[package]] name = "hyper-util" -version = "0.1.16" +version = "0.1.17" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8d9b05277c7e8da2c93a568989bb6207bef0112e8d17df7a6eda4a3cf143bc5e" +checksum = "3c6995591a8f1380fcb4ba966a252a4b29188d51d2b89e3a252f5305be65aea8" dependencies = [ "base64", "bytes", @@ -1531,9 +1694,9 @@ dependencies = [ [[package]] name = "iana-time-zone" -version = "0.1.63" +version = "0.1.64" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b0c919e5debc312ad217002b8048a17b7d83f80703865bbfcfebb0458b0b27d8" +checksum = "33e57f83510bb73707521ebaffa789ec8caf86f9657cad665b092b581d40e9fb" dependencies = [ "android_system_properties", "core-foundation-sys", @@ -1647,9 +1810,9 @@ checksum = "b9e0384b61958566e926dc50660321d12159025e767c18e043daf26b70104c39" [[package]] name = "idna" -version = "1.0.3" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "686f825264d630750a544639377bae737628043f20d38bbc029e8f29ea968a7e" +checksum = "3b0875f23caa03898994f6ddc501886a45c7d3d62d04d2d90788d47be1b1e4de" dependencies = [ "idna_adapter", "smallvec", @@ -1668,13 +1831,14 @@ dependencies = [ [[package]] name = "indexmap" -version = "2.10.0" +version = "2.11.4" source 
= "registry+https://github.com/rust-lang/crates.io-index" -checksum = "fe4cd85333e22411419a0bcae1297d25e58c9443848b11dc6a86fefe8c78a661" +checksum = "4b0f83760fb341a774ed326568e19f5a863af4a952def8c39f9ab92fd95b88e5" dependencies = [ "equivalent", - "hashbrown 0.15.5", + "hashbrown 0.16.0", "serde", + "serde_core", ] [[package]] @@ -1689,7 +1853,7 @@ version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "f37dccff2791ab604f9babef0ba14fbe0be30bd368dc541e2b08d07c8aa908f3" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "inotify-sys", "libc", ] @@ -1705,9 +1869,9 @@ dependencies = [ [[package]] name = "insta" -version = "1.43.1" +version = "1.43.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "154934ea70c58054b556dd430b99a98c2a7ff5309ac9891597e339b5c28f4371" +checksum = "46fdb647ebde000f43b5b53f773c30cf9b0cb4300453208713fa38b2c70935a0" dependencies = [ "console", "globset", @@ -1721,11 +1885,11 @@ dependencies = [ [[package]] name = "io-uring" -version = "0.7.9" +version = "0.7.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d93587f37623a1a17d94ef2bc9ada592f5465fe7732084ab7beefabe5c77c0c4" +checksum = "046fa2d4d00aea763528b4950358d0ead425372445dc8ff86312b3c69ff7727b" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "libc", ] @@ -1780,9 +1944,9 @@ checksum = "4a5f13b858c8d314ee3e8f639011f7ccefe71f97f96e50151fb991f267928e2c" [[package]] name = "jobserver" -version = "0.1.33" +version = "0.1.34" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "38f262f097c174adebe41eb73d66ae9c06b2844fb0da69969647bbddd9b0538a" +checksum = "9afb3de4395d6b3e67a780b6de64b51c978ecf11cb9a462c66be7d4ca9039d33" dependencies = [ "getrandom 0.3.3", "libc", @@ -1790,9 +1954,9 @@ dependencies = [ [[package]] name = "js-sys" -version = "0.3.77" +version = "0.3.81" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = 
"1cfaf33c695fc6e08064efbc1f72ec937429614f25eef83af942d0e227c3a28f" +checksum = "ec48937a97411dcb524a265206ccd4c90bb711fca92b2792c407f268825b9305" dependencies = [ "once_cell", "wasm-bindgen", @@ -1811,6 +1975,33 @@ dependencies = [ "thiserror 1.0.69", ] +[[package]] +name = "jsonschema" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d46662859bc5f60a145b75f4632fbadc84e829e45df6c5de74cfc8e05acb96b5" +dependencies = [ + "ahash", + "base64", + "bytecount", + "email_address", + "fancy-regex", + "fraction", + "idna", + "itoa", + "num-cmp", + "num-traits 0.2.19", + "once_cell", + "percent-encoding", + "referencing", + "regex", + "regex-syntax", + "reqwest", + "serde", + "serde_json", + "uuid-simd", +] + [[package]] name = "jsonwebtoken" version = "9.3.1" @@ -1880,9 +2071,9 @@ checksum = "0c2cdeb66e45e9f36bfad5bbdb4d2384e70936afbee843c6f6543f0c551ebb25" [[package]] name = "libc" -version = "0.2.175" +version = "0.2.176" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6a82ae493e598baaea5209805c49bbf2ea7de956d50d7da0da1164f9c6d28543" +checksum = "58f929b4d672ea937a23a1ab494143d968337a5f47e56d0815df1e0890ddf174" [[package]] name = "libm" @@ -1904,9 +2095,9 @@ checksum = "d26c52dbd32dccf2d10cac7725f8eae5296885fb5703b261f7d0a0739ec807ab" [[package]] name = "linux-raw-sys" -version = "0.9.4" +version = "0.11.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cd945864f07fe9f5371a27ad7b52a172b4b499999f1d97574c9fa68373937e12" +checksum = "df1d3c3b53da64cf5760482273a98e575c651a67eec7f77df96b5b642de8f039" [[package]] name = "litemap" @@ -1926,9 +2117,9 @@ dependencies = [ [[package]] name = "log" -version = "0.4.27" +version = "0.4.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "13dc2df351e3202783a1fe0d44375f7295ffb4049267b0f3018346dc122a1d94" +checksum = "34080505efa8e45a4b816c349525ebe327ceaa8559756f0356cba97ef3bf7432" [[package]] name = 
"lru" @@ -1983,9 +2174,9 @@ checksum = "32a282da65faaf38286cf3be983213fcf1d2e2a58700e808f83f4ea9a4804bc0" [[package]] name = "memmap2" -version = "0.9.7" +version = "0.9.8" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "483758ad303d734cec05e5c12b41d7e93e6a6390c5e9dae6bdeb7c1259012d28" +checksum = "843a98750cd611cc2965a8213b53b43e715f13c37a9e096c6408e69990961db7" dependencies = [ "libc", ] @@ -2112,7 +2303,7 @@ version = "8.2.0" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "4d3d07927151ff8575b7087f245456e549fea62edf0ec4e565a5ee50c8402bc3" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "fsevent-sys", "inotify", "kqueue", @@ -2139,6 +2330,20 @@ dependencies = [ "windows-sys 0.52.0", ] +[[package]] +name = "num" +version = "0.4.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "35bd024e8b2ff75562e5f34e7f4905839deb4b22955ef5e73d2fea1b9813cb23" +dependencies = [ + "num-bigint", + "num-complex", + "num-integer", + "num-iter", + "num-rational", + "num-traits 0.2.19", +] + [[package]] name = "num-bigint" version = "0.4.6" @@ -2146,7 +2351,22 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "a5e44f723f1133c9deac646763579fdb3ac745e418f2a7af9cd0c431da1f20b9" dependencies = [ "num-integer", - "num-traits", + "num-traits 0.2.19", +] + +[[package]] +name = "num-cmp" +version = "0.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "63335b2e2c34fae2fb0aa2cecfd9f0832a1e24b3b32ecec612c3426d46dc8aaa" + +[[package]] +name = "num-complex" +version = "0.4.6" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "73f88a1307638156682bada9d7604135552957b7818057dcef22705b4d509495" +dependencies = [ + "num-traits 0.2.19", ] [[package]] @@ -2161,7 +2381,38 @@ version = "0.1.46" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = 
"7969661fd2958a5cb096e56c8e1ad0444ac2bbcd0061bd28660485a44879858f" dependencies = [ - "num-traits", + "num-traits 0.2.19", +] + +[[package]] +name = "num-iter" +version = "0.1.45" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1429034a0490724d0075ebb2bc9e875d6503c3cf69e235a8941aa757d83ef5bf" +dependencies = [ + "autocfg", + "num-integer", + "num-traits 0.2.19", +] + +[[package]] +name = "num-rational" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "f83d14da390562dca69fc84082e73e548e1ad308d24accdedd2720017cb37824" +dependencies = [ + "num-bigint", + "num-integer", + "num-traits 0.2.19", +] + +[[package]] +name = "num-traits" +version = "0.1.43" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "92e5113e9fd4cc14ded8e499429f396a20f98c772a47cc8622a736e1ec843c31" +dependencies = [ + "num-traits 0.2.19", ] [[package]] @@ -2226,7 +2477,7 @@ version = "0.10.73" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8505734d46c8ab1e19a1dce3aef597ad87dcb4c37e7188231769bd6bd51cebf8" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "cfg-if", "foreign-types", "libc", @@ -2243,7 +2494,7 @@ checksum = "a948666b637a0f465e8564c73e89d4dde00d72d4d473cc972f390fc3dcee7d9c" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -2275,13 +2526,133 @@ dependencies = [ ] [[package]] -name = "ownedbytes" -version = "0.9.0" +name = "opentelemetry" +version = "0.30.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2fbd56f7631767e61784dc43f8580f403f4475bd4aaa4da003e6295e1bab4a7e" +checksum = "aaf416e4cb72756655126f7dd7bb0af49c674f4c1b9903e80c009e0c37e552e6" dependencies = [ - "stable_deref_trait", -] + "futures-core", + "futures-sink", + "js-sys", + "pin-project-lite", + "thiserror 2.0.16", + "tracing", +] + +[[package]] +name = "opentelemetry-appender-log" +version = "0.30.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e688026e48f4603494f619583e0aa0b0edd9c0b9430e1c46804df2ff32bc8798" +dependencies = [ + "log", + "opentelemetry", +] + +[[package]] +name = "opentelemetry-http" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "50f6639e842a97dbea8886e3439710ae463120091e2e064518ba8e716e6ac36d" +dependencies = [ + "async-trait", + "bytes", + "http", + "opentelemetry", + "reqwest", +] + +[[package]] +name = "opentelemetry-otlp" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "dbee664a43e07615731afc539ca60c6d9f1a9425e25ca09c57bc36c87c55852b" +dependencies = [ + "http", + "opentelemetry", + "opentelemetry-http", + "opentelemetry-proto", + "opentelemetry_sdk", + "prost", + "reqwest", + "thiserror 2.0.16", + "tokio", + "tonic", + "tracing", +] + +[[package]] +name = "opentelemetry-proto" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2e046fd7660710fe5a05e8748e70d9058dc15c94ba914e7c4faa7c728f0e8ddc" +dependencies = [ + "opentelemetry", + "opentelemetry_sdk", + "prost", + "tonic", +] + +[[package]] +name = "opentelemetry-resource-detectors" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0a44e076f07fa3d76e741991f4f7d3ecbac0eed8521ced491fbdf8db77d024cf" +dependencies = [ + "opentelemetry", + "opentelemetry-semantic-conventions", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry-semantic-conventions" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "83d059a296a47436748557a353c5e6c5705b9470ef6c95cfc52c21a8814ddac2" + +[[package]] +name = "opentelemetry-stdout" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "447191061af41c3943e082ea359ab8b64ff27d6d34d30d327df309ddef1eef6f" +dependencies = [ + "chrono", + 
"opentelemetry", + "opentelemetry_sdk", +] + +[[package]] +name = "opentelemetry_sdk" +version = "0.30.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "11f644aa9e5e31d11896e024305d7e3c98a88884d9f8919dbf37a9991bc47a4b" +dependencies = [ + "futures-channel", + "futures-executor", + "futures-util", + "opentelemetry", + "percent-encoding", + "rand 0.9.2", + "serde_json", + "thiserror 2.0.16", + "tokio", + "tokio-stream", +] + +[[package]] +name = "outref" +version = "0.5.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "1a80800c0488c3a21695ea981a54918fbb37abf04f4d0720c453632255e2ff0e" + +[[package]] +name = "ownedbytes" +version = "0.9.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2fbd56f7631767e61784dc43f8580f403f4475bd4aaa4da003e6295e1bab4a7e" +dependencies = [ + "stable_deref_trait", +] [[package]] name = "parking_lot" @@ -2332,7 +2703,7 @@ dependencies = [ "proc-macro2", "proc-macro2-diagnostics", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -2347,26 +2718,26 @@ dependencies = [ [[package]] name = "percent-encoding" -version = "2.3.1" +version = "2.3.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e3148f5046208a5d56bcfc03053e3ca6334e51da8dfb19b6cdc8b306fae3283e" +checksum = "9b4f627cb1b25917193a259e49bdad08f671f8d9708acfd5fe0a8c1455d87220" [[package]] name = "pest" -version = "2.8.1" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1db05f56d34358a8b1066f67cbb203ee3e7ed2ba674a6263a1d5ec6db2204323" +checksum = "21e0a3a33733faeaf8651dfee72dd0f388f0c8e5ad496a3478fa5a922f49cfa8" dependencies = [ "memchr", - "thiserror 2.0.14", + "thiserror 2.0.16", "ucd-trie", ] [[package]] name = "pest_derive" -version = "2.8.1" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "bb056d9e8ea77922845ec74a1c4e8fb17e7c218cc4fc11a15c5d25e189aa40bc" 
+checksum = "bc58706f770acb1dbd0973e6530a3cff4746fb721207feb3a8a6064cd0b6c663" dependencies = [ "pest", "pest_generator", @@ -2374,22 +2745,22 @@ dependencies = [ [[package]] name = "pest_generator" -version = "2.8.1" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "87e404e638f781eb3202dc82db6760c8ae8a1eeef7fb3fa8264b2ef280504966" +checksum = "6d4f36811dfe07f7b8573462465d5cb8965fffc2e71ae377a33aecf14c2c9a2f" dependencies = [ "pest", "pest_meta", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "pest_meta" -version = "2.8.1" +version = "2.8.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "edd1101f170f5903fde0914f899bb503d9ff5271d7ba76bbb70bea63690cc0d5" +checksum = "42919b05089acbd0a5dcd5405fb304d17d1053847b81163d09c4ad18ce8e8420" dependencies = [ "pest", "sha2", @@ -2425,7 +2796,7 @@ checksum = "6e918e4ff8c4549eb882f14b3a4bc8c8bc93de829416eacf579f1207a8fbf861" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -2448,9 +2819,9 @@ checksum = "7edddbd0b52d732b21ad9a5fab5c704c14cd949e5e9a1ec5929a24fded1b904c" [[package]] name = "potential_utf" -version = "0.1.2" +version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e5a7c30837279ca13e7c867e9e40053bc68740f988cb07f7ca6df43cc734b585" +checksum = "84df19adbe5b5a0782edcab45899906947ab039ccf4573713735ee7de1e6b08a" dependencies = [ "zerovec", ] @@ -2472,28 +2843,28 @@ dependencies = [ [[package]] name = "prettyplease" -version = "0.2.36" +version = "0.2.37" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "ff24dfcda44452b9816fff4cd4227e1bb73ff5a2f1bc1105aa92fb8565ce44d2" +checksum = "479ca8adacdd7ce8f1fb39ce9ecccbfe93a3f1344b3d0d97f20bc0196208f62b" dependencies = [ "proc-macro2", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "proc-macro-crate" -version = "3.3.0" +version = "3.4.0" source = 
"registry+https://github.com/rust-lang/crates.io-index" -checksum = "edce586971a4dfaa28950c6f18ed55e0406c1ab88bbce2c6f6293a7aaba73d35" +checksum = "219cb19e96be00ab2e37d6e299658a0cfa83e52429179969b0f0121b4ac46983" dependencies = [ "toml_edit", ] [[package]] name = "proc-macro2" -version = "1.0.97" +version = "1.0.101" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "d61789d7719defeb74ea5fe81f2fdfdbd28a803847077cecce2ff14e1472f6f1" +checksum = "89ae43fd86e4158d6db51ad8e2b80f313af9cc74f5c0e03ccb87de09998732de" dependencies = [ "unicode-ident", ] @@ -2506,11 +2877,34 @@ checksum = "af066a9c399a26e020ada66a034357a868728e72cd426f3adcd35f80d88d88c8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", "version_check", "yansi", ] +[[package]] +name = "prost" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "2796faa41db3ec313a31f7624d9286acf277b52de526150b7e69f3debf891ee5" +dependencies = [ + "bytes", + "prost-derive", +] + +[[package]] +name = "prost-derive" +version = "0.13.5" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "8a56d757972c98b346a9b766e3f02746cde6dd1cd1d1d563472929fdd74bec4d" +dependencies = [ + "anyhow", + "itertools", + "proc-macro2", + "quote", + "syn 2.0.106", +] + [[package]] name = "quote" version = "1.0.40" @@ -2591,7 +2985,7 @@ version = "0.4.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "32cb0b9bc82b0a0876c2dd994a7e7a2683d3e7390ca40e6886785ef0c7e3ee31" dependencies = [ - "num-traits", + "num-traits 0.2.19", "rand 0.8.5", ] @@ -2621,7 +3015,7 @@ version = "0.5.17" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5407465600fb0548f1442edf71dd20683c6ed326200ace4b1ef0763521bb3b77" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", ] [[package]] @@ -2641,14 +3035,28 @@ checksum = "1165225c21bff1f3bbce98f5a1f889949bc902d3575308cc7b0de30b4f6d27c7" dependencies = 
[ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", +] + +[[package]] +name = "referencing" +version = "0.33.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9e9c261f7ce75418b3beadfb3f0eb1299fe8eb9640deba45ffa2cb783098697d" +dependencies = [ + "ahash", + "fluent-uri", + "once_cell", + "parking_lot", + "percent-encoding", + "serde_json", ] [[package]] name = "regex" -version = "1.11.1" +version = "1.11.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "b544ef1b4eac5dc2db33ea63606ae9ffcfac26c1416a2806ae0bf5f56b201191" +checksum = "23d7fd106d8c02486a8d64e778353d1cffe08ce79ac2e82f540c86d0facf6912" dependencies = [ "aho-corasick", "memchr", @@ -2658,9 +3066,9 @@ dependencies = [ [[package]] name = "regex-automata" -version = "0.4.9" +version = "0.4.10" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "809e8dc61f6de73b46c85f4c96486310fe304c434cfa43669d7b40f711150908" +checksum = "6b9458fa0bfeeac22b5ca447c63aaf45f28439a709ccd244698632f9aa6394d6" dependencies = [ "aho-corasick", "memchr", @@ -2669,9 +3077,9 @@ dependencies = [ [[package]] name = "regex-syntax" -version = "0.8.5" +version = "0.8.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2b15c43186be67a4fd63bee50d0303afffcef381492ebe2c5d87f324e1b8815c" +checksum = "caf4aa5b0f434c91fe5c7f1ecb6a5ece2130b02ad2a590589dda5146df959001" [[package]] name = "relative-path" @@ -2688,6 +3096,7 @@ dependencies = [ "async-compression", "base64", "bytes", + "futures-channel", "futures-core", "futures-util", "http", @@ -2718,6 +3127,39 @@ dependencies = [ "web-sys", ] +[[package]] +name = "reqwest-middleware" +version = "0.4.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "57f17d28a6e6acfe1733fe24bcd30774d13bffa4b8a22535b4c8c98423088d4e" +dependencies = [ + "anyhow", + "async-trait", + "http", + "reqwest", + "serde", + "thiserror 1.0.69", + "tower-service", +] + 
+[[package]] +name = "reqwest-tracing" +version = "0.5.8" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d70ea85f131b2ee9874f0b160ac5976f8af75f3c9badfe0d955880257d10bd83" +dependencies = [ + "anyhow", + "async-trait", + "getrandom 0.2.16", + "http", + "matchit", + "opentelemetry", + "reqwest", + "reqwest-middleware", + "tracing", + "tracing-opentelemetry", +] + [[package]] name = "ring" version = "0.17.14" @@ -2734,9 +3176,9 @@ dependencies = [ [[package]] name = "rmcp" -version = "0.2.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "37f2048a81a7ff7e8ef6bc5abced70c3d9114c8f03d85d7aaaafd9fd04f12e9e" +checksum = "41ab0892f4938752b34ae47cb53910b1b0921e55e77ddb6e44df666cab17939f" dependencies = [ "axum", "base64", @@ -2750,11 +3192,11 @@ dependencies = [ "pin-project-lite", "rand 0.9.2", "rmcp-macros", - "schemars 0.8.22", + "schemars", "serde", "serde_json", "sse-stream", - "thiserror 2.0.14", + "thiserror 2.0.16", "tokio", "tokio-stream", "tokio-util", @@ -2765,15 +3207,15 @@ dependencies = [ [[package]] name = "rmcp-macros" -version = "0.2.1" +version = "0.6.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "72398e694b9f6dbb5de960cf158c8699e6a1854cb5bbaac7de0646b2005763c4" +checksum = "1827cd98dab34cade0513243c6fe0351f0f0b2c9d6825460bcf45b42804bdda0" dependencies = [ - "darling 0.20.11", + "darling", "proc-macro2", "quote", "serde_json", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -2814,7 +3256,7 @@ dependencies = [ "regex", "relative-path", "rustc_version", - "syn 2.0.105", + "syn 2.0.106", "unicode-ident", ] @@ -2861,7 +3303,7 @@ version = "0.38.44" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "fdb5bc1ae2baa591800df16c9ca78619bf65c0488b41b96ccec5d11220d8c154" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "errno", "libc", "linux-raw-sys 0.4.15", @@ -2870,15 +3312,15 @@ dependencies = [ [[package]] name = 
"rustix" -version = "1.0.8" +version = "1.1.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "11181fbabf243db407ef8df94a6ce0b2f9a733bd8be4ad02b4eda9602296cac8" +checksum = "cd15f8a2c5551a84d56efdc1cd049089e409ac19a3072d5037a17fd70719ff3e" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "errno", "libc", - "linux-raw-sys 0.9.4", - "windows-sys 0.60.2", + "linux-raw-sys 0.11.0", + "windows-sys 0.61.0", ] [[package]] @@ -2913,24 +3355,11 @@ dependencies = [ [[package]] name = "schannel" -version = "0.1.27" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1f29ebaa345f945cec9fbbc532eb307f0fdad8161f281b6369539c8d84876b3d" -dependencies = [ - "windows-sys 0.59.0", -] - -[[package]] -name = "schemars" -version = "0.8.22" +version = "0.1.28" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3fbf2ae1b8bc8e02df939598064d22402220cd5bbcca1c76f7d6a310974d5615" +checksum = "891d81b926048e76efe18581bf793546b4c0eaf8448d72be8de2bbee5fd166e1" dependencies = [ - "chrono", - "dyn-clone", - "schemars_derive 0.8.22", - "serde", - "serde_json", + "windows-sys 0.61.0", ] [[package]] @@ -2939,26 +3368,15 @@ version = "1.0.4" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "82d20c4491bc164fa2f6c5d44565947a52ad80b9505d8e36f8d54c27c739fcd0" dependencies = [ + "chrono", "dyn-clone", "ref-cast", - "schemars_derive 1.0.4", + "schemars_derive", "serde", "serde_json", "url", ] -[[package]] -name = "schemars_derive" -version = "0.8.22" -source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32e265784ad618884abaea0600a9adf15393368d840e0222d101a072f3f7534d" -dependencies = [ - "proc-macro2", - "quote", - "serde_derive_internals", - "syn 2.0.105", -] - [[package]] name = "schemars_derive" version = "1.0.4" @@ -2968,7 +3386,7 @@ dependencies = [ "proc-macro2", "quote", "serde_derive_internals", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -2993,7 
+3411,7 @@ version = "2.11.1" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "897b2245f0b511c87893af39b033e5ca9cce68824c4d7e7630b5a1d339658d02" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "core-foundation", "core-foundation-sys", "libc", @@ -3002,9 +3420,9 @@ dependencies = [ [[package]] name = "security-framework-sys" -version = "2.14.0" +version = "2.15.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "49db231d56a190491cb4aeda9527f1ad45345af50b0851622a7adb8c03b01c32" +checksum = "cc1f0cbffaac4852523ce30d8bd3c5cdc873501d96ff467ca09b6767bb8cd5c0" dependencies = [ "core-foundation-sys", "libc", @@ -3012,28 +3430,38 @@ dependencies = [ [[package]] name = "semver" -version = "1.0.26" +version = "1.0.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6fa9c48d24d85fb3de5ad847117517440f6beceb7798af16b4a87d616b8d0" +checksum = "d767eb0aabc880b29956c35734170f26ed551a859dbd361d140cdbeca61ab1e2" [[package]] name = "serde" -version = "1.0.219" +version = "1.0.226" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0dca6411025b24b60bfa7ec1fe1f8e710ac09782dca409ee8237ba74b51295fd" +dependencies = [ + "serde_core", + "serde_derive", +] + +[[package]] +name = "serde_core" +version = "1.0.226" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5f0e2c6ed6606019b4e29e69dbaba95b11854410e5347d525002456dbbb786b6" +checksum = "ba2ba63999edb9dac981fb34b3e5c0d111a69b0924e253ed29d83f7c99e966a4" dependencies = [ "serde_derive", ] [[package]] name = "serde_derive" -version = "1.0.219" +version = "1.0.226" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5b0276cf7f2c73365f7157c8123c21cd9a50fbbd844757af28ca1f5925fc2a00" +checksum = "8db53ae22f34573731bafa1db20f04027b2d25e02d8205921b569171699cdb33" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3044,20 +3472,21 
@@ checksum = "18d26a20a969b9e3fdf2fc2d9f21eda6c40e2de84c9408bb5d3b05d499aae711" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] name = "serde_json" -version = "1.0.142" +version = "1.0.145" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "030fedb782600dcbd6f02d479bf0d817ac3bb40d644745b769d6a96bc3afc5a7" +checksum = "402a6f66d8c709116cf22f558eab210f5a50187f702eb4d7e5ef38d9a7f1c79c" dependencies = [ "indexmap", "itoa", "memchr", "ryu", "serde", + "serde_core", ] [[package]] @@ -3077,12 +3506,22 @@ dependencies = [ [[package]] name = "serde_path_to_error" -version = "0.1.17" +version = "0.1.20" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "59fab13f937fa393d08645bf3a84bdfe86e296747b506ada67bb15f10f218b2a" +checksum = "10a9ff822e371bb5403e391ecd83e182e0e77ba7f6fe0160b795797109d1b457" dependencies = [ "itoa", "serde", + "serde_core", +] + +[[package]] +name = "serde_spanned" +version = "1.0.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5417783452c2be558477e104686f7de5dae53dba813c28435e0e70f82d9b04ee" +dependencies = [ + "serde_core", ] [[package]] @@ -3134,9 +3573,9 @@ dependencies = [ [[package]] name = "shape" -version = "0.5.2" +version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "362c1523018b16b65737aa0ea76a731edbcd399e273c0130ba829b148f89dbd2" +checksum = "914e2afe9130bf8acf52c5e20b4222f7d2e5eb8327e05fb668fe70aad4b3a896" dependencies = [ "apollo-compiler", "indexmap", @@ -3181,8 +3620,8 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "297f631f50729c8c99b84667867963997ec0b50f32b2a7dbcab828ef0541e8bb" dependencies = [ "num-bigint", - "num-traits", - "thiserror 2.0.14", + "num-traits 0.2.19", + "thiserror 2.0.16", "time", ] @@ -3257,7 +3696,7 @@ dependencies = [ "heck 0.5.0", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3273,9 
+3712,9 @@ dependencies = [ [[package]] name = "syn" -version = "2.0.105" +version = "2.0.106" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7bc3fcb250e53458e712715cf74285c1f889686520d79294a9ef3bd7aa1fc619" +checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6" dependencies = [ "proc-macro2", "quote", @@ -3299,7 +3738,7 @@ checksum = "728a70f3dbaf5bab7f0c4b1ac8d7ae5ea60a4b5549c8a5914361c99147a709d2" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3348,7 +3787,7 @@ dependencies = [ "tantivy-stacker", "tantivy-tokenizer-api", "tempfile", - "thiserror 2.0.14", + "thiserror 2.0.16", "time", "uuid", "winapi", @@ -3450,15 +3889,15 @@ dependencies = [ [[package]] name = "tempfile" -version = "3.20.0" +version = "3.23.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "e8a64e3985349f2441a1a9ef0b853f869006c3855f2cda6862a94d26ebb9d6a1" +checksum = "2d31c77bdf42a745371d260a26ca7163f1e0924b64afa0b688e61b5a9fa02f16" dependencies = [ "fastrand", "getrandom 0.3.3", "once_cell", - "rustix 1.0.8", - "windows-sys 0.59.0", + "rustix 1.1.2", + "windows-sys 0.61.0", ] [[package]] @@ -3479,7 +3918,7 @@ checksum = "451b374529930d7601b1eef8d32bc79ae870b6079b069401709c2a8bf9e75f36" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3499,11 +3938,11 @@ dependencies = [ [[package]] name = "thiserror" -version = "2.0.14" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "0b0949c3a6c842cbde3f1686d6eea5a010516deb7085f79db747562d4102f41e" +checksum = "3467d614147380f2e4e374161426ff399c91084acd2363eaf549172b3d5e60c0" dependencies = [ - "thiserror-impl 2.0.14", + "thiserror-impl 2.0.16", ] [[package]] @@ -3514,18 +3953,18 @@ checksum = "4fee6c4efc90059e10f81e6d42c60a18f76588c3d74cb83a0b242a2b6c7504c1" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] 
[[package]] name = "thiserror-impl" -version = "2.0.14" +version = "2.0.16" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cc5b44b4ab9c2fdd0e0512e6bece8388e214c0749f5862b114cc5b7a25daf227" +checksum = "6c5e1be1c48b9172ee610da68fd9cd2770e7a4056cb3fc98710ee6906f0c7960" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3539,9 +3978,9 @@ dependencies = [ [[package]] name = "time" -version = "0.3.41" +version = "0.3.44" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8a7619e19bc266e0f9c5e6686659d394bc57973859340060a69221e57dbc0c40" +checksum = "91e7d9e3bb61134e77bde20dd4825b97c010155709965fedf0f49bb138e52a9d" dependencies = [ "deranged", "itoa", @@ -3556,15 +3995,15 @@ dependencies = [ [[package]] name = "time-core" -version = "0.1.4" +version = "0.1.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c9e9a38711f559d9e3ce1cdb06dd7c5b8ea546bc90052da6d06bb76da74bb07c" +checksum = "40868e7c1d2f0b8d73e4a8c7f0ff63af4f6d19be117e90bd73eb1d62cf831c6b" [[package]] name = "time-macros" -version = "0.2.22" +version = "0.2.24" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "3526739392ec93fd8b359c8e98514cb3e8e021beb4e5f597b00a0221f8ed8a49" +checksum = "30cfb0125f12d9c277f35663a0a33f8c30190f4e4574868a330595412d34ebf3" dependencies = [ "num-conv", "time-core", @@ -3608,7 +4047,7 @@ checksum = "6e06d43f1345a3bcd39f6a56dbb7dcab2ba47e68e8ac134855e7e2bdbaf8cab8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3645,23 +4084,83 @@ dependencies = [ "tokio", ] +[[package]] +name = "toml" +version = "0.9.7" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "00e5e5d9bf2475ac9d4f0d9edab68cc573dc2fd644b0dba36b0c30a92dd9eaa0" +dependencies = [ + "indexmap", + "serde_core", + "serde_spanned", + "toml_datetime", + "toml_parser", + "toml_writer", + "winnow", +] + 
[[package]] name = "toml_datetime" -version = "0.6.11" +version = "0.7.2" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "22cddaf88f4fbc13c51aebbf5f8eceb5c7c5a9da2ac40a13519eb5b0a0e8f11c" +checksum = "32f1085dec27c2b6632b04c80b3bb1b4300d6495d1e129693bdda7d91e72eec1" +dependencies = [ + "serde_core", +] [[package]] name = "toml_edit" -version = "0.22.27" +version = "0.23.6" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "41fe8c660ae4257887cf66394862d21dbca4a6ddd26f04a3560410406a2f819a" +checksum = "f3effe7c0e86fdff4f69cdd2ccc1b96f933e24811c5441d44904e8683e27184b" dependencies = [ "indexmap", "toml_datetime", + "toml_parser", + "winnow", +] + +[[package]] +name = "toml_parser" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "4cf893c33be71572e0e9aa6dd15e6677937abd686b066eac3f8cd3531688a627" +dependencies = [ "winnow", ] +[[package]] +name = "toml_writer" +version = "1.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d163a63c116ce562a22cda521fcc4d79152e7aba014456fb5eb442f6d6a10109" + +[[package]] +name = "tonic" +version = "0.13.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "7e581ba15a835f4d9ea06c55ab1bd4dce26fc53752c69a04aac00703bfb49ba9" +dependencies = [ + "async-trait", + "base64", + "bytes", + "http", + "http-body", + "http-body-util", + "hyper", + "hyper-timeout", + "hyper-util", + "percent-encoding", + "pin-project", + "prost", + "tokio", + "tokio-stream", + "tower", + "tower-layer", + "tower-service", + "tracing", +] + [[package]] name = "tower" version = "0.5.2" @@ -3670,9 +4169,12 @@ checksum = "d039ad9159c98b70ecfd540b2573b97f7f52c3e8d9f8ad57a24b916a536975f9" dependencies = [ "futures-core", "futures-util", + "indexmap", "pin-project-lite", + "slab", "sync_wrapper", "tokio", + "tokio-util", "tower-layer", "tower-service", "tracing", @@ -3684,7 +4186,7 @@ version = "0.6.6" source = 
"registry+https://github.com/rust-lang/crates.io-index" checksum = "adc82fd73de2a9722ac5da747f12383d2bfdb93591ee6c58486e0097890f05f2" dependencies = [ - "bitflags 2.9.1", + "bitflags 2.9.4", "bytes", "futures-util", "http", @@ -3694,6 +4196,7 @@ dependencies = [ "tower", "tower-layer", "tower-service", + "tracing", ] [[package]] @@ -3740,7 +4243,7 @@ checksum = "81383ab64e72a7a8b8e13130c49e3dab29def6d0c7d76a03087b3cf71c5c6903" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3776,6 +4279,36 @@ dependencies = [ "tracing-core", ] +[[package]] +name = "tracing-opentelemetry" +version = "0.31.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddcf5959f39507d0d04d6413119c04f33b623f4f951ebcbdddddfad2d0623a9c" +dependencies = [ + "js-sys", + "once_cell", + "opentelemetry", + "opentelemetry_sdk", + "smallvec", + "tracing", + "tracing-core", + "tracing-log", + "tracing-subscriber", + "web-time", +] + +[[package]] +name = "tracing-opentelemetry-instrumentation-sdk" +version = "0.29.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "a13836788f587ab71400ef44b07196430782b5e483e189933c38dddb81381574" +dependencies = [ + "http", + "opentelemetry", + "tracing", + "tracing-opentelemetry", +] + [[package]] name = "tracing-serde" version = "0.2.0" @@ -3825,7 +4358,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "04659ddb06c87d233c566112c1c9c5b9e98256d9af50ec3bc9c8327f873a7568" dependencies = [ "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -3873,9 +4406,9 @@ dependencies = [ [[package]] name = "unicode-ident" -version = "1.0.18" +version = "1.0.19" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "5a5f39404a5da50712a4c1eecf25e90dd62b613502b7e925fd4e4d19b5c96512" +checksum = "f63a545481291138910575129486daeaf8ac54aee4387fe7906919f7830c7d9d" [[package]] name = "unicode-segmentation" @@ -3909,9 +4442,9 @@ 
checksum = "8ecb6da28b8a351d773b68d5825ac39017e680750f980f3a1a85cd8dd28a47c1" [[package]] name = "url" -version = "2.5.4" +version = "2.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "32f8b686cadd1473f4bd0117a5d28d36b1ade384ea9b5069a1c40aefed7fda60" +checksum = "08bc136a29a3d1758e07a9cca267be308aeebf5cfd5a10f3f67ab2097683ef5b" dependencies = [ "form_urlencoded", "idna", @@ -3939,9 +4472,9 @@ checksum = "06abde3611657adf66d383f00b093d7faecc7fa57071cce2578660c9f1010821" [[package]] name = "uuid" -version = "1.18.0" +version = "1.18.1" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f33196643e165781c20a5ead5582283a7dacbb87855d867fbc2df3f81eddc1be" +checksum = "2f87b8aa10b915a06587d0dec516c282ff295b475d94abf425d62b57710070a2" dependencies = [ "getrandom 0.3.3", "js-sys", @@ -3949,6 +4482,17 @@ dependencies = [ "wasm-bindgen", ] +[[package]] +name = "uuid-simd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "23b082222b4f6619906941c17eb2297fff4c2fb96cb60164170522942a200bd8" +dependencies = [ + "outref", + "uuid", + "vsimd", +] + [[package]] name = "valuable" version = "0.1.1" @@ -3967,6 +4511,12 @@ version = "0.9.5" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "0b928f33d975fc6ad9f86c8f283853ad26bdd5b10b7f1542aa2fa15e2289105a" +[[package]] +name = "vsimd" +version = "0.8.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "5c3082ca00d5a5ef149bb8b555a72ae84c9c59f7250f013ac822ac2e49b19c64" + [[package]] name = "walkdir" version = "2.5.0" @@ -3994,44 +4544,54 @@ checksum = "ccf3ec651a847eb01de73ccad15eb7d99f80485de043efb2f370cd654f4ea44b" [[package]] name = "wasi" -version = "0.14.2+wasi-0.2.4" +version = "0.14.7+wasi-0.2.4" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9683f9a5a998d873c0d21fcbe3c083009670149a8fab228644b8bd36b2c48cb3" +checksum = 
"883478de20367e224c0090af9cf5f9fa85bed63a95c1abf3afc5c083ebc06e8c" dependencies = [ - "wit-bindgen-rt", + "wasip2", +] + +[[package]] +name = "wasip2" +version = "1.0.1+wasi-0.2.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "0562428422c63773dad2c345a1882263bbf4d65cf3f42e90921f787ef5ad58e7" +dependencies = [ + "wit-bindgen", ] [[package]] name = "wasm-bindgen" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1edc8929d7499fc4e8f0be2262a241556cfc54a0bea223790e71446f2aab1ef5" +checksum = "c1da10c01ae9f1ae40cbfac0bac3b1e724b320abfcf52229f80b547c0d250e2d" dependencies = [ "cfg-if", "once_cell", "rustversion", "wasm-bindgen-macro", + "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-backend" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "2f0a0651a5c2bc21487bde11ee802ccaf4c51935d0d3d42a6101f98161700bc6" +checksum = "671c9a5a66f49d8a47345ab942e2cb93c7d1d0339065d4f8139c486121b43b19" dependencies = [ "bumpalo", "log", "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-futures" -version = "0.4.50" +version = "0.4.54" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "555d470ec0bc3bb57890405e5d4322cc9ea83cebb085523ced7be4144dac1e61" +checksum = "7e038d41e478cc73bae0ff9b36c60cff1c98b8f38f8d7e8061e79ee63608ac5c" dependencies = [ "cfg-if", "js-sys", @@ -4042,9 +4602,9 @@ dependencies = [ [[package]] name = "wasm-bindgen-macro" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "7fe63fc6d09ed3792bd0897b314f53de8e16568c2b3f7982f468c0bf9bd0b407" +checksum = "7ca60477e4c59f5f2986c50191cd972e3a50d8a95603bc9434501cf156a9a119" dependencies = [ "quote", "wasm-bindgen-macro-support", @@ -4052,31 +4612,41 @@ dependencies = [ [[package]] name = 
"wasm-bindgen-macro-support" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "8ae87ea40c9f689fc23f209965b6fb8a99ad69aeeb0231408be24920604395de" +checksum = "9f07d2f20d4da7b26400c9f4a0511e6e0345b040694e8a75bd41d578fa4421d7" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", "wasm-bindgen-backend", "wasm-bindgen-shared", ] [[package]] name = "wasm-bindgen-shared" -version = "0.2.100" +version = "0.2.104" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1a05d73b933a847d6cccdda8f838a22ff101ad9bf93e33684f39c1f5f0eece3d" +checksum = "bad67dc8b2a1a6e5448428adec4c3e84c43e561d8c9ee8a9e5aabeb193ec41d1" dependencies = [ "unicode-ident", ] [[package]] name = "web-sys" -version = "0.3.77" +version = "0.3.81" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "9367c417a924a74cae129e6a2ae3b47fabb1f8995595ab474029da749a8be120" +dependencies = [ + "js-sys", + "wasm-bindgen", +] + +[[package]] +name = "web-time" +version = "1.1.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "33b6dd2ef9186f1f2072e409e99cd22a975331a6b3591b12c764e0e55c60d5d2" +checksum = "5a6580f308b1fad9207618087a65c04e7a10bc77e02c8e84e9b00dd4b12fa0bb" dependencies = [ "js-sys", "wasm-bindgen", @@ -4100,11 +4670,11 @@ checksum = "ac3b87c63620426dd9b991e5ce0329eff545bccbbb34f3be09ff6fb6ab51b7b6" [[package]] name = "winapi-util" -version = "0.1.9" +version = "0.1.11" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "cf221c93e13a30d793f7645a0e7762c55d169dbb0a49671918a2319d289b10bb" +checksum = "c2a7b1c03c876122aa43f3020e6c3c3ee5c05081c9a00739faf7503aeba10d22" dependencies = [ - "windows-sys 0.59.0", + "windows-sys 0.61.0", ] [[package]] @@ -4115,13 +4685,13 @@ checksum = "712e227841d057c1ee1cd2fb22fa7e5a5461ae8e48fa2ca79ec42cfc1931183f" [[package]] name = "windows-core" -version = "0.61.2" +version = "0.62.0" 
source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "c0fdd3ddb90610c7638aa2b3a3ab2904fb9e5cdbecc643ddb3647212781c4ae3" +checksum = "57fe7168f7de578d2d8a05b07fd61870d2e73b4020e9f49aa00da8471723497c" dependencies = [ "windows-implement", "windows-interface", - "windows-link", + "windows-link 0.2.0", "windows-result", "windows-strings", ] @@ -4134,7 +4704,7 @@ checksum = "a47fddd13af08290e67f4acabf4b459f647552718f683a7b415d290ac744a836" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -4145,7 +4715,7 @@ checksum = "bd9211b69f8dcdfa817bfd14bf1c97c9188afa36f4750130fcdf3f400eca9fa8" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -4154,22 +4724,28 @@ version = "0.1.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "5e6ad25900d524eaabdbbb96d20b4311e1e7ae1699af4fb28c17ae66c80d798a" +[[package]] +name = "windows-link" +version = "0.2.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "45e46c0661abb7180e7b9c281db115305d49ca1709ab8242adf09666d2173c65" + [[package]] name = "windows-result" -version = "0.3.4" +version = "0.4.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56f42bd332cc6c8eac5af113fc0c1fd6a8fd2aa08a0119358686e5160d0586c6" +checksum = "7084dcc306f89883455a206237404d3eaf961e5bd7e0f312f7c91f57eb44167f" dependencies = [ - "windows-link", + "windows-link 0.2.0", ] [[package]] name = "windows-strings" -version = "0.4.2" +version = "0.5.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "56e6c93f3a0c3b36176cb1327a4958a0353d5d166c2a35cb268ace15e91d3b57" +checksum = "7218c655a553b0bed4426cf54b20d7ba363ef543b52d515b3e48d7fd55318dda" dependencies = [ - "windows-link", + "windows-link 0.2.0", ] [[package]] @@ -4208,6 +4784,15 @@ dependencies = [ "windows-targets 0.53.3", ] +[[package]] +name = "windows-sys" +version = "0.61.0" +source = 
"registry+https://github.com/rust-lang/crates.io-index" +checksum = "e201184e40b2ede64bc2ea34968b28e33622acdbbf37104f0e4a33f7abe657aa" +dependencies = [ + "windows-link 0.2.0", +] + [[package]] name = "windows-targets" version = "0.42.2" @@ -4245,7 +4830,7 @@ version = "0.53.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "d5fe6031c4041849d7c496a8ded650796e7b6ecc19df1a431c1a363342e5dc91" dependencies = [ - "windows-link", + "windows-link 0.1.3", "windows_aarch64_gnullvm 0.53.0", "windows_aarch64_msvc 0.53.0", "windows_i686_gnu 0.53.0", @@ -4396,21 +4981,20 @@ checksum = "271414315aff87387382ec3d271b52d7ae78726f5d44ac98b4f4030c91880486" [[package]] name = "winnow" -version = "0.7.12" +version = "0.7.13" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "f3edebf492c8125044983378ecb5766203ad3b4c2f7a922bd7dd207f6d443e95" +checksum = "21a0236b59786fed61e2a80582dd500fe61f18b5dca67a4a067d0bc9039339cf" dependencies = [ "memchr", ] [[package]] name = "wiremock" -version = "0.6.4" +version = "0.6.5" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "a2b8b99d4cdbf36b239a9532e31fe4fb8acc38d1897c1761e161550a7dc78e6a" +checksum = "08db1edfb05d9b3c1542e521aea074442088292f00b5f28e435c714a98f85031" dependencies = [ "assert-json-diff", - "async-trait", "base64", "deadpool", "futures", @@ -4428,13 +5012,10 @@ dependencies = [ ] [[package]] -name = "wit-bindgen-rt" -version = "0.39.0" +name = "wit-bindgen" +version = "0.46.0" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "6f42320e61fe2cfd34354ecb597f86f413484a798ba44a8ca1165c58d42da6c1" -dependencies = [ - "bitflags 2.9.1", -] +checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59" [[package]] name = "writeable" @@ -4468,28 +5049,28 @@ checksum = "38da3c9736e16c5d3c8c597a9aaa5d1fa565d0532ae05e27c24aa62fb32c0ab6" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", 
"synstructure", ] [[package]] name = "zerocopy" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "1039dd0d3c310cf05de012d8a39ff557cb0d23087fd44cad61df08fc31907a2f" +checksum = "0894878a5fa3edfd6da3f88c4805f4c8558e2b996227a3d864f47fe11e38282c" dependencies = [ "zerocopy-derive", ] [[package]] name = "zerocopy-derive" -version = "0.8.26" +version = "0.8.27" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "9ecf5b4cc5364572d7f4c329661bcc82724222973f2cab6f050a4e5c22f75181" +checksum = "88d2b8d9c68ad2b9e4340d7832716a4d21a22a1154777ad56ea55c51a9cf3831" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -4509,7 +5090,7 @@ checksum = "d71e5d6e06ab090c67b5e44993ec16b72dcbaabc526db883a360057678b48502" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", "synstructure", ] @@ -4549,7 +5130,7 @@ checksum = "5b96237efa0c878c64bd89c436f661be4e46b2f3eff1ebb976f7ef2321d2f58f" dependencies = [ "proc-macro2", "quote", - "syn 2.0.105", + "syn 2.0.106", ] [[package]] @@ -4572,9 +5153,9 @@ dependencies = [ [[package]] name = "zstd-sys" -version = "2.0.15+zstd.1.5.7" +version = "2.0.16+zstd.1.5.7" source = "registry+https://github.com/rust-lang/crates.io-index" -checksum = "eb81183ddd97d0c74cedf1d50d85c8d08c1b8b68ee863bdee9e706eedba1a237" +checksum = "91e19ebc2adc8f83e43039e79776e3fda8ca919132d68a1fed6a5faca2683748" dependencies = [ "cc", "pkg-config", diff --git a/Cargo.toml b/Cargo.toml index 54fe14fd..a47efc59 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -8,7 +8,11 @@ members = [ [workspace.package] authors = ["Apollo "] -version = "0.7.5" +edition = "2024" +license-file = "LICENSE" +repository = "https://github.com/apollographql/apollo-mcp-server" +rust-version = "1.89.0" +version = "0.9.0" [workspace.dependencies] apollo-compiler = "1.27.0" @@ -50,6 +54,12 @@ url = { version = "2.4", features = ["serde"] } [workspace.metadata] 
crane.name = "apollo-mcp" +# This allows usage of coverage(off) attribute without causing a linting error. +# This attribute doesn't work in stable Rust yet and can be removed whenever it does. +# See https://github.com/apollographql/apollo-mcp-server/pull/372 +[workspace.lints.rust] +unexpected_cfgs = { level = "warn", check-cfg = ['cfg(coverage_nightly)'] } + [workspace.lints.clippy] exit = "deny" expect_used = "deny" diff --git a/README.md b/README.md index 21718f02..196e5561 100644 --- a/README.md +++ b/README.md @@ -7,6 +7,7 @@ ![release binaries workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-bins.yml?label=release%20binaries) ![release container workflow status](https://img.shields.io/github/actions/workflow/status/apollographql/apollo-mcp-server/release-container.yml?label=release%20container) ![license](https://img.shields.io/github/license/apollographql/apollo-mcp-server) +[![codecov](https://codecov.io/github/apollographql/apollo-mcp-server/graph/badge.svg?token=6NHuvZQ8ak)](https://codecov.io/github/apollographql/apollo-mcp-server) # Apollo MCP Server diff --git a/codecov.yml b/codecov.yml new file mode 100644 index 00000000..eb8e5e8a --- /dev/null +++ b/codecov.yml @@ -0,0 +1,10 @@ +coverage: + status: + project: + default: + # Should increase overall coverage on each PR + target: auto + patch: + default: + # Require 80% coverage on all new/modified code + target: 80% diff --git a/crates/apollo-mcp-registry/Cargo.toml b/crates/apollo-mcp-registry/Cargo.toml index bf77a385..6a6c8565 100644 --- a/crates/apollo-mcp-registry/Cargo.toml +++ b/crates/apollo-mcp-registry/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "apollo-mcp-registry" -version.workspace = true -edition = "2024" authors.workspace = true -license-file = "../LICENSE" -repository = "https://github.com/apollographql/apollo-mcp-server" +edition.workspace = true +license-file.workspace = true +repository.workspace = true 
+rust-version.workspace = true +version.workspace = true + description = "Registry providing schema and operations to the MCP Server" [dependencies] diff --git a/crates/apollo-mcp-registry/src/platform_api/operation_collections/collection_poller.rs b/crates/apollo-mcp-registry/src/platform_api/operation_collections/collection_poller.rs index 9f1a6605..faeab984 100644 --- a/crates/apollo-mcp-registry/src/platform_api/operation_collections/collection_poller.rs +++ b/crates/apollo-mcp-registry/src/platform_api/operation_collections/collection_poller.rs @@ -248,7 +248,7 @@ impl From<&OperationCollectionDefaultEntry> for OperationData { } } -#[derive(Clone)] +#[derive(Clone, Debug)] pub enum CollectionSource { Id(String, PlatformApiConfig), Default(String, PlatformApiConfig), diff --git a/crates/apollo-mcp-registry/src/uplink.rs b/crates/apollo-mcp-registry/src/uplink.rs index 2e52e589..ed64c1c1 100644 --- a/crates/apollo-mcp-registry/src/uplink.rs +++ b/crates/apollo-mcp-registry/src/uplink.rs @@ -402,15 +402,294 @@ where .send() .await .inspect_err(|e| { - if let Some(hyper_err) = e.source() { - if let Some(os_err) = hyper_err.source() { - if os_err.to_string().contains("tcp connect error: Cannot assign requested address (os error 99)") { - tracing::warn!("If your MCP server is executing within a kubernetes pod, this failure may be caused by istio-proxy injection. See https://github.com/apollographql/router/issues/3533 for more details about how to solve this"); - } - } + if let Some(hyper_err) = e.source() && + let Some(os_err) = hyper_err.source() && + os_err.to_string().contains("tcp connect error: Cannot assign requested address (os error 99)") + { + tracing::warn!("If your MCP server is executing within a kubernetes pod, this failure may be caused by istio-proxy injection. 
See https://github.com/apollographql/router/issues/3533 for more details about how to solve this"); } })?; tracing::debug!("uplink response {:?}", res); let response_body: graphql_client::Response = res.json().await?; Ok(response_body) } + +#[cfg(test)] +mod test { + use super::*; + use futures::stream::StreamExt; + use secrecy::SecretString; + use std::str::FromStr; + use std::time::Duration; + use url::Url; + + #[tokio::test] + async fn test_stream_from_uplink() { + for url in &[GCP_URL, AWS_URL] { + if let (Ok(apollo_key), Ok(apollo_graph_ref)) = ( + std::env::var("TEST_APOLLO_KEY"), + std::env::var("TEST_APOLLO_GRAPH_REF"), + ) { + let results = + stream_from_uplink::(UplinkConfig { + apollo_key: SecretString::from(apollo_key), + apollo_graph_ref, + endpoints: Some(Endpoints::fallback(vec![ + Url::from_str(url).expect("url must be valid"), + ])), + poll_interval: Duration::from_secs(1), + timeout: Duration::from_secs(5), + }) + .take(1) + .collect::>() + .await; + + let schema = results + .first() + .unwrap_or_else(|| panic!("expected one result from {url}")) + .as_ref() + .unwrap_or_else(|_| panic!("schema should be OK from {url}")); + assert!(schema.contains("type Product")) + } + } + } + + #[test] + fn test_uplink_config_for_tests() { + let endpoints = Endpoints::fallback(vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + ]); + + let config = UplinkConfig::for_tests(endpoints.clone()); + + assert_eq!(config.apollo_key.expose_secret(), "key"); + assert_eq!(config.apollo_graph_ref, "graph"); + assert_eq!(config.poll_interval, Duration::from_secs(2)); + assert_eq!(config.timeout, Duration::from_secs(5)); + + // Check endpoints + if let Some(Endpoints::Fallback { urls }) = config.endpoints { + assert_eq!(urls.len(), 2); + assert_eq!(urls[0].as_str(), "http://test1.example.com/"); + assert_eq!(urls[1].as_str(), "http://test2.example.com/"); + } else { + panic!("Expected fallback endpoints"); + } + } 
+ + #[test] + fn test_endpoints_fallback() { + let urls = vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + ]; + let endpoints = Endpoints::fallback(urls.clone()); + + if let Endpoints::Fallback { + urls: fallback_urls, + } = endpoints + { + assert_eq!(fallback_urls.len(), 2); + assert_eq!(fallback_urls[0], urls[0]); + assert_eq!(fallback_urls[1], urls[1]); + } else { + panic!("Expected fallback endpoints"); + } + } + + #[test] + fn test_endpoints_round_robin() { + let urls = vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + ]; + let endpoints = Endpoints::round_robin(urls.clone()); + + if let Endpoints::RoundRobin { + urls: rr_urls, + current, + } = endpoints + { + assert_eq!(rr_urls.len(), 2); + assert_eq!(rr_urls[0], urls[0]); + assert_eq!(rr_urls[1], urls[1]); + assert_eq!(current, 0); + } else { + panic!("Expected round robin endpoints"); + } + } + + #[test] + fn test_endpoints_url_count() { + let urls = vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + Url::from_str("http://test3.example.com").unwrap(), + ]; + + let fallback = Endpoints::fallback(urls.clone()); + assert_eq!(fallback.url_count(), 3); + + let round_robin = Endpoints::round_robin(urls); + assert_eq!(round_robin.url_count(), 3); + } + + #[test] + fn test_endpoints_iter_fallback() { + let urls = vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + ]; + let mut endpoints = Endpoints::fallback(urls.clone()); + + { + let iter_urls: Vec<&Url> = endpoints.iter().collect(); + assert_eq!(iter_urls.len(), 2); + assert_eq!(iter_urls[0], &urls[0]); + assert_eq!(iter_urls[1], &urls[1]); + } + + // Fallback should always return the same order + { + let iter_urls2: Vec<&Url> = endpoints.iter().collect(); + assert_eq!(iter_urls2.len(), 2); + 
assert_eq!(iter_urls2[0], &urls[0]); + assert_eq!(iter_urls2[1], &urls[1]); + } + } + + #[test] + fn test_endpoints_iter_round_robin() { + let urls = vec![ + Url::from_str("http://test1.example.com").unwrap(), + Url::from_str("http://test2.example.com").unwrap(), + Url::from_str("http://test3.example.com").unwrap(), + ]; + let mut endpoints = Endpoints::round_robin(urls.clone()); + + // First iteration should start at index 0 + { + let iter_urls1: Vec<&Url> = endpoints.iter().collect(); + assert_eq!(iter_urls1.len(), 3); + assert_eq!(iter_urls1[0], &urls[0]); + assert_eq!(iter_urls1[1], &urls[1]); + assert_eq!(iter_urls1[2], &urls[2]); + } + + // Second iteration should start at index 3 (current incremented to 3, then mod 3 = 0) + // But since the inspect closure increments current for each item yielded, + // the actual behavior is that current advances as the iterator is consumed + { + let iter_urls2: Vec<&Url> = endpoints.iter().collect(); + assert_eq!(iter_urls2.len(), 3); + // After the first iteration consumed 3 items, current should be 3, then 3 % 3 = 0 + assert_eq!(iter_urls2[0], &urls[0]); + assert_eq!(iter_urls2[1], &urls[1]); + assert_eq!(iter_urls2[2], &urls[2]); + } + } + + #[test] + fn test_endpoints_default() { + let endpoints = Endpoints::default(); + assert_eq!(endpoints.url_count(), 2); // GCP_URL and AWS_URL + + if let Endpoints::Fallback { urls } = endpoints { + // URLs parsed with trailing slash + assert_eq!(urls[0].as_str(), "https://uplink.api.apollographql.com/"); + assert_eq!( + urls[1].as_str(), + "https://aws.uplink.api.apollographql.com/" + ); + } else { + panic!("Expected fallback endpoints"); + } + } + + #[test] + fn test_uplink_config_default() { + let config = UplinkConfig::default(); + + assert_eq!(config.apollo_key.expose_secret(), ""); + assert_eq!(config.apollo_graph_ref, ""); + assert!(config.endpoints.is_none()); + assert_eq!(config.poll_interval, Duration::from_secs(0)); + assert_eq!(config.timeout, Duration::from_secs(0)); + } 
+ + #[test] + fn test_error_display() { + let error1 = Error::FetchFailedSingle; + assert_eq!( + error1.to_string(), + "fetch failed from uplink endpoint, and there are no fallback endpoints configured" + ); + + let error2 = Error::FetchFailedMultiple { url_count: 3 }; + assert_eq!( + error2.to_string(), + "fetch failed from all 3 uplink endpoints" + ); + + let error3 = Error::UplinkError { + code: "AUTH_FAILED".to_string(), + message: "Invalid API key".to_string(), + }; + assert_eq!( + error3.to_string(), + "uplink error: code=AUTH_FAILED message=Invalid API key" + ); + + let error4 = Error::UplinkErrorNoRetry { + code: "UNKNOWN_REF".to_string(), + message: "Graph not found".to_string(), + }; + assert_eq!( + error4.to_string(), + "uplink error, the request will not be retried: code=UNKNOWN_REF message=Graph not found" + ); + } + + #[test] + fn test_uplink_request_debug() { + let request = UplinkRequest { + api_key: "test_api_key".to_string(), + graph_ref: "test@main".to_string(), + id: Some("test_id".to_string()), + }; + + let debug_output = format!("{:?}", request); + assert!(debug_output.contains("test_api_key")); + assert!(debug_output.contains("test@main")); + assert!(debug_output.contains("test_id")); + } + + #[test] + fn test_uplink_response_debug() { + let response_new = UplinkResponse::New { + response: "test_response".to_string(), + id: "test_id".to_string(), + delay: 30, + }; + let debug_new = format!("{:?}", response_new); + assert!(debug_new.contains("New")); + assert!(debug_new.contains("test_response")); + + let response_unchanged = UplinkResponse::::Unchanged { + id: Some("test_id".to_string()), + delay: Some(30), + }; + let debug_unchanged = format!("{:?}", response_unchanged); + assert!(debug_unchanged.contains("Unchanged")); + + let response_error = UplinkResponse::::Error { + retry_later: true, + code: "RETRY_LATER".to_string(), + message: "Try again".to_string(), + }; + let debug_error = format!("{:?}", response_error); + 
assert!(debug_error.contains("Error")); + assert!(debug_error.contains("retry_later: true")); + } +} diff --git a/crates/apollo-mcp-registry/src/uplink/schema.rs b/crates/apollo-mcp-registry/src/uplink/schema.rs index b9fc9d4e..b03bbefd 100644 --- a/crates/apollo-mcp-registry/src/uplink/schema.rs +++ b/crates/apollo-mcp-registry/src/uplink/schema.rs @@ -9,7 +9,6 @@ use std::pin::Pin; use std::time::Duration; use crate::uplink::UplinkConfig; -use crate::uplink::schema::schema_stream::SupergraphSdlQuery; use crate::uplink::stream_from_uplink; use derive_more::Display; use derive_more::From; @@ -17,6 +16,7 @@ use educe::Educe; use event::Event; use event::Event::{NoMoreSchema, UpdateSchema}; use futures::prelude::*; +pub(crate) use schema_stream::SupergraphSdlQuery; use url::Url; /// Represents the new state of a schema after an update. @@ -181,12 +181,6 @@ impl SchemaSource { } } -#[derive(thiserror::Error, Debug)] -enum FetcherError { - #[error("failed to build http client")] - InitializationError(#[from] reqwest::Error), -} - // Encapsulates fetching the schema from the first viable url. // It will try each url in order until it finds one that works. 
#[allow(clippy::unwrap_used)] // TODO - existing unwrap from router code diff --git a/crates/apollo-mcp-registry/src/uplink/schema/event.rs b/crates/apollo-mcp-registry/src/uplink/schema/event.rs index c987a946..1c132295 100644 --- a/crates/apollo-mcp-registry/src/uplink/schema/event.rs +++ b/crates/apollo-mcp-registry/src/uplink/schema/event.rs @@ -1,6 +1,7 @@ -use crate::uplink::schema::SchemaState; +use super::SchemaState; use std::fmt::Debug; use std::fmt::Formatter; +use std::fmt::Result; /// Schema events pub enum Event { @@ -12,7 +13,7 @@ pub enum Event { } impl Debug for Event { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter) -> Result { match self { Event::UpdateSchema(_) => { write!(f, "UpdateSchema()") @@ -23,3 +24,28 @@ impl Debug for Event { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_debug_event_no_more_schema() { + let event = Event::NoMoreSchema; + let output = format!("{:?}", event); + assert_eq!(output, "NoMoreSchema"); + } + + #[test] + fn test_debug_redacts_update_schema() { + let event = Event::UpdateSchema(SchemaState { + sdl: "type Query { hello: String }".to_string(), + launch_id: Some("test-launch-123".to_string()), + }); + + let output = format!("{:?}", event); + assert_eq!(output, "UpdateSchema()"); + assert!(!output.contains("type Query")); + assert!(!output.contains("test-launch-123")); + } +} diff --git a/crates/apollo-mcp-registry/src/uplink/schema/schema_stream.rs b/crates/apollo-mcp-registry/src/uplink/schema/schema_stream.rs index be04b494..3c230f37 100644 --- a/crates/apollo-mcp-registry/src/uplink/schema/schema_stream.rs +++ b/crates/apollo-mcp-registry/src/uplink/schema/schema_stream.rs @@ -99,47 +99,81 @@ impl From for UplinkResponse { #[cfg(test)] mod test { - use std::str::FromStr; - use std::time::Duration; - - use futures::stream::StreamExt; - use secrecy::SecretString; - use url::Url; - - use crate::uplink::AWS_URL; - use crate::uplink::Endpoints; 
- use crate::uplink::GCP_URL; - use crate::uplink::UplinkConfig; - use crate::uplink::schema::schema_stream::SupergraphSdlQuery; - use crate::uplink::stream_from_uplink; - - #[tokio::test] - async fn integration_test() { - for url in &[GCP_URL, AWS_URL] { - if let (Ok(apollo_key), Ok(apollo_graph_ref)) = ( - std::env::var("TEST_APOLLO_KEY"), - std::env::var("TEST_APOLLO_GRAPH_REF"), - ) { - let results = stream_from_uplink::(UplinkConfig { - apollo_key: SecretString::from(apollo_key), - apollo_graph_ref, - endpoints: Some(Endpoints::fallback(vec![ - Url::from_str(url).expect("url must be valid"), - ])), - poll_interval: Duration::from_secs(1), - timeout: Duration::from_secs(5), - }) - .take(1) - .collect::>() - .await; - - let schema = results - .first() - .unwrap_or_else(|| panic!("expected one result from {url}")) - .as_ref() - .unwrap_or_else(|_| panic!("schema should be OK from {url}")); - assert!(schema.contains("type Product")) - } - } + use super::*; + + #[test] + fn test_uplink_request_to_graphql_variables() { + let request = UplinkRequest { + api_key: "test_key".to_string(), + graph_ref: "test_ref".to_string(), + id: Some("test_id".to_string()), + }; + + let variables: supergraph_sdl_query::Variables = request.into(); + + assert_eq!(variables.api_key, "test_key"); + assert_eq!(variables.graph_ref, "test_ref"); + assert_eq!(variables.if_after_id, Some("test_id".to_string())); + } + + #[test] + fn test_graphql_response_to_uplink_response_new() { + let response = supergraph_sdl_query::ResponseData { + router_config: SupergraphSdlQueryRouterConfig::RouterConfigResult( + supergraph_sdl_query::SupergraphSdlQueryRouterConfigOnRouterConfigResult { + supergraph_sdl: "test_sdl".to_string(), + id: "result_id".to_string(), + min_delay_seconds: 42.0, + }, + ), + }; + + let uplink_response: UplinkResponse = response.into(); + + assert!(matches!( + uplink_response, + UplinkResponse::New { response, id, delay } + if response == "test_sdl" && id == "result_id" && delay == 
42 + )); + } + + #[test] + fn test_graphql_response_to_uplink_response_unchanged() { + let response = supergraph_sdl_query::ResponseData { + router_config: SupergraphSdlQueryRouterConfig::Unchanged( + supergraph_sdl_query::SupergraphSdlQueryRouterConfigOnUnchanged { + id: "unchanged_id".to_string(), + min_delay_seconds: 30.0, + }, + ), + }; + + let uplink_response: UplinkResponse = response.into(); + + assert!(matches!( + uplink_response, + UplinkResponse::Unchanged { id, delay } + if id == Some("unchanged_id".to_string()) && delay == Some(30) + )); + } + + #[test] + fn test_graphql_response_to_uplink_response_error() { + let response = supergraph_sdl_query::ResponseData { + router_config: SupergraphSdlQueryRouterConfig::FetchError( + supergraph_sdl_query::SupergraphSdlQueryRouterConfigOnFetchError { + code: FetchErrorCode::RETRY_LATER, + message: "Try again later".to_string(), + }, + ), + }; + + let uplink_response: UplinkResponse = response.into(); + + assert!(matches!( + uplink_response, + UplinkResponse::Error { retry_later, code, message } + if retry_later && code == "RETRY_LATER" && message == "Try again later" + )); } } diff --git a/crates/apollo-mcp-server/Cargo.toml b/crates/apollo-mcp-server/Cargo.toml index 0c99746d..b2b0fcbc 100644 --- a/crates/apollo-mcp-server/Cargo.toml +++ b/crates/apollo-mcp-server/Cargo.toml @@ -1,9 +1,13 @@ [package] name = "apollo-mcp-server" -version.workspace = true authors.workspace = true -edition = "2024" -license-file = "../LICENSE" +edition.workspace = true +license-file.workspace = true +repository.workspace = true +rust-version.workspace = true +version.workspace = true +build = "build.rs" + default-run = "apollo-mcp-server" [dependencies] @@ -14,6 +18,8 @@ apollo-mcp-registry = { path = "../apollo-mcp-registry" } apollo-schema-index = { path = "../apollo-schema-index" } axum = "0.8.4" axum-extra = { version = "0.10.1", features = ["typed-header"] } +axum-otel-metrics = "0.12.0" +axum-tracing-opentelemetry = "0.29.0" 
bon = "3.6.3" clap = { version = "4.5.36", features = ["derive", "env"] } figment = { version = "0.10.19", features = ["env", "yaml"] } @@ -21,12 +27,30 @@ futures.workspace = true headers = "0.4.1" http = "1.3.1" humantime-serde = "1.1.1" +jsonschema = "0.33.0" jsonwebtoken = "9" jwks = "0.4.0" lz-str = "0.2.1" +opentelemetry = "0.30.0" +opentelemetry-appender-log = "0.30.0" +opentelemetry-otlp = { version = "0.30.0", features = [ + "grpc-tonic", + "tonic", + "http-proto", + "metrics", + "trace", +] } +opentelemetry-resource-detectors = "0.9.0" +opentelemetry-semantic-conventions = "0.30.0" +opentelemetry-stdout = "0.30.0" +opentelemetry_sdk = { version = "0.30.0", features = [ + "spec_unstable_metrics_views", +] } regex = "1.11.1" +reqwest-middleware = "0.4.2" +reqwest-tracing = { version = "0.5.8", features = ["opentelemetry_0_30"] } reqwest.workspace = true -rmcp = { version = "0.2", features = [ +rmcp = { version = "0.6", features = [ "server", "transport-io", "transport-sse-server", @@ -37,22 +61,35 @@ serde.workspace = true serde_json.workspace = true thiserror.workspace = true tokio.workspace = true -tracing.workspace = true +tokio-util = "0.7.15" +tower-http = { version = "0.6.6", features = ["cors", "trace"] } tracing-appender = "0.2.3" +tracing-core.workspace = true +tracing-opentelemetry = "0.31.0" tracing-subscriber = { version = "0.3.19", features = ["env-filter"] } -tokio-util = "0.7.15" -tower-http = { version = "0.6.6", features = ["cors"] } +tracing.workspace = true url.workspace = true +async-trait = "0.1.89" [dev-dependencies] chrono = { version = "0.4.41", default-features = false, features = ["now"] } figment = { version = "0.10.19", features = ["test"] } insta.workspace = true mockito = "1.7.0" +opentelemetry_sdk = { version = "0.30.0", features = ["testing"] } rstest.workspace = true tokio.workspace = true +tower = "0.5.2" tracing-test = "0.2.5" +[build-dependencies] +cruet = "0.15.0" +prettyplease = "0.2.37" +quote = "1.0.40" 
+serde.workspace = true +syn = "2.0.106" +toml = "0.9.5" + [lints] workspace = true diff --git a/crates/apollo-mcp-server/build.rs b/crates/apollo-mcp-server/build.rs new file mode 100644 index 00000000..f213cc35 --- /dev/null +++ b/crates/apollo-mcp-server/build.rs @@ -0,0 +1,169 @@ +#![allow(clippy::unwrap_used)] +#![allow(clippy::expect_used)] +#![allow(clippy::panic)] + +//! Build Script for the Apollo MCP Server +//! +//! This mostly compiles all the available telemetry attributes +use quote::__private::TokenStream; +use quote::quote; +use serde::Deserialize; +use std::io::Write; +use std::{collections::VecDeque, io::Read as _}; +use syn::{Ident, parse2}; + +#[derive(Deserialize)] +struct TelemetryTomlData { + attributes: toml::Table, + metrics: toml::Table, +} + +#[derive(Eq, PartialEq, Debug, Clone)] +struct TelemetryData { + name: String, + alias: String, + value: String, + description: String, +} + +fn flatten(table: toml::Table) -> Vec { + let mut to_visit = VecDeque::from_iter(table.into_iter().map(|(key, val)| (vec![key], val))); + let mut telemetry_data = Vec::new(); + + while let Some((key, value)) = to_visit.pop_front() { + match value { + toml::Value::String(val) => { + let last_key = key.last().unwrap().clone(); + telemetry_data.push(TelemetryData { + name: cruet::to_pascal_case(last_key.as_str()), + alias: last_key, + value: key.join("."), + description: val, + }); + } + toml::Value::Table(map) => to_visit.extend( + map.into_iter() + .map(|(nested_key, value)| ([key.clone(), vec![nested_key]].concat(), value)), + ), + + _ => panic!("telemetry values should be string descriptions"), + }; + } + + telemetry_data +} + +fn generate_enum(telemetry_data: &[TelemetryData]) -> Vec { + telemetry_data + .iter() + .map(|t| { + let enum_value_ident = quote::format_ident!("{}", &t.name); + let alias = &t.alias; + let doc_message = &t.description; + quote! 
{ + #[doc = #doc_message] + #[serde(alias = #alias)] + #enum_value_ident + } + }) + .collect::>() +} + +fn generate_enum_as_str_matches( + telemetry_data: &[TelemetryData], + enum_ident: Ident, +) -> Vec { + telemetry_data + .iter() + .map(|t| { + let name_ident = quote::format_ident!("{}", &t.name); + let value = &t.value; + quote! { + #enum_ident::#name_ident => #value + } + }) + .collect::>() +} + +fn main() { + // Parse the telemetry file + let telemetry: TelemetryTomlData = { + let mut raw = String::new(); + std::fs::File::open("telemetry.toml") + .expect("could not open telemetry file") + .read_to_string(&mut raw) + .expect("could not read telemetry file"); + + toml::from_str(&raw).expect("could not parse telemetry file") + }; + + // Generate the keys + let telemetry_attribute_data = flatten(telemetry.attributes); + let telemetry_metrics_data = flatten(telemetry.metrics); + + // Write out the generated keys + let out_dir = std::env::var_os("OUT_DIR").expect("could not retrieve output directory"); + let dest_path = std::path::Path::new(&out_dir).join("telemetry_attributes.rs"); + let mut generated_file = + std::fs::File::create(&dest_path).expect("could not create generated code file"); + + let attribute_keys_len = telemetry_attribute_data.len(); + let attribute_enum_keys = generate_enum(&telemetry_attribute_data); + let all_attribute_enum_values = &telemetry_attribute_data + .iter() + .map(|t| quote::format_ident!("{}", t.name)); + let all_attribute_enum_values = (*all_attribute_enum_values).clone(); + let attribute_enum_name = quote::format_ident!("{}", "TelemetryAttribute"); + let attribute_enum_as_str_matches = + generate_enum_as_str_matches(&telemetry_attribute_data, attribute_enum_name.clone()); + + let metric_enum_name = quote::format_ident!("{}", "TelemetryMetric"); + let metric_enum_keys = generate_enum(&telemetry_metrics_data); + let metric_enum_as_str_matches = + generate_enum_as_str_matches(&telemetry_metrics_data, metric_enum_name.clone()); + + 
let tokens = quote! { + /// All TelemetryAttribute values + pub const ALL_ATTRS: &[TelemetryAttribute; #attribute_keys_len] = &[#(TelemetryAttribute::#all_attribute_enum_values),*]; + + /// Supported telemetry attribute (tags) values + #[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema, Clone, Eq, PartialEq, Hash, Copy)] + pub enum #attribute_enum_name { + #(#attribute_enum_keys),* + } + + impl #attribute_enum_name { + /// Converts TelemetryAttribute to &str + pub const fn as_str(&self) -> &'static str { + match self { + #(#attribute_enum_as_str_matches),* + } + } + } + + /// Supported telemetry metrics + #[derive(Debug, ::serde::Deserialize, ::schemars::JsonSchema, Clone, Eq, PartialEq, Hash, Copy)] + pub enum #metric_enum_name { + #(#metric_enum_keys),* + } + + impl #metric_enum_name { + /// Converts TelemetryMetric to &str + pub const fn as_str(&self) -> &'static str { + match self { + #(#metric_enum_as_str_matches),* + } + } + } + }; + + let file = parse2(tokens).expect("Could not parse TokenStream"); + let code = prettyplease::unparse(&file); + + write!(generated_file, "{}", code).expect("Failed to write generated code"); + + // Inform cargo that we only want this to run when either this file or the telemetry + // one changes. 
+ println!("cargo::rerun-if-changed=build.rs"); + println!("cargo::rerun-if-changed=telemetry.toml"); +} diff --git a/crates/apollo-mcp-server/src/auth.rs b/crates/apollo-mcp-server/src/auth.rs index fc1f4bd6..e52e1729 100644 --- a/crates/apollo-mcp-server/src/auth.rs +++ b/crates/apollo-mcp-server/src/auth.rs @@ -46,6 +46,10 @@ pub struct Config { /// Supported OAuth scopes by this resource server pub scopes: Vec, + + /// Whether to disable the auth token passthrough to upstream API + #[serde(default)] + pub disable_auth_token_passthrough: bool, } impl Config { @@ -80,6 +84,7 @@ impl Config { } /// Validate that requests made have a corresponding bearer JWT token +#[tracing::instrument(skip_all, fields(status_code, reason))] async fn oauth_validate( State(auth_config): State, token: Option>>, @@ -100,17 +105,85 @@ async fn oauth_validate( }; let validator = NetworkedTokenValidator::new(&auth_config.audiences, &auth_config.servers); - let token = token.ok_or_else(unauthorized_error)?; - - let valid_token = validator - .validate(token.0) - .await - .ok_or_else(unauthorized_error)?; + let token = token.ok_or_else(|| { + tracing::Span::current().record("reason", "missing_token"); + tracing::Span::current().record("status_code", StatusCode::UNAUTHORIZED.as_u16()); + unauthorized_error() + })?; + + let valid_token = validator.validate(token.0).await.ok_or_else(|| { + tracing::Span::current().record("reason", "invalid_token"); + tracing::Span::current().record("status_code", StatusCode::UNAUTHORIZED.as_u16()); + unauthorized_error() + })?; // Insert new context to ensure that handlers only use our enforced token verification // for propagation request.extensions_mut().insert(valid_token); let response = next.run(request).await; + tracing::Span::current().record("status_code", response.status().as_u16()); Ok(response) } + +#[cfg(test)] +mod tests { + use super::*; + use axum::middleware::from_fn_with_state; + use axum::routing::get; + use axum::{ + Router, + body::Body, + 
http::{Request, StatusCode}, + }; + use http::header::{AUTHORIZATION, WWW_AUTHENTICATE}; + use tower::ServiceExt; // for .oneshot() + use url::Url; + + fn test_config() -> Config { + Config { + servers: vec![Url::parse("http://localhost:1234").unwrap()], + audiences: vec!["test-audience".to_string()], + resource: Url::parse("http://localhost:4000").unwrap(), + resource_documentation: None, + scopes: vec!["read".to_string()], + disable_auth_token_passthrough: false, + } + } + + fn test_router(config: Config) -> Router { + Router::new() + .route("/test", get(|| async { "ok" })) + .layer(from_fn_with_state(config, oauth_validate)) + } + + #[tokio::test] + async fn missing_token_returns_unauthorized() { + let config = test_config(); + let app = test_router(config.clone()); + let req = Request::builder().uri("/test").body(Body::empty()).unwrap(); + let res = app.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::UNAUTHORIZED); + let headers = res.headers(); + let www_auth = headers.get(WWW_AUTHENTICATE).unwrap().to_str().unwrap(); + assert!(www_auth.contains("Bearer")); + assert!(www_auth.contains("resource_metadata")); + } + + #[tokio::test] + async fn invalid_token_returns_unauthorized() { + let config = test_config(); + let app = test_router(config.clone()); + let req = Request::builder() + .uri("/test") + .header(AUTHORIZATION, "Bearer invalidtoken") + .body(Body::empty()) + .unwrap(); + let res = app.oneshot(req).await.unwrap(); + assert_eq!(res.status(), StatusCode::UNAUTHORIZED); + let headers = res.headers(); + let www_auth = headers.get(WWW_AUTHENTICATE).unwrap().to_str().unwrap(); + assert!(www_auth.contains("Bearer")); + assert!(www_auth.contains("resource_metadata")); + } +} diff --git a/crates/apollo-mcp-server/src/cors.rs b/crates/apollo-mcp-server/src/cors.rs new file mode 100644 index 00000000..8b8facd9 --- /dev/null +++ b/crates/apollo-mcp-server/src/cors.rs @@ -0,0 +1,837 @@ +use http::Method; +use regex::Regex; +use 
schemars::JsonSchema; +use serde::Deserialize; +use tower_http::cors::{AllowOrigin, Any, CorsLayer}; +use url::Url; + +use crate::errors::ServerError; + +/// CORS configuration options +#[derive(Debug, Clone, Deserialize, JsonSchema)] +#[serde(default)] +pub struct CorsConfig { + /// Enable CORS support + pub enabled: bool, + + /// List of allowed origins (exact match) + pub origins: Vec, + + /// List of origin patterns (regex matching) + pub match_origins: Vec, + + /// Allow any origin (use with caution) + pub allow_any_origin: bool, + + /// Allow credentials in CORS requests + pub allow_credentials: bool, + + /// Allowed HTTP methods + pub allow_methods: Vec, + + /// Allowed request headers + pub allow_headers: Vec, + + /// Headers exposed to the browser + pub expose_headers: Vec, + + /// Max age for preflight cache (in seconds) + pub max_age: Option, +} + +impl Default for CorsConfig { + fn default() -> Self { + Self { + enabled: false, + origins: Vec::new(), + match_origins: Vec::new(), + allow_any_origin: false, + allow_credentials: false, + allow_methods: vec![ + "GET".to_string(), + "POST".to_string(), + "DELETE".to_string(), // Clients that no longer need a particular session SHOULD send an HTTP DELETE to explicitly terminate the session + ], + allow_headers: vec![ + "content-type".to_string(), + "mcp-protocol-version".to_string(), // https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#protocol-version-header + "mcp-session-id".to_string(), // https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#session-management + "traceparent".to_string(), // https://www.w3.org/TR/trace-context/#traceparent-header + "tracestate".to_string(), // https://www.w3.org/TR/trace-context/#tracestate-header + ], + expose_headers: vec![ + "mcp-session-id".to_string(), // https://modelcontextprotocol.io/specification/2025-06-18/basic/transports#session-management + "traceparent".to_string(), // 
https://www.w3.org/TR/trace-context/#traceparent-header + "tracestate".to_string(), // https://www.w3.org/TR/trace-context/#tracestate-header + ], + max_age: Some(7200), // 2 hours + } + } +} + +impl CorsConfig { + /// Build a CorsLayer from this configuration + pub fn build_cors_layer(&self) -> Result { + if !self.enabled { + return Err(ServerError::Cors("CORS is not enabled".to_string())); + } + + // Validate configuration + self.validate()?; + + let mut cors = CorsLayer::new(); + + // Configure origins + if self.allow_any_origin { + cors = cors.allow_origin(Any); + } else { + // Collect all origins (exact and regex patterns) + let mut origin_list = Vec::new(); + + // Parse exact origins + for origin_str in &self.origins { + let origin = origin_str.parse::().map_err(|e| { + ServerError::Cors(format!("Invalid origin '{}': {}", origin_str, e)) + })?; + origin_list.push(origin); + } + + // For regex patterns, we need to use a predicate function + if !self.match_origins.is_empty() { + // Parse regex patterns to validate them + let mut regex_patterns = Vec::new(); + for pattern in &self.match_origins { + let regex = Regex::new(pattern).map_err(|e| { + ServerError::Cors(format!("Invalid origin pattern '{}': {}", pattern, e)) + })?; + regex_patterns.push(regex); + } + + // Use predicate function that combines exact origins and regex patterns + let exact_origins = origin_list; + cors = cors.allow_origin(AllowOrigin::predicate(move |origin, _| { + let origin_str = origin.to_str().unwrap_or(""); + + // Check exact origins + if exact_origins + .iter() + .any(|exact| exact.as_bytes() == origin.as_bytes()) + { + return true; + } + + // Check regex patterns + regex_patterns + .iter() + .any(|regex| regex.is_match(origin_str)) + })); + } else if !origin_list.is_empty() { + // Only exact origins, no regex + cors = cors.allow_origin(origin_list); + } + } + + // Configure credentials + cors = cors.allow_credentials(self.allow_credentials); + + // Configure methods + let methods: 
Result, _> = self + .allow_methods + .iter() + .map(|m| m.parse::()) + .collect(); + let methods = + methods.map_err(|e| ServerError::Cors(format!("Invalid HTTP method: {}", e)))?; + cors = cors.allow_methods(methods); + + // Configure headers + if !self.allow_headers.is_empty() { + let headers: Result, _> = self + .allow_headers + .iter() + .map(|h| h.parse::()) + .collect(); + let headers = + headers.map_err(|e| ServerError::Cors(format!("Invalid header name: {}", e)))?; + cors = cors.allow_headers(headers); + } + + // Configure exposed headers + if !self.expose_headers.is_empty() { + let headers: Result, _> = self + .expose_headers + .iter() + .map(|h| h.parse::()) + .collect(); + let headers = headers + .map_err(|e| ServerError::Cors(format!("Invalid exposed header name: {}", e)))?; + cors = cors.expose_headers(headers); + } + + // Configure max age + if let Some(max_age) = self.max_age { + cors = cors.max_age(std::time::Duration::from_secs(max_age)); + } + + Ok(cors) + } + + /// Validate the configuration for consistency + fn validate(&self) -> Result<(), ServerError> { + // Cannot use credentials with any origin + if self.allow_credentials && self.allow_any_origin { + return Err(ServerError::Cors( + "Cannot use allow_credentials with allow_any_origin for security reasons. 
See: https://developer.mozilla.org/en-US/docs/Web/HTTP/Guides/CORS#requests_with_credentials" + .to_string(), + )); + } + + // Must have at least some origin configuration if not allowing any origin + if !self.allow_any_origin && self.origins.is_empty() && self.match_origins.is_empty() { + return Err(ServerError::Cors( + "Must specify origins, match_origins, or allow_any_origin when CORS is enabled" + .to_string(), + )); + } + + // Validate that origin strings are valid URLs + for origin in &self.origins { + Url::parse(origin).map_err(|e| { + ServerError::Cors(format!("Invalid origin URL '{}': {}", origin, e)) + })?; + } + + // Validate regex patterns + for pattern in &self.match_origins { + Regex::new(pattern).map_err(|e| { + ServerError::Cors(format!("Invalid regex pattern '{}': {}", pattern, e)) + })?; + } + + Ok(()) + } +} + +#[cfg(test)] +mod tests { + use super::*; + use axum::{Router, routing::get}; + use http::{HeaderValue, Method, Request, StatusCode}; + use tower::util::ServiceExt; + + #[test] + fn test_default_config() { + let config = CorsConfig::default(); + assert!(!config.enabled); + assert!(!config.allow_any_origin); + assert!(!config.allow_credentials); + assert_eq!( + config.allow_methods, + vec!["GET".to_string(), "POST".to_string(), "DELETE".to_string()] + ); + assert_eq!( + config.allow_headers, + vec![ + "content-type".to_string(), + "mcp-protocol-version".to_string(), + "mcp-session-id".to_string(), + "traceparent".to_string(), + "tracestate".to_string(), + ] + ); + assert_eq!( + config.expose_headers, + vec![ + "mcp-session-id".to_string(), + "traceparent".to_string(), + "tracestate".to_string(), + ] + ); + assert_eq!(config.max_age, Some(7200)); + } + + #[test] + fn test_disabled_cors_fails_to_build() { + let config = CorsConfig::default(); + assert!(config.build_cors_layer().is_err()); + } + + #[test] + fn test_allow_any_origin_builds() { + let config = CorsConfig { + enabled: true, + allow_any_origin: true, + ..Default::default() + }; + 
assert!(config.build_cors_layer().is_ok()); + } + + #[test] + fn test_specific_origins_build() { + let config = CorsConfig { + enabled: true, + origins: vec![ + "http://localhost:3000".to_string(), + "https://studio.apollographql.com".to_string(), + ], + ..Default::default() + }; + assert!(config.build_cors_layer().is_ok()); + } + + #[test] + fn test_regex_origins_build() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["^http://localhost:[0-9]+$".to_string()], + ..Default::default() + }; + assert!(config.build_cors_layer().is_ok()); + } + + #[test] + fn test_credentials_with_any_origin_fails() { + let config = CorsConfig { + enabled: true, + allow_any_origin: true, + allow_credentials: true, + ..Default::default() + }; + assert!(config.build_cors_layer().is_err()); + } + + #[test] + fn test_no_origins_fails() { + let config = CorsConfig { + enabled: true, + allow_any_origin: false, + origins: vec![], + match_origins: vec![], + ..Default::default() + }; + assert!(config.build_cors_layer().is_err()); + } + + #[test] + fn test_invalid_origin_fails() { + let config = CorsConfig { + enabled: true, + origins: vec!["not-a-valid-url".to_string()], + ..Default::default() + }; + assert!(config.build_cors_layer().is_err()); + } + + #[test] + fn test_invalid_regex_fails() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["[invalid regex".to_string()], + ..Default::default() + }; + assert!(config.build_cors_layer().is_err()); + } + + #[test] + fn test_invalid_method_fails() { + let config = CorsConfig { + enabled: true, + origins: vec!["http://localhost:3000".to_string()], + allow_methods: vec!["invalid method with spaces".to_string()], + ..Default::default() + }; + assert!(config.build_cors_layer().is_err()); + } + + #[tokio::test] + async fn test_preflight_request_with_exact_origin() { + let config = CorsConfig { + enabled: true, + origins: vec!["http://localhost:3000".to_string()], + max_age: Some(3600), + ..Default::default() + }; + + 
let app = Router::new().layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "http://localhost:3000") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("http://localhost:3000")) + ); + } + + #[tokio::test] + async fn test_simple_request_with_exact_origin() { + let config = CorsConfig { + enabled: true, + origins: vec!["http://localhost:3000".to_string()], + ..Default::default() + }; + + let app = Router::new() + .route("/health", get(|| async { "test response" })) + .layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::GET) + .uri("/health") + .header("Origin", "http://localhost:3000") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("http://localhost:3000")) + ); + } + + #[tokio::test] + async fn test_preflight_request_with_regex_origin() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["^http://localhost:[0-9]+$".to_string()], + ..Default::default() + }; + + let app = Router::new().layer(config.build_cors_layer().unwrap()); + + // Test matching port + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "http://localhost:4321") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = 
app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("http://localhost:4321")) + ); + } + + #[tokio::test] + async fn test_simple_request_with_regex_origin() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["^https://.*\\.apollographql\\.com$".to_string()], + ..Default::default() + }; + + let app = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "https://www.apollographql.com") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("https://www.apollographql.com")) + ); + } + + #[tokio::test] + async fn test_mixed_exact_and_regex_origins() { + let config = CorsConfig { + enabled: true, + origins: vec!["http://localhost:3000".to_string()], + match_origins: vec!["^https://.*\\.apollographql\\.com$".to_string()], + ..Default::default() + }; + + let cors_layer = config.build_cors_layer().unwrap(); + + // Test exact origin + let app1 = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(cors_layer.clone()); + + let request1 = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "http://localhost:3000") + .body(axum::body::Body::empty()) + .unwrap(); + + let response1 = app1.oneshot(request1).await.unwrap(); + assert_eq!( + response1.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("http://localhost:3000")) + ); + + // Test regex origin + let app2 = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(cors_layer); + + let request2 = Request::builder() + 
.method(Method::GET) + .uri("/test") + .header("Origin", "https://studio.apollographql.com") + .body(axum::body::Body::empty()) + .unwrap(); + + let response2 = app2.oneshot(request2).await.unwrap(); + assert_eq!( + response2.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static( + "https://studio.apollographql.com" + )) + ); + } + + #[tokio::test] + async fn test_preflight_request_rejected_origin_exact() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + ..Default::default() + }; + + let app = Router::new().layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "https://blocked.com") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert!( + response + .headers() + .get("access-control-allow-origin") + .is_none() + ); + } + + #[tokio::test] + async fn test_simple_request_rejected_origin_exact() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + ..Default::default() + }; + + let app = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "https://blocked.com") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert!( + response + .headers() + .get("access-control-allow-origin") + .is_none() + ); + } + + #[tokio::test] + async fn test_preflight_request_rejected_origin_regex() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["^https://.*\\.allowed\\.com$".to_string()], + 
..Default::default() + }; + + let cors_layer = config.build_cors_layer().unwrap(); + let app = Router::new().layer(cors_layer); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "https://malicious.blocked.com") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert!( + response + .headers() + .get("access-control-allow-origin") + .is_none() + ); + } + + #[tokio::test] + async fn test_simple_request_rejected_origin_regex() { + let config = CorsConfig { + enabled: true, + match_origins: vec!["^https://.*\\.allowed\\.com$".to_string()], + ..Default::default() + }; + + let cors_layer = config.build_cors_layer().unwrap(); + let app = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(cors_layer); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "https://malicious.blocked.com") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert!( + response + .headers() + .get("access-control-allow-origin") + .is_none() + ); + } + + #[tokio::test] + async fn test_preflight_request_any_origin() { + let config = CorsConfig { + enabled: true, + allow_any_origin: true, + ..Default::default() + }; + + let app = Router::new().layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "https://any-domain.com") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); 
+ + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("*")) + ); + } + + #[tokio::test] + async fn test_simple_request_any_origin() { + let config = CorsConfig { + enabled: true, + allow_any_origin: true, + ..Default::default() + }; + + let app = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "https://any-domain.com") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-origin"), + Some(&HeaderValue::from_static("*")) + ); + } + + #[tokio::test] + async fn test_non_cors_request() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + ..Default::default() + }; + + let cors_layer = config.build_cors_layer().unwrap(); + let app = Router::new() + .route("/test", get(|| async { "test response" })) + .layer(cors_layer); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + // No Origin header + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + // Request should succeed but without CORS headers + assert_eq!(response.status(), StatusCode::OK); + assert!( + response + .headers() + .get("access-control-allow-origin") + .is_none() + ); + } + + #[tokio::test] + async fn test_multiple_request_headers() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + allow_headers: vec![ + "content-type".to_string(), + "authorization".to_string(), + "x-api-key".to_string(), + "x-requested-with".to_string(), + ], + ..Default::default() + }; + + let app = 
Router::new().layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "https://allowed.com") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization,x-api-key,disallowed-header", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + let allow_headers = response + .headers() + .get("access-control-allow-headers") + .unwrap(); + let headers_str = allow_headers.to_str().unwrap(); + assert!(headers_str.contains("content-type")); + assert!(headers_str.contains("authorization")); + assert!(headers_str.contains("x-api-key")); + assert!(!headers_str.contains("disallowed-header")); + } + + #[tokio::test] + async fn test_preflight_request_with_credentials() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + allow_credentials: true, + ..Default::default() + }; + + let app = Router::new().layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::OPTIONS) + .uri("/test") + .header("Origin", "https://allowed.com") + .header("Access-Control-Request-Method", "POST") + .header( + "Access-Control-Request-Headers", + "content-type,authorization", + ) + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-credentials"), + Some(&HeaderValue::from_static("true")) + ); + } + + #[tokio::test] + async fn test_simple_request_with_credentials() { + let config = CorsConfig { + enabled: true, + origins: vec!["https://allowed.com".to_string()], + allow_credentials: true, + ..Default::default() + }; + + let app = Router::new() + .route("/test", get(|| async { "test response" })) + 
.layer(config.build_cors_layer().unwrap()); + + let request = Request::builder() + .method(Method::GET) + .uri("/test") + .header("Origin", "https://allowed.com") + .header("Cookie", "sessionid=abc123") + .body(axum::body::Body::empty()) + .unwrap(); + + let response = app.oneshot(request).await.unwrap(); + + assert_eq!(response.status(), StatusCode::OK); + assert_eq!( + response.headers().get("access-control-allow-credentials"), + Some(&HeaderValue::from_static("true")) + ); + } +} diff --git a/crates/apollo-mcp-server/src/custom_scalar_map.rs b/crates/apollo-mcp-server/src/custom_scalar_map.rs index 69cd820a..746e1b23 100644 --- a/crates/apollo-mcp-server/src/custom_scalar_map.rs +++ b/crates/apollo-mcp-server/src/custom_scalar_map.rs @@ -1,8 +1,6 @@ use crate::errors::ServerError; -use rmcp::{ - schemars::schema::{Schema, SchemaObject, SingleOrVec}, - serde_json, -}; +use rmcp::serde_json; +use schemars::Schema; use std::{collections::HashMap, path::PathBuf, str::FromStr}; impl FromStr for CustomScalarMap { @@ -14,26 +12,23 @@ impl FromStr for CustomScalarMap { serde_json::from_str(string_custom_scalar_file) .map_err(ServerError::CustomScalarConfig)?; - // Validate each of the values in the map and coerce into schemars::schema::SchemaObject + // Try to parse each as a schema let custom_scalar_map = parsed_custom_scalar_file .into_iter() .map(|(key, value)| { - let value_string = value.to_string(); - // The only way I could find to do this was to reparse it. - let schema: SchemaObject = serde_json::from_str(value_string.as_str()) - .map_err(ServerError::CustomScalarConfig)?; - - if has_invalid_schema(&Schema::Object(schema.clone())) { - Err(ServerError::CustomScalarJsonSchema(value)) - } else { - Ok((key, schema)) + // The schemars crate does not enforce schema validation anymore, so we use jsonschema + // to ensure that the supplied schema is valid. 
+ if let Err(e) = jsonschema::meta::validate(&value) { + return Err(ServerError::CustomScalarJsonSchema(e.to_string())); } + + Schema::try_from(value.clone()) + .map(|schema| (key, schema)) + .map_err(|e| ServerError::CustomScalarJsonSchema(e.to_string())) }) .collect::>()?; - // panic!("hello2! {:?}", parsed_custom_scalar_file); - - Ok::<_, ServerError>(CustomScalarMap(custom_scalar_map)) + Ok(CustomScalarMap(custom_scalar_map)) } } @@ -49,44 +44,19 @@ impl TryFrom<&PathBuf> for CustomScalarMap { } #[derive(Debug, Clone)] -pub struct CustomScalarMap(HashMap); +pub struct CustomScalarMap(HashMap); impl CustomScalarMap { - pub fn get(&self, key: &str) -> Option<&SchemaObject> { + pub fn get(&self, key: &str) -> Option<&Schema> { self.0.get(key) } } -// Unknown keys will be put into "extensions" in the schema object, check for those and consider those invalid -fn has_invalid_schema(schema: &Schema) -> bool { - match schema { - Schema::Object(schema_object) => { - !schema_object.extensions.is_empty() - || schema_object - .object - .as_ref() - .is_some_and(|object| object.properties.values().any(has_invalid_schema)) - || schema_object.array.as_ref().is_some_and(|object| { - object.items.as_ref().is_some_and(|items| match items { - SingleOrVec::Single(item) => has_invalid_schema(item), - SingleOrVec::Vec(items) => items.iter().any(has_invalid_schema), - }) - }) - } - Schema::Bool(_) => false, - } -} - #[cfg(test)] mod tests { - use std::{ - collections::{BTreeMap, HashMap}, - str::FromStr, - }; + use std::{collections::HashMap, str::FromStr}; - use rmcp::schemars::schema::{ - InstanceType, ObjectValidation, Schema, SchemaObject, SingleOrVec, - }; + use schemars::json_schema; use crate::custom_scalar_map::CustomScalarMap; @@ -103,7 +73,8 @@ mod tests { #[test] fn only_spaces() { - let result = CustomScalarMap::from_str(" ").err().unwrap(); + let result = + CustomScalarMap::from_str(" ").expect_err("empty space should be valid schema"); 
insta::assert_debug_snapshot!(result, @r#" CustomScalarConfig( @@ -128,20 +99,17 @@ mod tests { let result = CustomScalarMap::from_str( r###"{ "custom": { - "test": true + "type": "bool" } }"###, ) - .err() - .unwrap(); + .expect_err("schema should have been invalid"); - insta::assert_debug_snapshot!(result, @r#" + insta::assert_debug_snapshot!(result, @r###" CustomScalarJsonSchema( - Object { - "test": Bool(true), - }, + "\"bool\" is not valid under any of the schemas listed in the 'anyOf' keyword", ) - "#) + "###) } #[test] @@ -152,25 +120,17 @@ mod tests { "type": "object", "properties": { "test": { - "test": true + "type": "obbbject" } } } }"###, ) - .err() - .unwrap(); + .expect_err("schema should have been invalid"); insta::assert_debug_snapshot!(result, @r#" CustomScalarJsonSchema( - Object { - "type": String("object"), - "properties": Object { - "test": Object { - "test": Bool(true), - }, - }, - }, + "\"obbbject\" is not valid under any of the schemas listed in the 'anyOf' keyword", ) "#) } @@ -196,31 +156,23 @@ mod tests { let expected_data = HashMap::from_iter([ ( "simple".to_string(), - SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::String))), - ..Default::default() - }, + json_schema!({ + "type": "string", + }), ), ( "complex".to_string(), - SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Object))), - object: Some(Box::new(ObjectValidation { - properties: BTreeMap::from_iter([( - "name".to_string(), - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new( - InstanceType::String, - ))), - ..Default::default() - }), - )]), - ..Default::default() - })), - ..Default::default() - }, + json_schema!({ + "type": "object", + "properties": { + "name": { + "type": "string" + } + } + }), ), ]); + assert_eq!(result, expected_data); } } diff --git a/crates/apollo-mcp-server/src/errors.rs b/crates/apollo-mcp-server/src/errors.rs index e19dc152..51771036 100644 --- 
a/crates/apollo-mcp-server/src/errors.rs +++ b/crates/apollo-mcp-server/src/errors.rs @@ -54,7 +54,7 @@ pub enum ServerError { GraphQLDocumentSchema(Box>), #[error("Federation error in GraphQL schema: {0}")] - Federation(FederationError), + Federation(Box), #[error("Invalid JSON: {0}")] Json(#[from] serde_json::Error), @@ -78,7 +78,7 @@ pub enum ServerError { CustomScalarConfig(serde_json::Error), #[error("invalid json schema: {0}")] - CustomScalarJsonSchema(serde_json::Value), + CustomScalarJsonSchema(String), #[error("Missing environment variable: {0}")] EnvironmentVariable(String), @@ -93,13 +93,16 @@ pub enum ServerError { StartupError(#[from] JoinError), #[error("Failed to initialize MCP server")] - McpInitializeError(#[from] rmcp::service::ServerInitializeError), + McpInitializeError(#[from] Box), #[error(transparent)] UrlParseError(ParseError), #[error("Failed to index schema: {0}")] Indexing(#[from] IndexingError), + + #[error("CORS configuration error: {0}")] + Cors(String), } /// An MCP tool error diff --git a/crates/apollo-mcp-server/src/event.rs b/crates/apollo-mcp-server/src/event.rs index f5f4e0b4..a8c4651f 100644 --- a/crates/apollo-mcp-server/src/event.rs +++ b/crates/apollo-mcp-server/src/event.rs @@ -3,6 +3,7 @@ use apollo_mcp_registry::platform_api::operation_collections::error::CollectionE use apollo_mcp_registry::uplink::schema::event::Event as SchemaEvent; use std::fmt::Debug; use std::fmt::Formatter; +use std::fmt::Result; use std::io; /// MCP Server events @@ -24,7 +25,7 @@ pub enum Event { } impl Debug for Event { - fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result { + fn fmt(&self, f: &mut Formatter) -> Result { match self { Event::SchemaUpdated(event) => { write!(f, "SchemaUpdated({event:?})") @@ -44,3 +45,46 @@ impl Debug for Event { } } } + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn test_debug_event_schema_updated() { + let event = Event::SchemaUpdated(SchemaEvent::NoMoreSchema); + let output = format!("{:?}", 
event); + assert_eq!(output, "SchemaUpdated(NoMoreSchema)"); + } + + #[test] + fn test_debug_event_operations_updated() { + let event = Event::OperationsUpdated(vec![]); + let output = format!("{:?}", event); + assert_eq!(output, "OperationsChanged([])"); + } + + #[test] + fn test_debug_event_operation_error() { + let event = Event::OperationError(std::io::Error::other("TEST"), None); + let output = format!("{:?}", event); + assert_eq!( + output, + r#"OperationError(Custom { kind: Other, error: "TEST" }, None)"# + ); + } + + #[test] + fn test_debug_event_collection_error() { + let event = Event::CollectionError(CollectionError::Response("TEST".to_string())); + let output = format!("{:?}", event); + assert_eq!(output, r#"OperationError(Response("TEST"))"#); + } + + #[test] + fn test_debug_event_shutdown() { + let event = Event::Shutdown; + let output = format!("{:?}", event); + assert_eq!(output, "Shutdown"); + } +} diff --git a/crates/apollo-mcp-server/src/explorer.rs b/crates/apollo-mcp-server/src/explorer.rs index d422e798..e0cbfe79 100644 --- a/crates/apollo-mcp-server/src/explorer.rs +++ b/crates/apollo-mcp-server/src/explorer.rs @@ -82,8 +82,10 @@ impl Explorer { let url = self.create_explorer_url(input)?; debug!(?url, input=?pretty, "Created URL to open operation in Apollo Explorer"); Ok(CallToolResult { - content: vec![Content::text(url)], + content: vec![Content::text(url.clone())], + meta: None, is_error: None, + structured_content: Some(Value::Array(vec![url.into()])), }) } } diff --git a/crates/apollo-mcp-server/src/graphql.rs b/crates/apollo-mcp-server/src/graphql.rs index 7d09b782..8ed86941 100644 --- a/crates/apollo-mcp-server/src/graphql.rs +++ b/crates/apollo-mcp-server/src/graphql.rs @@ -1,11 +1,17 @@ //! 
Execute GraphQL operations from an MCP tool use crate::errors::McpError; +use crate::generated::telemetry::{TelemetryAttribute, TelemetryMetric}; +use crate::meter; +use opentelemetry::KeyValue; use reqwest::header::{HeaderMap, HeaderValue}; +use reqwest_middleware::{ClientBuilder, Extension}; +use reqwest_tracing::{OtelName, TracingMiddleware}; use rmcp::model::{CallToolResult, Content, ErrorCode}; use serde_json::{Map, Value}; use url::Url; +#[derive(Debug)] pub struct Request<'a> { pub input: Value, pub endpoint: &'a Url, @@ -33,7 +39,11 @@ pub trait Executable { fn headers(&self, default_headers: &HeaderMap) -> HeaderMap; /// Execute as a GraphQL operation using the endpoint and headers + #[tracing::instrument(skip(self, request))] async fn execute(&self, request: Request<'_>) -> Result { + let meter = &meter::METER; + let start = std::time::Instant::now(); + let mut op_id: Option = None; let client_metadata = serde_json::json!({ "name": "mcp", "version": std::env!("CARGO_PKG_VERSION") @@ -55,6 +65,7 @@ pub trait Executable { "clientLibrary": client_metadata, }), ); + op_id = Some(id.to_string()); } else { let OperationDetails { query, @@ -70,11 +81,17 @@ pub trait Executable { ); if let Some(op_name) = operation_name { + op_id = Some(op_name.clone()); request_body.insert(String::from("operationName"), Value::String(op_name)); } } - reqwest::Client::new() + let client = ClientBuilder::new(reqwest::Client::new()) + .with_init(Extension(OtelName("mcp-graphql-client".into()))) + .with(TracingMiddleware::default()) + .build(); + + let result = client .post(request.endpoint.as_str()) .headers(self.headers(&request.headers)) .body(Value::Object(request_body).to_string()) @@ -107,15 +124,52 @@ pub trait Executable { .filter(|value| !matches!(value, Value::Null)) .is_none(), ), - }) + meta: None, + structured_content: Some(json), + }); + + // Record response metrics + let attributes = vec![ + KeyValue::new( + TelemetryAttribute::Success.to_key(), + 
result.as_ref().is_ok_and(|r| r.is_error != Some(true)), + ), + KeyValue::new( + TelemetryAttribute::OperationId.to_key(), + op_id.unwrap_or("".to_string()), + ), + KeyValue::new( + TelemetryAttribute::OperationSource.to_key(), + match self.persisted_query_id() { + Some(_) => "persisted_query", + None => "operation", + }, + ), + ]; + meter + .f64_histogram(TelemetryMetric::OperationDuration.as_str()) + .build() + .record(start.elapsed().as_millis() as f64, &attributes); + meter + .u64_counter(TelemetryMetric::OperationCount.as_str()) + .build() + .add(1, &attributes); + + result } } #[cfg(test)] mod test { use crate::errors::McpError; + use crate::generated::telemetry::TelemetryMetric; use crate::graphql::{Executable, OperationDetails, Request}; use http::{HeaderMap, HeaderValue}; + use opentelemetry::global; + use opentelemetry_sdk::metrics::data::{AggregatedMetrics, MetricData}; + use opentelemetry_sdk::metrics::{ + InMemoryMetricExporter, MeterProviderBuilder, PeriodicReader, + }; use serde_json::{Map, Value, json}; use url::Url; @@ -355,4 +409,76 @@ mod test { assert!(result.is_error.is_some()); assert!(result.is_error.unwrap()); } + + #[tokio::test] + async fn validate_metric_attributes_success_false() { + // given + let exporter = InMemoryMetricExporter::default(); + let meter_provider = MeterProviderBuilder::default() + .with_reader(PeriodicReader::builder(exporter.clone()).build()) + .build(); + global::set_meter_provider(meter_provider.clone()); + + let mut server = mockito::Server::new_async().await; + let url = Url::parse(server.url().as_str()).unwrap(); + let mock_request = Request { + input: json!({}), + endpoint: &url, + headers: HeaderMap::new(), + }; + + server + .mock("POST", "/") + .with_status(200) + .with_header("content-type", "application/json") + .with_body(json!({ "data": null, "errors": ["an error"] }).to_string()) + .expect(1) + .create_async() + .await; + + // when + let test_executable = TestExecutableWithPersistedQueryId {}; + let 
result = test_executable.execute(mock_request).await.unwrap(); + + // then + assert!(result.is_error.is_some()); + assert!(result.is_error.unwrap()); + + // Retrieve the finished metrics from the exporter + let finished_metrics = exporter.get_finished_metrics().unwrap(); + + // validate the attributes of the apollo.mcp.operation.count counter + for resource_metrics in finished_metrics { + if let Some(scope_metrics) = resource_metrics + .scope_metrics() + .find(|scope_metrics| scope_metrics.scope().name() == "apollo.mcp") + { + for metric in scope_metrics.metrics() { + if metric.name() == TelemetryMetric::OperationCount.as_str() + && let AggregatedMetrics::U64(MetricData::Sum(data)) = metric.data() + { + for point in data.data_points() { + let attributes = point.attributes(); + let mut attr_map = std::collections::HashMap::new(); + for kv in attributes { + attr_map.insert(kv.key.as_str(), kv.value.as_str()); + } + assert_eq!( + attr_map.get("operation.id").map(|s| s.as_ref()), + Some("mock_operation") + ); + assert_eq!( + attr_map.get("operation.type").map(|s| s.as_ref()), + Some("persisted_query") + ); + assert_eq!( + attr_map.get("success"), + Some(&std::borrow::Cow::Borrowed("false")) + ); + } + } + } + } + } + } } diff --git a/crates/apollo-mcp-server/src/introspection/minify.rs b/crates/apollo-mcp-server/src/introspection/minify.rs index 81af9534..6cc47a8a 100644 --- a/crates/apollo-mcp-server/src/introspection/minify.rs +++ b/crates/apollo-mcp-server/src/introspection/minify.rs @@ -1,6 +1,6 @@ use apollo_compiler::schema::{ExtendedType, Type}; use regex::Regex; -use std::sync::OnceLock; +use std::{collections::HashMap, sync::OnceLock}; pub trait MinifyExt { /// Serialize in minified form @@ -72,6 +72,38 @@ fn minify_input_object(input_object_type: &apollo_compiler::schema::InputObjectT format!("I:{type_name}:{fields}") } +// We should only minify directives that assist the LLM in understanding the schema. 
This included @deprecated +fn minify_directives(directives: &apollo_compiler::ast::DirectiveList) -> String { + let mut result = String::new(); + + static DIRECTIVES_TO_MINIFY: OnceLock> = OnceLock::new(); + let directives_to_minify = + DIRECTIVES_TO_MINIFY.get_or_init(|| HashMap::from([("deprecated", "D")])); + + for directive in directives.iter() { + if let Some(minified_name) = directives_to_minify.get(directive.name.as_str()) { + // Since we're only handling @deprecated right now we can just add the reason and minify it. + // We should handle this more generically in the future. + if !directive.arguments.is_empty() + && let Some(reason) = directive + .arguments + .iter() + .find(|a| a.name == "reason") + .and_then(|a| a.value.as_str()) + { + result.push_str(&format!( + "@{}(\"{}\")", + minified_name, + normalize_description(reason) + )); + } else { + result.push_str(&format!("@{}", minified_name)); + } + } + } + result +} + fn minify_fields( fields: &apollo_compiler::collections::IndexMap< apollo_compiler::Name, @@ -99,6 +131,8 @@ fn minify_fields( // Add field type result.push(':'); result.push_str(&type_name(&field.ty)); + result.push_str(&minify_directives(&field.directives)); + result.push(','); } @@ -128,6 +162,7 @@ fn minify_input_fields( result.push_str(field_name.as_str()); result.push(':'); result.push_str(&type_name(&field.ty)); + result.push_str(&minify_directives(&field.directives)); result.push(','); } @@ -147,13 +182,19 @@ fn minify_arguments( .map(|arg| { if let Some(desc) = arg.description.as_ref() { format!( - "\"{}\"{}:{}", + "\"{}\"{}:{}{}", normalize_description(desc), arg.name.as_str(), - type_name(&arg.ty) + type_name(&arg.ty), + minify_directives(&arg.directives) ) } else { - format!("{}:{}", arg.name.as_str(), type_name(&arg.ty)) + format!( + "{}:{}{}", + arg.name.as_str(), + type_name(&arg.ty), + minify_directives(&arg.directives) + ) } }) .collect::>() @@ -211,3 +252,27 @@ fn normalize_description(desc: &str) -> String { let re = 
WHITESPACE_PATTERN.get_or_init(|| Regex::new(r"\s+").expect("regex pattern compiles")); re.replace_all(desc, "").to_string() } + +#[cfg(test)] +mod tests { + use super::*; + + const TEST_SCHEMA: &str = include_str!("tools/testdata/schema.graphql"); + + #[test] + fn test_minify_schema() { + let schema = apollo_compiler::schema::Schema::parse(TEST_SCHEMA, "schema.graphql") + .expect("Failed to parse schema") + .validate() + .expect("Failed to validate schema"); + + let minified = schema + .types + .iter() + .map(|(_, type_)| format!("{}: {}", type_.name().as_str(), type_.minify())) + .collect::>() + .join("\n"); + + insta::assert_snapshot!(minified); + } +} diff --git a/crates/apollo-mcp-server/src/introspection/snapshots/apollo_mcp_server__introspection__minify__tests__minify_schema.snap b/crates/apollo-mcp-server/src/introspection/snapshots/apollo_mcp_server__introspection__minify__tests__minify_schema.snap new file mode 100644 index 00000000..439853a6 --- /dev/null +++ b/crates/apollo-mcp-server/src/introspection/snapshots/apollo_mcp_server__introspection__minify__tests__minify_schema.snap @@ -0,0 +1,64 @@ +--- +source: crates/apollo-mcp-server/src/introspection/minify.rs +expression: minified +--- +__Schema: T:"AGraphQLSchemadefinesthecapabilitiesofaGraphQLserver.Itexposesallavailabletypesanddirectivesontheserver,aswellastheentrypointsforquery,mutation,andsubscriptionoperations."__Schema:description:s,"Alistofalltypessupportedbythisserver."types:[__Type],"Thetypethatqueryoperationswillberootedat."queryType:__Type!,"Ifthisserversupportsmutation,thetypethatmutationoperationswillberootedat."mutationType:__Type,"Ifthisserversupportsubscription,thetypethatsubscriptionoperationswillberootedat."subscriptionType:__Type,"Alistofalldirectivessupportedbythisserver."directives:[__Directive] +__Type: 
T:"ThefundamentalunitofanyGraphQLSchemaisthetype.TherearemanykindsoftypesinGraphQLasrepresentedbythe`__TypeKind`enum.Dependingonthekindofatype,certainfieldsdescribeinformationaboutthattype.Scalartypesprovidenoinformationbeyondaname,descriptionandoptional`specifiedByURL`,whileEnumtypesprovidetheirvalues.ObjectandInterfacetypesprovidethefieldstheydescribe.Abstracttypes,UnionandInterface,providetheObjecttypespossibleatruntime.ListandNonNulltypescomposeothertypes."__Type:kind:__TypeKind!,name:s,description:s,fields(includeDeprecated:b):[__Field],interfaces:[__Type],possibleTypes:[__Type],enumValues(includeDeprecated:b):[__EnumValue],inputFields(includeDeprecated:b):[__InputValue],ofType:__Type,specifiedByURL:s +__TypeKind: E:"Anenumdescribingwhatkindoftypeagiven`__Type`is."__TypeKind:SCALAR,OBJECT,INTERFACE,UNION,ENUM,INPUT_OBJECT,LIST,NON_NULL +__Field: T:"ObjectandInterfacetypesaredescribedbyalistofFields,eachofwhichhasaname,potentiallyalistofarguments,andareturntype."__Field:name:s!,description:s,args(includeDeprecated:b):[__InputValue],type:__Type!,isDeprecated:b!,deprecationReason:s +__InputValue: T:"ArgumentsprovidedtoFieldsorDirectivesandtheinputfieldsofanInputObjectarerepresentedasInputValueswhichdescribetheirtypeandoptionallyadefaultvalue."__InputValue:name:s!,description:s,type:__Type!,"AGraphQL-formattedstringrepresentingthedefaultvalueforthisinputvalue."defaultValue:s,isDeprecated:b!,deprecationReason:s +__EnumValue: T:"OnepossiblevalueforagivenEnum.Enumvaluesareuniquevalues,notaplaceholderforastringornumericvalue.HoweveranEnumvalueisreturnedinaJSONresponseasastring."__EnumValue:name:s!,description:s,isDeprecated:b!,deprecationReason:s +__Directive: 
T:"ADirectiveprovidesawaytodescribealternateruntimeexecutionandtypevalidationbehaviorinaGraphQLdocument.Insomecases,youneedtoprovideoptionstoalterGraphQL'sexecutionbehaviorinwaysfieldargumentswillnotsuffice,suchasconditionallyincludingorskippingafield.Directivesprovidethisbydescribingadditionalinformationtotheexecutor."__Directive:name:s!,description:s,locations:[__DirectiveLocation],args(includeDeprecated:b):[__InputValue],isRepeatable:b! +__DirectiveLocation: E:"ADirectivecanbeadjacenttomanypartsoftheGraphQLlanguage,a__DirectiveLocationdescribesonesuchpossibleadjacencies."__DirectiveLocation:QUERY,MUTATION,SUBSCRIPTION,FIELD,FRAGMENT_DEFINITION,FRAGMENT_SPREAD,INLINE_FRAGMENT,VARIABLE_DEFINITION,SCHEMA,SCALAR,OBJECT,FIELD_DEFINITION,ARGUMENT_DEFINITION,INTERFACE,UNION,ENUM,ENUM_VALUE,INPUT_OBJECT,INPUT_FIELD_DEFINITION +Int: i +Float: f +String: s +Boolean: b +ID: d +DateTime: DateTime +JSON: JSON +Upload: Upload +UserRole: E:UserRole:ADMIN,MODERATOR,USER,GUEST +ContentStatus: E:ContentStatus:DRAFT,PUBLISHED,ARCHIVED,DELETED +NotificationPriority: E:NotificationPriority:LOW,MEDIUM,HIGH,URGENT +MediaType: E:MediaType:IMAGE,VIDEO,AUDIO,DOCUMENT +Node: F:Node:id:d!,createdAt:DateTime!,updatedAt:DateTime! +Content: F:Content:id:d!,title:s!,status:ContentStatus!,author:User!,metadata:JSON +User: T:User:id:d!,createdAt:DateTime!,updatedAt:DateTime!,username:s!,email:s!,role:UserRole!,profile:UserProfile,posts:[Post],comments:[Comment],notifications:[Notification],preferences:UserPreferences! +UserProfile: T:UserProfile:firstName:s,lastName:s,bio:s,avatar:Media,socialLinks:[SocialLink],location:Location +Location: T:Location:country:s!,city:s,coordinates:Coordinates +Coordinates: T:Coordinates:latitude:f!,longitude:f! +SocialLink: T:SocialLink:platform:s!,url:s!,verified:b! +Post: T:Post:id:d!,createdAt:DateTime!,updatedAt:DateTime!,title:s!,content:s!,status:ContentStatus!,author:User!,metadata:JSON,comments:[Comment],media:[Media],tags:[Tag],analytics:PostAnalytics! 
+Comment: T:Comment:id:d!,createdAt:DateTime!,updatedAt:DateTime!,content:s!,author:User!,post:Post!,parentComment:Comment,replies:[Comment],reactions:[Reaction] +Media: T:Media:id:d!,type:MediaType!,url:s!,thumbnail:s,metadata:MediaMetadata!,uploader:User! +MediaMetadata: T:MediaMetadata:size:i!,format:s!,dimensions:Dimensions,duration:i +Dimensions: T:Dimensions:width:i!,height:i! +Tag: T:Tag:id:d!,name:s!,slug:s!,description:s,posts:[Post] +Reaction: T:Reaction:id:d!,type:s!,user:User!,comment:Comment!,createdAt:DateTime! +Notification: T:Notification:id:d!,type:s!,priority:NotificationPriority!,message:s!,recipient:User!,read:b!,createdAt:DateTime!,metadata:JSON +PostAnalytics: T:PostAnalytics:views:i!,likes:i!,shares:i!,comments:i!,engagement:f!,demographics:Demographics! +Demographics: T:Demographics:ageGroups:[AgeGroup],locations:[LocationStats],devices:[DeviceStats] +AgeGroup: T:AgeGroup:range:s!,percentage:f! +LocationStats: T:LocationStats:country:s!,count:i! +DeviceStats: T:DeviceStats:type:s!,count:i! +UserPreferences: T:UserPreferences:theme:s!,oldTheme:s@D,language:s!,notifications:NotificationPreferences!,privacy:PrivacySettings! +NotificationPreferences: T:NotificationPreferences:email:b!,push:b!,sms:b!,frequency:s! +PrivacySettings: T:PrivacySettings:profileVisibility:s!,showEmail:b!,showLocation:b! +CreateUserInput: I:CreateUserInput:username:s!,email:s!,password:s!,role:UserRole,profile:CreateUserProfileInput +CreateUserProfileInput: I:CreateUserProfileInput:firstName:s,lastName:s,bio:s,location:CreateLocationInput +CreateLocationInput: I:CreateLocationInput:country:s!,city:s,coordinates:CreateCoordinatesInput +CreateCoordinatesInput: I:CreateCoordinatesInput:latitude:f!,longitude:f! 
+CreatePostInput: I:CreatePostInput:title:s!,content:s!,status:ContentStatus,tags:[s],media:[Upload] +UpdatePostInput: I:UpdatePostInput:title:s,content:s,status:ContentStatus,tags:[s] +CreateCommentInput: I:CreateCommentInput:content:s!,postId:d!,parentCommentId:d +NotificationFilter: I:NotificationFilter:priority:NotificationPriority,read:b,type:s,startDate:DateTime,endDate:DateTime +Query: T:Query:node(id:d!):Node,user(id:d!):User,post(id:d!):Post,postsOld(filter:[d]):[Post]@D("Usepostsinstead"),posts(filter:PostFilter):[Post],comments(postId:d!):[Comment],notifications(filter:NotificationFilter):[Notification],search(query:s!):SearchResult! +Mutation: T:Mutation:createUser(input:CreateUserInput!):User!,createPost(input:CreatePostInput!):Post!,updatePost(id:d!,input:UpdatePostInput!):Post!,createComment(input:CreateCommentInput!):Comment!,deletePost(id:d!):b!,uploadMedia(file:Upload!):Media!,updateUserPreferences(id:d!,preferences:UserPreferencesInput!):UserPreferences! +Subscription: T:Subscription:postUpdated(id:d!):Post!,newComment(postId:d!):Comment!,notificationReceived(userId:d!):Notification! +SearchResult: U:SearchResult:User,Post,Comment,Tag +PostFilter: I:PostFilter:status:ContentStatus,authorId:d,tags:[s],dateRange:DateRangeInput +DateRangeInput: I:DateRangeInput:start:DateTime!,end:DateTime! 
+UserPreferencesInput: I:UserPreferencesInput:theme:s,language:s,notifications:NotificationPreferencesInput,privacy:PrivacySettingsInput +NotificationPreferencesInput: I:NotificationPreferencesInput:email:b,push:b,sms:b,frequency:s +PrivacySettingsInput: I:PrivacySettingsInput:profileVisibility:s,showEmail:b,showLocation:b diff --git a/crates/apollo-mcp-server/src/introspection/tools/introspect.rs b/crates/apollo-mcp-server/src/introspection/tools/introspect.rs index e7878f1d..0bdcc2b3 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/introspect.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/introspect.rs @@ -27,7 +27,7 @@ pub struct Introspect { } /// Input for the introspect tool. -#[derive(JsonSchema, Deserialize)] +#[derive(JsonSchema, Deserialize, Debug)] pub struct Input { /// The name of the type to get information about. type_name: String, @@ -55,6 +55,7 @@ impl Introspect { } } + #[tracing::instrument(skip(self))] pub async fn execute(&self, input: Input) -> Result { let schema = self.schema.lock().await; let type_name = input.type_name.as_str(); @@ -73,6 +74,8 @@ impl Introspect { return Ok(CallToolResult { content: vec![], is_error: None, + meta: None, + structured_content: None, }); } } @@ -99,6 +102,9 @@ impl Introspect { .map(Content::text) .collect(), is_error: None, + meta: None, + // The content being returned is a raw string, so no need to create structured content for it + structured_content: None, }) } @@ -117,16 +123,12 @@ fn tool_description( minify: bool, ) -> String { if minify { - "Get GraphQL type information - T=type,I=input,E=enum,U=union,F=interface;s=String,i=Int,f=Float,b=Boolean,d=ID;!=required,[]=list,<>=implements;".to_string() + "Get GraphQL type information - T=type,I=input,E=enum,U=union,F=interface;s=String,i=Int,f=Float,b=Boolean,d=ID;@D=deprecated;!=required,[]=list,<>=implements;".to_string() } else { format!( - "Get detailed information about types from the GraphQL schema.{}{}", - root_query_type - 
.map(|t| format!(" Use the type name `{t}` to get root query fields.")) - .unwrap_or_default(), - root_mutation_type - .map(|t| format!(" Use the type name `{t}` to get root mutation fields.")) - .unwrap_or_default() + "Get information about a given GraphQL type defined in the schema. Instructions: Use this tool to explore the schema by providing specific type names. Start with the root query ({}) or mutation ({}) types to discover available fields. If the search tool is also available, use this tool first to get the fields, then use the search tool with relevant field return types and argument input types (ignore default GraphQL scalars) as search terms.", + root_query_type.as_deref().unwrap_or("Query"), + root_mutation_type.as_deref().unwrap_or("Mutation") ) } } @@ -135,3 +137,55 @@ fn tool_description( fn default_depth() -> usize { 1 } + +#[cfg(test)] +mod tests { + use super::*; + use apollo_compiler::Schema; + use apollo_compiler::validation::Valid; + use rstest::{fixture, rstest}; + use std::sync::Arc; + use tokio::sync::Mutex; + + const TEST_SCHEMA: &str = include_str!("testdata/schema.graphql"); + + #[fixture] + fn schema() -> Valid { + Schema::parse(TEST_SCHEMA, "schema.graphql") + .expect("Failed to parse test schema") + .validate() + .expect("Failed to validate test schema") + } + + #[rstest] + #[tokio::test] + async fn test_introspect_tool_description_is_not_minified(schema: Valid) { + let introspect = Introspect::new(Arc::new(Mutex::new(schema)), None, None, false); + + let description = introspect.tool.description.unwrap(); + + assert!( + description + .contains("Get information about a given GraphQL type defined in the schema") + ); + assert!(description.contains("Instructions: Use this tool to explore the schema")); + // Should not contain minification legend + assert!(!description.contains("T=type,I=input")); + // Should mention conditional search tool usage + assert!(description.contains("If the search tool is also available")); + } + + #[rstest] 
+ #[tokio::test] + async fn test_introspect_tool_description_is_minified_with_an_appropriate_legend( + schema: Valid, + ) { + let introspect = Introspect::new(Arc::new(Mutex::new(schema)), None, None, true); + + let description = introspect.tool.description.unwrap(); + + // Should contain minification legend + assert!(description.contains("T=type,I=input,E=enum,U=union,F=interface")); + assert!(description.contains("s=String,i=Int,f=Float,b=Boolean,d=ID")); + } +} diff --git a/crates/apollo-mcp-server/src/introspection/tools/search.rs b/crates/apollo-mcp-server/src/introspection/tools/search.rs index 8ab6e808..595f11da 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/search.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/search.rs @@ -36,7 +36,7 @@ pub struct Search { } /// Input for the search tool. -#[derive(JsonSchema, Deserialize)] +#[derive(JsonSchema, Deserialize, Debug)] pub struct Input { /// The search terms terms: Vec, @@ -75,9 +75,9 @@ impl Search { tool: Tool::new( SEARCH_TOOL_NAME, format!( - "Search a GraphQL schema{}", + "Search a GraphQL schema for types matching the provided search terms. Returns complete type definitions including all related types needed to construct GraphQL operations. Instructions: If the introspect tool is also available, you can discover type names by using the introspect tool starting from the root Query or Mutation types. 
Avoid reusing previously searched terms for more efficient exploration.{}", if minify { - " - T=type,I=input,E=enum,U=union,F=interface;s=String,i=Int,f=Float,b=Boolean,d=ID;!=required,[]=list,<>=implements" + " - T=type,I=input,E=enum,U=union,F=interface;s=String,i=Int,f=Float,b=Boolean,d=ID;@D=deprecated;!=required,[]=list,<>=implements" } else { "" } @@ -87,6 +87,7 @@ impl Search { }) } + #[tracing::instrument(skip(self))] pub async fn execute(&self, input: Input) -> Result { let mut root_paths = self .index @@ -167,6 +168,10 @@ impl Search { .map(Content::text) .collect(), is_error: None, + meta: None, + + // Note: The returned content is treated as text, so no need to structure its output + structured_content: None, }) } } @@ -242,4 +247,37 @@ mod tests { "Expected to find the createUser mutation in search results" ); } + + #[rstest] + #[tokio::test] + async fn test_search_tool_description_is_not_minified(schema: Valid) { + let schema = Arc::new(Mutex::new(schema)); + let search = Search::new(schema.clone(), false, 1, 15_000_000, false) + .expect("Failed to create search tool"); + + let description = search.tool.description.unwrap(); + + assert!( + description + .contains("Search a GraphQL schema for types matching the provided search terms") + ); + assert!(description.contains("Instructions: If the introspect tool is also available")); + assert!(description.contains("Avoid reusing previously searched terms")); + // Should not contain minification legend + assert!(!description.contains("T=type,I=input")); + } + + #[rstest] + #[tokio::test] + async fn test_tool_description_minified(schema: Valid) { + let schema = Arc::new(Mutex::new(schema)); + let search = Search::new(schema.clone(), false, 1, 15_000_000, true) + .expect("Failed to create search tool"); + + let description = search.tool.description.unwrap(); + + // Should contain minification legend + assert!(description.contains("T=type,I=input,E=enum,U=union,F=interface")); + 
assert!(description.contains("s=String,i=Int,f=Float,b=Boolean,d=ID")); + } } diff --git a/crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap b/crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap index e62de8e1..997c8996 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap +++ b/crates/apollo-mcp-server/src/introspection/tools/snapshots/apollo_mcp_server__introspection__tools__search__tests__search_tool.snap @@ -28,7 +28,7 @@ type Post implements Node & Content { type Query { user(id: ID!): User - post(id: ID!): Post + postsOld(filter: [ID!]): [Post!]! @deprecated(reason: "Use posts instead") posts(filter: PostFilter): [Post!]! } diff --git a/crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql b/crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql index 5407ba1b..5c4bbc0d 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql +++ b/crates/apollo-mcp-server/src/introspection/tools/testdata/schema.graphql @@ -191,6 +191,7 @@ type DeviceStats { type UserPreferences { theme: String! + oldTheme: String @deprecated language: String! notifications: NotificationPreferences! privacy: PrivacySettings! @@ -268,6 +269,7 @@ type Query { node(id: ID!): Node user(id: ID!): User post(id: ID!): Post + postsOld(filter: [ID!]) : [Post!]! @deprecated(reason: "Use posts instead") posts(filter: PostFilter): [Post!]! comments(postId: ID!): [Comment!]! notifications(filter: NotificationFilter): [Notification!]! 
diff --git a/crates/apollo-mcp-server/src/introspection/tools/validate.rs b/crates/apollo-mcp-server/src/introspection/tools/validate.rs index 17a66051..7d403f12 100644 --- a/crates/apollo-mcp-server/src/introspection/tools/validate.rs +++ b/crates/apollo-mcp-server/src/introspection/tools/validate.rs @@ -25,7 +25,7 @@ pub struct Validate { } /// Input for the validate tool -#[derive(JsonSchema, Deserialize)] +#[derive(JsonSchema, Deserialize, Debug)] pub struct Input { /// The GraphQL operation operation: String, @@ -46,6 +46,7 @@ impl Validate { } /// Validates the provided GraphQL query + #[tracing::instrument(skip(self))] pub async fn execute(&self, input: Value) -> Result { let input = serde_json::from_value::(input).map_err(|_| { McpError::new(ErrorCode::INVALID_PARAMS, "Invalid input".to_string(), None) @@ -70,6 +71,11 @@ impl Validate { Ok(CallToolResult { content: vec![Content::text("Operation is valid")], is_error: None, + meta: None, + + // Note: We don't really return any meaningful content to the client here, so we can leave the + // structured content as none. + structured_content: None, }) } } diff --git a/crates/apollo-mcp-server/src/lib.rs b/crates/apollo-mcp-server/src/lib.rs index 21b89ce8..1737b4e1 100644 --- a/crates/apollo-mcp-server/src/lib.rs +++ b/crates/apollo-mcp-server/src/lib.rs @@ -1,4 +1,7 @@ +#![cfg_attr(coverage_nightly, feature(coverage_attribute))] + pub mod auth; +pub mod cors; pub mod custom_scalar_map; pub mod errors; pub mod event; @@ -7,7 +10,16 @@ mod graphql; pub mod health; mod introspection; pub mod json_schema; +pub(crate) mod meter; pub mod operations; pub mod sanitize; pub(crate) mod schema_tree_shake; pub mod server; +pub mod telemetry_attributes; + +/// These values are generated at build time by build.rs using telemetry.toml as input. 
+pub mod generated { + pub mod telemetry { + include!(concat!(env!("OUT_DIR"), "/telemetry_attributes.rs")); + } +} diff --git a/crates/apollo-mcp-server/src/main.rs b/crates/apollo-mcp-server/src/main.rs index ae5102e6..0d80e937 100644 --- a/crates/apollo-mcp-server/src/main.rs +++ b/crates/apollo-mcp-server/src/main.rs @@ -11,7 +11,6 @@ use clap::Parser; use clap::builder::Styles; use clap::builder::styling::{AnsiColor, Effects}; use runtime::IdOrDefault; -use runtime::logging::Logging; use tracing::{info, warn}; mod runtime; @@ -42,9 +41,7 @@ async fn main() -> anyhow::Result<()> { None => runtime::read_config_from_env().unwrap_or_default(), }; - // WorkerGuard is not used but needed to be at least defined or else the guard - // is cleaned up too early and file appender logging does not work - let _guard = Logging::setup(&config)?; + let _guard = runtime::telemetry::init_tracing_subscriber(&config)?; info!( "Apollo MCP Server v{} // (c) Apollo Graph, Inc. // Licensed under MIT", @@ -109,6 +106,8 @@ async fn main() -> anyhow::Result<()> { .then(|| config.graphos.graph_ref()) .transpose()?; + let transport = config.transport.clone(); + Ok(Server::builder() .transport(config.transport) .schema_source(schema_source) @@ -125,6 +124,15 @@ async fn main() -> anyhow::Result<()> { .mutation_mode(config.overrides.mutation_mode) .disable_type_description(config.overrides.disable_type_description) .disable_schema_description(config.overrides.disable_schema_description) + .disable_auth_token_passthrough(match transport { + apollo_mcp_server::server::Transport::Stdio => false, + apollo_mcp_server::server::Transport::SSE { auth, .. } => auth + .map(|a| a.disable_auth_token_passthrough) + .unwrap_or(false), + apollo_mcp_server::server::Transport::StreamableHttp { auth, .. 
} => auth + .map(|a| a.disable_auth_token_passthrough) + .unwrap_or(false), + }) .custom_scalar_map( config .custom_scalars @@ -134,6 +142,7 @@ async fn main() -> anyhow::Result<()> { .search_leaf_depth(config.introspection.search.leaf_depth) .index_memory_bytes(config.introspection.search.index_memory_bytes) .health_check(config.health_check) + .cors(config.cors) .build() .start() .await?) diff --git a/crates/apollo-mcp-server/src/meter.rs b/crates/apollo-mcp-server/src/meter.rs new file mode 100644 index 00000000..a52f7447 --- /dev/null +++ b/crates/apollo-mcp-server/src/meter.rs @@ -0,0 +1,4 @@ +use opentelemetry::{global, metrics::Meter}; +use std::sync::LazyLock; + +pub static METER: LazyLock = LazyLock::new(|| global::meter(env!("CARGO_PKG_NAME"))); diff --git a/crates/apollo-mcp-server/src/operations.rs b/crates/apollo-mcp-server/src/operations.rs index 7cec37e0..169c5379 100644 --- a/crates/apollo-mcp-server/src/operations.rs +++ b/crates/apollo-mcp-server/src/operations.rs @@ -1,3541 +1,15 @@ -use crate::custom_scalar_map::CustomScalarMap; -use crate::errors::{McpError, OperationError}; -use crate::event::Event; -use crate::graphql::{self, OperationDetails}; -use crate::schema_tree_shake::{DepthLimit, SchemaTreeShaker}; -use apollo_compiler::ast::{Document, OperationType, Selection}; -use apollo_compiler::schema::ExtendedType; -use apollo_compiler::validation::Valid; -use apollo_compiler::{ - Name, Node, Schema as GraphqlSchema, - ast::{Definition, OperationDefinition, Type}, - parser::Parser, -}; -use apollo_mcp_registry::files; -use apollo_mcp_registry::platform_api::operation_collections::collection_poller::{ - CollectionSource, OperationData, -}; -use apollo_mcp_registry::platform_api::operation_collections::error::CollectionError; -use apollo_mcp_registry::platform_api::operation_collections::event::CollectionEvent; -use apollo_mcp_registry::uplink::persisted_queries::ManifestSource; -use apollo_mcp_registry::uplink::persisted_queries::event::Event as 
ManifestEvent; -use futures::{Stream, StreamExt}; -use regex::Regex; -use reqwest::header::{HeaderMap, HeaderName, HeaderValue}; -use rmcp::model::{ErrorCode, ToolAnnotations}; -use rmcp::schemars::Map; -use rmcp::{ - model::Tool, - schemars::schema::{ - ArrayValidation, InstanceType, Metadata, ObjectValidation, RootSchema, Schema, - SchemaObject, SingleOrVec, SubschemaValidation, - }, - serde_json::{self, Value}, -}; -use schemars::JsonSchema; -use serde::{Deserialize, Serialize}; -use std::collections::HashMap; -use std::fs; -use std::path::PathBuf; -use std::str::FromStr; -use std::sync::{Arc, Mutex}; -use tracing::{debug, info, warn}; - -const OPERATION_DOCUMENT_EXTENSION: &str = "graphql"; - -/// The source of the operations exposed as MCP tools -#[derive(Clone)] -pub enum OperationSource { - /// GraphQL document files - Files(Vec), - - /// Persisted Query manifest - Manifest(ManifestSource), - - /// Operation collection - Collection(CollectionSource), - - /// No operations provided - None, -} - -impl OperationSource { - pub async fn into_stream(self) -> impl Stream { - match self { - OperationSource::Files(paths) => Self::stream_file_changes(paths).boxed(), - OperationSource::Manifest(manifest_source) => manifest_source - .into_stream() - .await - .map(|event| { - let ManifestEvent::UpdateManifest(operations) = event; - Event::OperationsUpdated( - operations.into_iter().map(RawOperation::from).collect(), - ) - }) - .boxed(), - OperationSource::Collection(collection_source) => collection_source - .into_stream() - .map(|event| match event { - CollectionEvent::UpdateOperationCollection(operations) => { - match operations - .iter() - .map(RawOperation::try_from) - .collect::, _>>() - { - Ok(operations) => Event::OperationsUpdated(operations), - Err(e) => Event::CollectionError(e), - } - } - CollectionEvent::CollectionError(error) => Event::CollectionError(error), - }) - .boxed(), - OperationSource::None => { - futures::stream::once(async { 
Event::OperationsUpdated(vec![]) }).boxed() - } - } - } - - fn stream_file_changes(paths: Vec) -> impl Stream { - let path_count = paths.len(); - let state = Arc::new(Mutex::new(HashMap::>::new())); - futures::stream::select_all(paths.into_iter().map(|path| { - let state = Arc::clone(&state); - files::watch(path.as_ref()) - .filter_map(move |_| { - let path = path.clone(); - let state = Arc::clone(&state); - async move { - let mut operations = Vec::new(); - if path.is_dir() { - // Handle a directory - if let Ok(entries) = fs::read_dir(&path) { - for entry in entries.flatten() { - let entry_path = entry.path(); - if entry_path.extension().and_then(|e| e.to_str()) - == Some(OPERATION_DOCUMENT_EXTENSION) - { - match fs::read_to_string(&entry_path) { - Ok(content) => { - // Be forgiving of empty files in the directory case. - // It likely means a new file was created in an editor, - // but the operation hasn't been written yet. - if !content.trim().is_empty() { - operations.push(RawOperation::from(( - content, - entry_path.to_str().map(|s| s.to_string()), - ))); - } - } - Err(e) => { - return Some(Event::OperationError( - e, - path.to_str().map(|s| s.to_string()), - )); - } - } - } - } - } - } else { - // Handle a single file - match fs::read_to_string(&path) { - Ok(content) => { - if !content.trim().is_empty() { - operations.push(RawOperation::from(( - content, - path.to_str().map(|s| s.to_string()), - ))); - } else { - warn!(?path, "Empty operation file"); - } - } - Err(e) => { - return Some(Event::OperationError( - e, - path.to_str().map(|s| s.to_string()), - )); - } - } - } - match state.lock() { - Ok(mut state) => { - state.insert(path.clone(), operations); - // All paths send an initial event on startup. To avoid repeated - // operation events on startup, wait until all paths have been - // loaded, then send a single event with the operations for all - // paths. 
- if state.len() == path_count { - Some(Event::OperationsUpdated( - state.values().flatten().cloned().collect::>(), - )) - } else { - None - } - } - Err(_) => Some(Event::OperationError( - std::io::Error::other("State mutex poisoned"), - path.to_str().map(|s| s.to_string()), - )), - } - } - }) - .boxed() - })) - .boxed() - } -} - -impl From for OperationSource { - fn from(manifest_source: ManifestSource) -> Self { - OperationSource::Manifest(manifest_source) - } -} - -impl From> for OperationSource { - fn from(paths: Vec) -> Self { - OperationSource::Files(paths) - } -} - -#[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Copy, JsonSchema)] -#[serde(rename_all = "snake_case")] -pub enum MutationMode { - /// Don't allow any mutations - #[default] - None, - /// Allow explicit mutations, but don't allow the LLM to build them - Explicit, - /// Allow the LLM to build mutations - All, -} - -#[derive(Debug, Clone)] -pub struct RawOperation { - source_text: String, - persisted_query_id: Option, - headers: Option>, - variables: Option>, - source_path: Option, -} - -// Custom Serialize implementation for RawOperation -// This is needed because reqwest HeaderMap/HeaderValue/HeaderName don't derive Serialize -impl serde::Serialize for RawOperation { - fn serialize(&self, serializer: S) -> Result - where - S: serde::Serializer, - { - use serde::ser::SerializeStruct; - let mut state = serializer.serialize_struct("RawOperation", 4)?; - state.serialize_field("source_text", &self.source_text)?; - if let Some(ref id) = self.persisted_query_id { - state.serialize_field("persisted_query_id", id)?; - } - if let Some(ref variables) = self.variables { - state.serialize_field("variables", variables)?; - } - if let Some(ref headers) = self.headers { - state.serialize_field( - "headers", - headers - .iter() - .map(|(name, value)| { - format!("{}: {}", name, value.to_str().unwrap_or_default()) - }) - .collect::>() - .join("\n") - .as_str(), - )?; - } - if let Some(ref path) 
= self.source_path { - state.serialize_field("source_path", path)?; - } - - state.end() - } -} - -impl From<(String, Option)> for RawOperation { - fn from((source_text, source_path): (String, Option)) -> Self { - Self { - persisted_query_id: None, - source_text, - headers: None, - variables: None, - source_path, - } - } -} - -impl From<(String, String)> for RawOperation { - fn from((persisted_query_id, source_text): (String, String)) -> Self { - Self { - persisted_query_id: Some(persisted_query_id), - source_text, - headers: None, - variables: None, - source_path: None, - } - } -} - -impl TryFrom<&OperationData> for RawOperation { - type Error = CollectionError; - - fn try_from(operation_data: &OperationData) -> Result { - let variables = if let Some(variables) = operation_data.variables.as_ref() { - if variables.trim().is_empty() { - Some(HashMap::new()) - } else { - Some( - serde_json::from_str::>(variables) - .map_err(|_| CollectionError::InvalidVariables(variables.clone()))?, - ) - } - } else { - None - }; - - let headers = if let Some(headers) = operation_data.headers.as_ref() { - let mut header_map = HeaderMap::new(); - for header in headers { - header_map.insert( - HeaderName::from_str(&header.0).map_err(CollectionError::HeaderName)?, - HeaderValue::from_str(&header.1).map_err(CollectionError::HeaderValue)?, - ); - } - Some(header_map) - } else { - None - }; - - Ok(Self { - persisted_query_id: None, - source_text: operation_data.source_text.clone(), - headers, - variables, - source_path: None, - }) - } -} - -impl RawOperation { - pub(crate) fn into_operation( - self, - schema: &Valid, - custom_scalars: Option<&CustomScalarMap>, - mutation_mode: MutationMode, - disable_type_description: bool, - disable_schema_description: bool, - ) -> Result, OperationError> { - Operation::from_document( - self, - schema, - custom_scalars, - mutation_mode, - disable_type_description, - disable_schema_description, - ) - } -} - -#[derive(Debug, Clone, Serialize)] -pub struct 
Operation { - tool: Tool, - inner: RawOperation, - operation_name: String, -} - -impl AsRef for Operation { - fn as_ref(&self) -> &Tool { - &self.tool - } -} - -impl From for Tool { - fn from(value: Operation) -> Tool { - value.tool - } -} - -impl Operation { - pub(crate) fn into_inner(self) -> RawOperation { - self.inner - } -} - -#[allow(clippy::type_complexity)] -pub fn operation_defs( - source_text: &str, - allow_mutations: bool, - source_path: Option, -) -> Result, Option)>, OperationError> { - let source_path_clone = source_path.clone(); - let document = Parser::new() - .parse_ast( - source_text, - source_path_clone.unwrap_or_else(|| "operation.graphql".to_string()), - ) - .map_err(|e| OperationError::GraphQLDocument(Box::new(e)))?; - let mut last_offset: Option = Some(0); - let mut operation_defs = document.definitions.clone().into_iter().filter_map(|def| { - let description = match def.location() { - Some(source_span) => { - let description = last_offset - .map(|start_offset| &source_text[start_offset..source_span.offset()]); - last_offset = Some(source_span.end_offset()); - description - } - None => { - last_offset = None; - None - } - }; - - match def { - Definition::OperationDefinition(operation_def) => { - Some((operation_def, description)) - } - Definition::FragmentDefinition(_) => None, - _ => { - eprintln!("Schema definitions were passed in, but only operations and fragments are allowed"); - None - } - } - }); - - let (operation, comments) = match (operation_defs.next(), operation_defs.next()) { - (None, _) => { - return Err(OperationError::NoOperations { source_path }); - } - (_, Some(_)) => { - return Err(OperationError::TooManyOperations { - source_path, - count: 2 + operation_defs.count(), - }); - } - (Some(op), None) => op, - }; - - match operation.operation_type { - OperationType::Subscription => { - debug!( - "Skipping subscription operation {}", - operation_name(&operation, source_path)? 
- ); - return Ok(None); - } - OperationType::Mutation => { - if !allow_mutations { - warn!( - "Skipping mutation operation {}", - operation_name(&operation, source_path)? - ); - return Ok(None); - } - } - OperationType::Query => {} - } - - Ok(Some((document, operation, comments.map(|c| c.to_string())))) -} - -pub fn extract_and_format_comments(comments: Option) -> Option { - comments.and_then(|comments| { - let content = Regex::new(r"(\n|^)(\s*,*)*#") - .ok()? - .replace_all(comments.as_str(), "$1"); - let trimmed = content.trim(); - - if trimmed.is_empty() { - None - } else { - Some(trimmed.to_string()) - } - }) -} - -pub fn find_opening_parens_offset( - source_text: &str, - operation_definition: &Node, -) -> Option { - let regex = match Regex::new(r"(?m)^\s*\(") { - Ok(regex) => regex, - Err(_) => return None, - }; - - operation_definition - .name - .as_ref() - .and_then(|n| n.location()) - .map(|span| { - regex - .find(source_text[span.end_offset()..].as_ref()) - .map(|m| m.start() + m.len() + span.end_offset()) - .unwrap_or(0) - }) -} - -pub fn variable_description_overrides( - source_text: &str, - operation_definition: &Node, -) -> HashMap { - let mut argument_overrides_map: HashMap = HashMap::new(); - let mut last_offset = find_opening_parens_offset(source_text, operation_definition); - operation_definition - .variables - .iter() - .for_each(|v| match v.location() { - Some(source_span) => { - let comment = last_offset - .map(|start_offset| &source_text[start_offset..source_span.offset()]); - - if let Some(description) = comment.filter(|d| !d.is_empty() && d.contains('#')) { - if let Some(description) = - extract_and_format_comments(Some(description.to_string())) - { - argument_overrides_map.insert(v.name.to_string(), description); - } - } - - last_offset = Some(source_span.end_offset()); - } - None => { - last_offset = None; - } - }); - - argument_overrides_map -} - -impl Operation { - pub fn from_document( - raw_operation: RawOperation, - graphql_schema: 
&GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - mutation_mode: MutationMode, - disable_type_description: bool, - disable_schema_description: bool, - ) -> Result, OperationError> { - if let Some((document, operation, comments)) = operation_defs( - &raw_operation.source_text, - mutation_mode != MutationMode::None, - raw_operation.source_path.clone(), - )? { - let operation_name = match operation_name(&operation, raw_operation.source_path.clone()) - { - Ok(name) => name, - Err(OperationError::MissingName { - source_path, - operation, - }) => { - if let Some(path) = source_path { - warn!("Skipping unnamed operation in {path}: {operation}"); - } else { - warn!("Skipping unnamed operation: {operation}"); - } - return Ok(None); - } - Err(e) => return Err(e), - }; - let variable_description_overrides = - variable_description_overrides(&raw_operation.source_text, &operation); - let mut tree_shaker = SchemaTreeShaker::new(graphql_schema); - tree_shaker.retain_operation(&operation, &document, DepthLimit::Unlimited); - - let description = Self::tool_description( - comments, - &mut tree_shaker, - graphql_schema, - &operation, - disable_type_description, - disable_schema_description, - ); - - let mut object = serde_json::to_value(get_json_schema( - &operation, - tree_shaker.argument_descriptions(), - &variable_description_overrides, - graphql_schema, - custom_scalar_map, - raw_operation.variables.as_ref(), - ))?; - - // make sure that the properties field exists since schemas::ObjectValidation is - // configured to skip empty maps (in the case where there are no input args) - ensure_properties_exists(&mut object); - - let Value::Object(schema) = object else { - return Err(OperationError::Internal( - "Schemars should have returned an object".to_string(), - )); - }; - - let tool: Tool = Tool::new(operation_name.clone(), description, schema).annotate( - ToolAnnotations::new() - .read_only(operation.operation_type != OperationType::Mutation), - ); - let 
character_count = tool_character_length(&tool); - match character_count { - Ok(length) => info!( - "Tool {} loaded with a character count of {}. Estimated tokens: {}", - operation_name, - length, - length / 4 // We don't know the tokenization algorithm, so we just use 4 characters per token as a rough estimate. https://docs.anthropic.com/en/docs/resources/glossary#tokens - ), - Err(_) => info!( - "Tool {} loaded with an unknown character count", - operation_name - ), - } - Ok(Some(Operation { - tool, - inner: raw_operation, - operation_name, - })) - } else { - Ok(None) - } - } - - /// Generate a description for an operation based on documentation in the schema - fn tool_description( - comments: Option, - tree_shaker: &mut SchemaTreeShaker, - graphql_schema: &GraphqlSchema, - operation_def: &Node, - disable_type_description: bool, - disable_schema_description: bool, - ) -> String { - let comment_description = extract_and_format_comments(comments); - - match comment_description { - Some(description) => description, - None => { - // Add the tree-shaken types to the end of the tool description - let mut lines = vec![]; - if !disable_type_description { - let descriptions = operation_def - .selection_set - .iter() - .filter_map(|selection| { - match selection { - Selection::Field(field) => { - let field_name = field.name.to_string(); - let operation_type = operation_def.operation_type; - if let Some(root_name) = - graphql_schema.root_operation(operation_type) - { - // Find the root field referenced by the operation - let root = graphql_schema.get_object(root_name)?; - let field_definition = root - .fields - .iter() - .find(|(name, _)| { - let name = name.to_string(); - name == field_name - }) - .map(|(_, field_definition)| { - field_definition.node.clone() - }); - - // Add the root field description to the tool description - let field_description = field_definition - .clone() - .and_then(|field| field.description.clone()) - .map(|node| node.to_string()); - - // Add 
information about the return type - let ty = field_definition.map(|field| field.ty.clone()); - let type_description = - ty.as_ref().map(Self::type_description); - - Some( - vec![field_description, type_description] - .into_iter() - .flatten() - .collect::>() - .join("\n"), - ) - } else { - None - } - } - _ => None, - } - }) - .collect::>() - .join("\n---\n"); - - // Add the tree-shaken types to the end of the tool description - - lines.push(descriptions); - } - if !disable_schema_description { - let shaken_schema = - tree_shaker.shaken().unwrap_or_else(|schema| schema.partial); - - let mut types = shaken_schema - .types - .iter() - .filter(|(_name, extended_type)| { - !extended_type.is_built_in() - && matches!( - extended_type, - ExtendedType::Object(_) - | ExtendedType::Scalar(_) - | ExtendedType::Enum(_) - | ExtendedType::Interface(_) - | ExtendedType::Union(_) - ) - && graphql_schema - .root_operation(operation_def.operation_type) - .is_none_or(|op_name| extended_type.name() != op_name) - && graphql_schema - .root_operation(OperationType::Query) - .is_none_or(|op_name| extended_type.name() != op_name) - }) - .peekable(); - if types.peek().is_some() { - lines.push(String::from("---")); - } - - for ty in types { - lines.push(ty.1.serialize().to_string()); - } - } - lines.join("\n") - } - } - } - - fn type_description(ty: &Type) -> String { - let type_name = ty.inner_named_type(); - let mut lines = vec![]; - let optional = if ty.is_non_null() { - "" - } else { - "is optional and " - }; - let array = if ty.is_list() { - "is an array of type" - } else { - "has type" - }; - lines.push(format!( - "The returned value {optional}{array} `{type_name}`" - )); - - lines.join("\n") - } -} - -fn ensure_properties_exists(json_object: &mut Value) { - if let Some(obj_type) = json_object.get("type") { - if obj_type == "object" { - if let Some(obj_map) = json_object.as_object_mut() { - let props = obj_map - .entry("properties") - .or_insert_with(|| 
Value::Object(serde_json::Map::new())); - if !props.is_object() { - *props = Value::Object(serde_json::Map::new()); - } - } - } - } -} - -pub fn operation_name( - operation: &Node, - source_path: Option, -) -> Result { - Ok(operation - .name - .as_ref() - .ok_or_else(|| OperationError::MissingName { - source_path, - operation: operation.serialize().no_indent().to_string(), - })? - .to_string()) -} - -fn tool_character_length(tool: &Tool) -> Result { - let tool_schema_string = serde_json::to_string_pretty(&serde_json::json!(tool.input_schema))?; - Ok(tool.name.len() - + tool.description.as_ref().map(|d| d.len()).unwrap_or(0) - + tool_schema_string.len()) -} - -fn get_json_schema( - operation: &Node, - schema_argument_descriptions: &HashMap>, - argument_descriptions_overrides: &HashMap, - graphql_schema: &GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - variable_overrides: Option<&HashMap>, -) -> RootSchema { - let mut obj = ObjectValidation::default(); - let mut definitions = Map::new(); - - operation.variables.iter().for_each(|variable| { - let variable_name = variable.name.to_string(); - if !variable_overrides - .map(|o| o.contains_key(&variable_name)) - .unwrap_or_default() - { - // use overridden description if there is one, otherwise use the schema description - let description: Option = - match argument_descriptions_overrides.get(&variable_name) { - Some(description) => Some(description.clone()), - None => schema_argument_descriptions - .get(&variable_name) - .filter(|d| !d.is_empty()) - .map(|d| d.join("#")), - }; - - let schema = type_to_schema( - description, - variable.ty.as_ref(), - graphql_schema, - custom_scalar_map, - &mut definitions, - ); - obj.properties.insert(variable_name.clone(), schema); - if variable.ty.is_non_null() { - obj.required.insert(variable_name); - } - } - }); - - RootSchema { - schema: SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new(InstanceType::Object))), - object: Some(Box::new(obj)), - 
..Default::default() - }, - definitions, - ..Default::default() - } -} - -fn schema_factory( - description: Option, - instance_type: Option, - object_validation: Option, - array_validation: Option, - subschema_validation: Option, - enum_values: Option>, -) -> Schema { - Schema::Object(SchemaObject { - instance_type: instance_type - .map(|instance_type| SingleOrVec::Single(Box::new(instance_type))), - object: object_validation.map(Box::new), - array: array_validation.map(Box::new), - subschemas: subschema_validation.map(Box::new), - enum_values, - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - ..Default::default() - }) -} - -fn input_object_description(name: &Name, graphql_schema: &GraphqlSchema) -> Option { - if let Some(input_object) = graphql_schema.get_input_object(name) { - input_object.description.as_ref().map(|d| d.to_string()) - } else if let Some(scalar) = graphql_schema.get_scalar(name) { - scalar.description.as_ref().map(|d| d.to_string()) - } else if let Some(enum_type) = graphql_schema.get_enum(name) { - let values = enum_type - .values - .iter() - .map(|(name, value)| { - format!( - "{}: {}", - name, - value - .description - .as_ref() - .map(|d| d.to_string()) - .unwrap_or_default() - ) - }) - .collect::>() - .join("\n"); - Some(format!( - "{}\n\nValues:\n{}", - enum_type - .description - .as_ref() - .map(|d| d.to_string()) - .unwrap_or_default(), - values - )) - } else { - None - } -} - -fn type_to_schema( - description: Option, - variable_type: &Type, - graphql_schema: &GraphqlSchema, - custom_scalar_map: Option<&CustomScalarMap>, - definitions: &mut Map, -) -> Schema { - match variable_type { - Type::NonNullNamed(named) | Type::Named(named) => match named.as_str() { - "String" | "ID" => schema_factory( - description, - Some(InstanceType::String), - None, - None, - None, - None, - ), - "Int" | "Float" => schema_factory( - description, - Some(InstanceType::Number), - None, - None, - None, - None, - ), - "Boolean" => 
schema_factory( - description, - Some(InstanceType::Boolean), - None, - None, - None, - None, - ), - _ => { - if let Some(input_type) = graphql_schema.get_input_object(named) { - if !definitions.contains_key(named.as_str()) { - definitions - .insert(named.to_string(), Schema::Object(SchemaObject::default())); // Insert temporary value into map so any recursive references will not try to also create it. - let mut obj = ObjectValidation::default(); - - input_type.fields.iter().for_each(|(name, field)| { - let description = field.description.as_ref().map(|n| n.to_string()); - obj.properties.insert( - name.to_string(), - type_to_schema( - description, - field.ty.as_ref(), - graphql_schema, - custom_scalar_map, - definitions, - ), - ); - - if field.is_required() { - obj.required.insert(name.to_string()); - } - }); - - definitions.insert( - named.to_string(), - schema_factory( - input_object_description(named, graphql_schema), - Some(InstanceType::Object), - Some(obj), - None, - None, - None, - ), - ); - } - - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else if graphql_schema.get_scalar(named).is_some() { - if !definitions.contains_key(named.as_str()) { - let default_description = input_object_description(named, graphql_schema); - if let Some(custom_scalar_map) = custom_scalar_map { - if let Some(custom_scalar_schema_object) = - custom_scalar_map.get(named.as_str()) - { - let mut custom_schema = custom_scalar_schema_object.clone(); - let mut meta = *custom_schema.metadata.unwrap_or_default(); - // If description isn't included in custom schema, inject the one from the schema - if meta.description.is_none() { - meta.description = default_description; - } - custom_schema.metadata = Some(Box::new(meta)); - definitions - .insert(named.to_string(), Schema::Object(custom_schema)); - } else { - warn!(name=?named, "custom scalar 
missing from custom_scalar_map"); - definitions.insert( - named.to_string(), - schema_factory( - default_description, - None, - None, - None, - None, - None, - ), - ); - } - } else { - warn!(name=?named, "custom scalars aren't currently supported without a custom_scalar_map"); - definitions.insert( - named.to_string(), - schema_factory(default_description, None, None, None, None, None), - ); - } - } - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else if let Some(enum_type) = graphql_schema.get_enum(named) { - if !definitions.contains_key(named.as_str()) { - definitions.insert( - named.to_string(), - schema_factory( - input_object_description(named, graphql_schema), - Some(InstanceType::String), - None, - None, - None, - Some( - enum_type - .values - .iter() - .map(|(_name, value)| serde_json::json!(value.value)) - .collect(), - ), - ), - ); - } - Schema::Object(SchemaObject { - metadata: Some(Box::new(Metadata { - description, - ..Default::default() - })), - reference: Some(format!("#/definitions/{named}")), - ..Default::default() - }) - } else { - warn!(name=?named, "Type not found in schema"); - schema_factory(None, None, None, None, None, None) - } - } - }, - Type::NonNullList(list_type) | Type::List(list_type) => { - let inner_type_schema = type_to_schema( - description, - list_type, - graphql_schema, - custom_scalar_map, - definitions, - ); - let items_schema = if list_type.is_non_null() { - inner_type_schema - } else { - schema_factory( - None, - None, - None, - None, - Some(SubschemaValidation { - one_of: Some(vec![ - inner_type_schema, - Schema::Object(SchemaObject { - instance_type: Some(SingleOrVec::Single(Box::new( - InstanceType::Null, - ))), - ..Default::default() - }), - ]), - ..Default::default() - }), - None, - ) - }; - - schema_factory( - None, - Some(InstanceType::Array), - None, - 
Some(ArrayValidation { - items: Some(SingleOrVec::Single(Box::new(items_schema))), - ..Default::default() - }), - None, - None, - ) - } - } -} - -impl graphql::Executable for Operation { - fn persisted_query_id(&self) -> Option { - // TODO: id was being overridden, should we be returning? Should this be behind a flag? self.inner.persisted_query_id.clone() - None - } - - fn operation(&self, _input: Value) -> Result { - Ok(OperationDetails { - query: self.inner.source_text.clone(), - operation_name: Some(self.operation_name.clone()), - }) - } - - fn variables(&self, input_variables: Value) -> Result { - if let Some(raw_variables) = self.inner.variables.as_ref() { - let mut variables = match input_variables { - Value::Null => Ok(serde_json::Map::new()), - Value::Object(obj) => Ok(obj.clone()), - _ => Err(McpError::new( - ErrorCode::INVALID_PARAMS, - "Invalid input".to_string(), - None, - )), - }?; - - raw_variables.iter().try_for_each(|(key, value)| { - if variables.contains_key(key) { - Err(McpError::new( - ErrorCode::INVALID_PARAMS, - "No such parameter: {key}", - None, - )) - } else { - variables.insert(key.clone(), value.clone()); - Ok(()) - } - })?; - - Ok(Value::Object(variables)) - } else { - Ok(input_variables) - } - } - - fn headers(&self, default_headers: &HeaderMap) -> HeaderMap { - match self.inner.headers.as_ref() { - None => default_headers.clone(), - Some(raw_headers) if default_headers.is_empty() => raw_headers.clone(), - Some(raw_headers) => { - let mut headers = default_headers.clone(); - raw_headers.iter().for_each(|(key, value)| { - if headers.contains_key(key) { - tracing::debug!( - "Header {} has a default value, overwriting with operation value", - key - ); - } - headers.insert(key, value.clone()); - }); - headers - } - } - } -} - -#[cfg(test)] -mod tests { - use crate::graphql::Executable; - use apollo_compiler::{Schema, parser::Parser, validation::Valid}; - use rmcp::serde_json::Value; - use rmcp::{model::Tool, serde_json}; - use 
std::{collections::HashMap, str::FromStr, sync::LazyLock}; - use tracing_test::traced_test; - - use crate::{ - custom_scalar_map::CustomScalarMap, - operations::{MutationMode, Operation, RawOperation}, - }; - - // Example schema for tests - static SCHEMA: LazyLock> = LazyLock::new(|| { - Schema::parse( - r#" - type Query { - id: String - enum: RealEnum - customQuery(""" id description """ id: ID!, """ a flag """ flag: Boolean): OutputType - testOp: OpResponse - } - type Mutation {id: String } - - """ - RealCustomScalar exists - """ - scalar RealCustomScalar - input RealInputObject { - """ - optional is a input field that is optional - """ - optional: String - - """ - required is a input field that is required - """ - required: String! - } - - type OpResponse { - id: String - } - - """ - the description for the enum - """ - enum RealEnum { - """ - ENUM_VALUE_1 is a value - """ - ENUM_VALUE_1 - - """ - ENUM_VALUE_2 is a value - """ - ENUM_VALUE_2 - } - - """ - custom output type - """ - type OutputType { - id: ID! 
- } - "#, - "operation.graphql", - ) - .expect("schema should parse") - .validate() - .expect("schema should be valid") - }); - - #[test] - fn subscriptions() { - assert!( - Operation::from_document( - RawOperation { - source_text: "subscription SubscriptionName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .is_none() - ); - } - - #[test] - fn mutation_mode_none() { - assert!( - Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .ok() - .unwrap() - .is_none() - ); - } - - #[test] - fn mutation_mode_explicit() { - let operation = Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::Explicit, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation, @r#" - Operation { - tool: Tool { - name: "MutationName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - false, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - }, - inner: RawOperation { - source_text: "mutation MutationName { id }", - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - operation_name: "MutationName", - } - "#); - } - - #[test] - fn mutation_mode_all() { - let operation = Operation::from_document( - RawOperation { - source_text: "mutation MutationName { id }".to_string(), - 
persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::All, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation, @r#" - Operation { - tool: Tool { - name: "MutationName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - false, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - }, - inner: RawOperation { - source_text: "mutation MutationName { id }", - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - operation_name: "MutationName", - } - "#); - } - - #[test] - fn no_variables() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object {}, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID) { id }".to_string(), - persisted_query_id: None, - headers: None, 
- variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "string" - } - } - } - "#); - } - - #[test] - fn non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "string" - } - } - } - "#); - } - - 
#[test] - fn non_nullable_list_of_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID]!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn non_nullable_list_of_non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID!]!) 
{ id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "type": String("string"), - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "required": [ - "id" - ], - "properties": { - "id": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - 
destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_non_nullable_named_type() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [ID!]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "type": String("string"), - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "type": "string" - } - } - } - } - "#); - } - - #[test] - fn nullable_list_of_nullable_lists_of_nullable_named_types() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: [[ID]]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - 
insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "type": String("string"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "id": { - "type": "array", - "items": { - "oneOf": [ - { - "type": "array", - "items": { - "oneOf": [ - { - "type": "string" - }, - { - "type": "null" - } - ] - } - }, - { - "type": "null" - } - ] - } - } - } - } - "#); - } - - #[test] - fn nullable_input_object() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealInputObject) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealInputObject"), - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": 
Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn non_nullable_enum() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealEnum!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("id"), - ], - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealEnum"), - }, - }, - "definitions": Object { - "RealEnum": Object { - "description": String("the description for the enum\n\nValues:\nENUM_VALUE_1: ENUM_VALUE_1 is a value\nENUM_VALUE_2: ENUM_VALUE_2 is a value"), - "type": String("string"), - "enum": Array [ - String("ENUM_VALUE_1"), - String("ENUM_VALUE_2"), - ], - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn multiple_operations_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { id } query QueryName { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), 
- }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r#" - Err( - TooManyOperations { - source_path: Some( - "operation.graphql", - ), - count: 2, - }, - ) - "#); - } - - #[test] - #[traced_test] - fn unnamed_operations_should_be_skipped() { - let operation = Operation::from_document( - RawOperation { - source_text: "query { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - assert!(operation.unwrap().is_none()); - - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| { - line.contains("Skipping unnamed operation in operation.graphql: { id }") - }) - .then_some(()) - .ok_or("Expected warning about unnamed operation in logs".to_string()) - }); - } - - #[test] - fn no_operations_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "fragment Test on Query { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: Some("operation.graphql".to_string()), - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r#" - Err( - NoOperations { - source_path: Some( - "operation.graphql", - ), - }, - ) - "#); - } - - #[test] - fn schema_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "type Query { id: String }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ); - insta::assert_debug_snapshot!(operation, @r" - Err( - NoOperations { - source_path: None, - }, - ) - "); - } - - #[test] - #[traced_test] - fn unknown_type_should_be_any() { - let operation = Operation::from_document( - RawOperation { - source_text: "query 
QueryName($id: FakeType) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| line.contains("Type not found in schema name=\"FakeType\"")) - .then_some(()) - .ok_or("Expected warning about unknown type in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object {}, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - } - - #[test] - #[traced_test] - fn custom_scalar_without_map_should_be_any() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| line.contains("custom scalars aren't currently supported without a custom_scalar_map name=\"RealCustomScalar\"")) - .then_some(()) - .ok_or("Expected warning about custom scalar without map in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": 
String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - #[traced_test] - fn custom_scalar_with_map_but_not_found_should_error() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - Some(&CustomScalarMap::from_str("{}").unwrap()), - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - // Verify that a warning was logged - logs_assert(|lines: &[&str]| { - lines - .iter() - .filter(|line| line.contains("WARN")) - .any(|line| { - line.contains( - "custom scalar missing from custom_scalar_map name=\"RealCustomScalar\"", - ) - }) - .then_some(()) - .ok_or("Expected warning about custom scalar missing in logs".to_string()) - }); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn custom_scalar_with_map() { - let custom_scalar_map = - 
CustomScalarMap::from_str("{ \"RealCustomScalar\": { \"type\": \"string\" }}"); - - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - custom_scalar_map.ok().as_ref(), - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "id": Object { - "$ref": String("#/definitions/RealCustomScalar"), - }, - }, - "definitions": Object { - "RealCustomScalar": Object { - "description": String("RealCustomScalar exists"), - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn test_tool_description() { - const SCHEMA: &str = r#" - type Query { - """ - Get a list of A - """ - a(input: String!): [A]! 
- - """ - Get a B - """ - b: B - - """ - Get a Z - """ - z: Z - } - - """ - A - """ - type A { - c: String - d: D - } - - """ - B - """ - type B { - d: D - u: U - } - - """ - D - """ - type D { - e: E - f: String - g: String - } - - """ - E - """ - enum E { - """ - one - """ - ONE - """ - two - """ - TWO - } - - """ - F - """ - scalar F - - """ - U - """ - union U = M | W - - """ - M - """ - type M { - m: Int - } - - """ - W - """ - type W { - w: Int - } - - """ - Z - """ - type Z { - z: Int - zz: Int - zzz: Int - } - "#; - - let document = Parser::new().parse_ast(SCHEMA, "schema.graphql").unwrap(); - let schema = document.to_schema().unwrap(); - - let operation = Operation::from_document( - RawOperation { - source_text: r###" - query GetABZ($state: String!) { - a(input: $input) { - d { - e - } - } - b { - d { - ...JustF - } - u { - ... on M { - m - } - ... on W { - w - } - } - } - z { - ...JustZZZ - } - } - - fragment JustF on D { - f - } - - fragment JustZZZ on Z { - zzz - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &schema, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r#" - Get a list of A - The returned value is an array of type `A` - --- - Get a B - The returned value is optional and has type `B` - --- - Get a Z - The returned value is optional and has type `Z` - --- - """A""" - type A { - d: D - } - - """B""" - type B { - d: D - u: U - } - - """D""" - type D { - e: E - f: String - } - - """E""" - enum E { - """one""" - ONE - """two""" - TWO - } - - """U""" - union U = M | W - - """M""" - type M { - m: Int - } - - """W""" - type W { - w: Int - } - - """Z""" - type Z { - zzz: Int - } - "# - ); - } - - #[test] - fn tool_comment_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###" - # Overridden tool #description - query GetABZ($state: String!) 
{ - b { - d { - f - } - } - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"Overridden tool #description" - ); - } - - #[test] - fn tool_empty_comment_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###" - # - - # - query GetABZ($state: String!) { - id - } - "### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"The returned value is optional and has type `String`" - ); - } - - #[test] - fn no_schema_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r" - The returned value is optional and has type `String` - --- - The returned value is optional and has type `RealEnum` - " - ); - } - - #[test] - fn no_type_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) 
{ id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - true, - false, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @r#" - --- - """the description for the enum""" - enum RealEnum { - """ENUM_VALUE_1 is a value""" - ENUM_VALUE_1 - """ENUM_VALUE_2 is a value""" - ENUM_VALUE_2 - } - "# - ); - } - - #[test] - fn no_type_description_or_schema_description() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - true, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_snapshot!( - operation.tool.description.unwrap(), - @"" - ); - } - - #[test] - fn recursive_inputs() { - let operation = Operation::from_document( - RawOperation { - source_text: r###"query Test($filter: Filter){ - field(filter: $filter) { - id - } - }"### - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &Schema::parse( - r#" - """the filter input""" - input Filter { - """the filter.field field""" - field: String - """the filter.filter field""" - filter: Filter - } - type Query { - """the Query.field field""" - field( - """the filter argument""" - filter: Filter - ): String - } - "#, - "operation.graphql", - ) - .unwrap(), - None, - MutationMode::None, - true, - true, - ) - .unwrap() - .unwrap(); - - insta::assert_debug_snapshot!(operation.tool, @r##" - Tool { - name: "Test", - description: Some( - "", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "filter": Object { - "description": String("the filter argument"), - "$ref": String("#/definitions/Filter"), - }, - }, - "definitions": Object { - "Filter": Object { - "description": String("the 
filter input"), - "type": String("object"), - "properties": Object { - "field": Object { - "description": String("the filter.field field"), - "type": String("string"), - }, - "filter": Object { - "description": String("the filter.filter field"), - "$ref": String("#/definitions/Filter"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - } - - #[test] - fn with_variable_overrides() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($id: ID, $name: String) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: Some(HashMap::from([( - "id".to_string(), - serde_json::Value::String("v".to_string()), - )])), - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r#" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "name": Object { - "type": String("string"), - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "#); - } - - #[test] - fn input_schema_includes_variable_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID) { customQuery(id: $idArg) { id } }" - .to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id description", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_includes_joined_variable_descriptions_if_multiple() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID, $flag: Boolean) { customQuery(id: $idArg, flag: $flag) { id @skip(if: $flag) } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "Skipped when true.#a flag", - "type": "boolean" - }, - "idArg": { - "description": "id description", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_includes_directive_variable_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($idArg: ID, $skipArg: Boolean) { customQuery(id: $idArg) { id @skip(if: $skipArg) } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id description", - "type": "string" - }, - "skipArg": { - "description": "Skipped when true.", - "type": "boolean" - } - } - } - "#); - } - - #[test] - fn test_operation_name_with_named_query() { - let source_text = "query GetUser($id: ID!) 
{ user(id: $id) { name email } }"; - let raw_op = RawOperation { - source_text: source_text.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }; - let operation = - Operation::from_document(raw_op, &SCHEMA, None, MutationMode::None, false, false) - .unwrap() - .unwrap(); - - let op_details = operation.operation(Value::Null).unwrap(); - assert_eq!(op_details.operation_name, Some(String::from("GetUser"))); - } - - #[test] - fn test_operation_name_with_named_mutation() { - let source_text = - "mutation CreateUser($input: UserInput!) { createUser(input: $input) { id name } }"; - let raw_op = RawOperation { - source_text: source_text.to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }; - let operation = - Operation::from_document(raw_op, &SCHEMA, None, MutationMode::Explicit, false, false) - .unwrap() - .unwrap(); - - let op_details = operation.operation(Value::Null).unwrap(); - assert_eq!(op_details.operation_name, Some(String::from("CreateUser"))); - } - - #[test] - fn operation_variable_comments_override_schema_descriptions() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation description\nquery QueryName(# id comment override\n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override", - "type": "string" - } - } - } - "#); - } - - #[test] - fn operation_variable_comment_override_supports_multiline_comments() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation description\nquery 
QueryName(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn comment_with_parens_has_comments_extracted_correctly() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName # a comment (with parens)\n(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn multiline_comment_with_odd_spacing_and_parens_has_comments_extracted_correctly() { - let operation = Operation::from_document( - RawOperation { - source_text: "# operation comment\n\nquery QueryName # a comment \n# extra space\n\n\n# blank lines (with parens)\n\n# another (paren)\n(# id comment override\n # multi-line comment \n$idArg: ID\n, \n# a flag\n$flag: Boolean) { customQuery(id: $idArg, skip: $flag) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - 
.unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "a flag", - "type": "boolean" - }, - "idArg": { - "description": "id comment override\n multi-line comment", - "type": "string" - } - } - } - "#); - } - - #[test] - fn operation_with_no_variables_is_handled_properly() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName { customQuery(id: \"123\") { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn commas_between_variables_are_ignored() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName(# id arg\n $idArg: ID,,\n,,\n # a flag\n $flag: Boolean, ,,) { customQuery(id: $idArg, flag: $flag) { id } }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": { - "flag": { - "description": "a flag", - "type": "boolean" - }, - "idArg": { - "description": "id arg", - "type": "string" - } - } - } - "#); - } - - #[test] - fn input_schema_include_properties_field_even_when_operation_has_no_input_args() { - let operation = Operation::from_document( - RawOperation { - source_text: "query TestOp { testOp { id } }".to_string(), - 
persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" - { - "type": "object", - "properties": {} - } - "#); - } - - #[test] - fn nullable_list_of_nullable_input_objects() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($objects: [RealInputObject]) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "properties": Object { - "objects": Object { - "type": String("array"), - "items": Object { - "oneOf": Array [ - Object { - "$ref": String("#/definitions/RealInputObject"), - }, - Object { - "type": String("null"), - }, - ], - }, - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" - { - "type": 
"object", - "properties": { - "objects": { - "type": "array", - "items": { - "oneOf": [ - { - "$ref": "#/definitions/RealInputObject" - }, - { - "type": "null" - } - ] - } - } - }, - "definitions": { - "RealInputObject": { - "type": "object", - "required": [ - "required" - ], - "properties": { - "optional": { - "description": "optional is a input field that is optional", - "type": "string" - }, - "required": { - "description": "required is a input field that is required", - "type": "string" - } - } - } - } - } - "##); - } - - #[test] - fn non_nullable_list_of_non_nullable_input_objects() { - let operation = Operation::from_document( - RawOperation { - source_text: "query QueryName($objects: [RealInputObject!]!) { id }".to_string(), - persisted_query_id: None, - headers: None, - variables: None, - source_path: None, - }, - &SCHEMA, - None, - MutationMode::None, - false, - false, - ) - .unwrap() - .unwrap(); - let tool = Tool::from(operation); - - insta::assert_debug_snapshot!(tool, @r##" - Tool { - name: "QueryName", - description: Some( - "The returned value is optional and has type `String`", - ), - input_schema: { - "type": String("object"), - "required": Array [ - String("objects"), - ], - "properties": Object { - "objects": Object { - "type": String("array"), - "items": Object { - "$ref": String("#/definitions/RealInputObject"), - }, - }, - }, - "definitions": Object { - "RealInputObject": Object { - "type": String("object"), - "required": Array [ - String("required"), - ], - "properties": Object { - "optional": Object { - "description": String("optional is a input field that is optional"), - "type": String("string"), - }, - "required": Object { - "description": String("required is a input field that is required"), - "type": String("string"), - }, - }, - }, - }, - }, - annotations: Some( - ToolAnnotations { - title: None, - read_only_hint: Some( - true, - ), - destructive_hint: None, - idempotent_hint: None, - open_world_hint: None, - }, - ), - } - "##); - 
insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r##" - { - "type": "object", - "required": [ - "objects" - ], - "properties": { - "objects": { - "type": "array", - "items": { - "$ref": "#/definitions/RealInputObject" - } - } - }, - "definitions": { - "RealInputObject": { - "type": "object", - "required": [ - "required" - ], - "properties": { - "optional": { - "description": "optional is a input field that is optional", - "type": "string" - }, - "required": { - "description": "required is a input field that is required", - "type": "string" - } - } - } - } - } - "##); - } -} +//! Operations +//! +//! This module includes transformation utilities that convert GraphQL operations +//! into MCP tools. + +mod mutation_mode; +mod operation; +mod operation_source; +mod raw_operation; +mod schema_walker; + +pub use mutation_mode::MutationMode; +pub use operation::{Operation, operation_defs, operation_name}; +pub use operation_source::OperationSource; +pub use raw_operation::RawOperation; diff --git a/crates/apollo-mcp-server/src/operations/mutation_mode.rs b/crates/apollo-mcp-server/src/operations/mutation_mode.rs new file mode 100644 index 00000000..9c7692d7 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/mutation_mode.rs @@ -0,0 +1,14 @@ +use schemars::JsonSchema; +use serde::{Deserialize, Serialize}; + +#[derive(Clone, Default, Debug, Deserialize, Serialize, PartialEq, Copy, JsonSchema)] +#[serde(rename_all = "snake_case")] +pub enum MutationMode { + /// Don't allow any mutations + #[default] + None, + /// Allow explicit mutations, but don't allow the LLM to build them + Explicit, + /// Allow the LLM to build mutations + All, +} diff --git a/crates/apollo-mcp-server/src/operations/operation.rs b/crates/apollo-mcp-server/src/operations/operation.rs new file mode 100644 index 00000000..33a230fc --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/operation.rs @@ -0,0 +1,3065 @@ +use 
std::collections::HashMap; + +use apollo_compiler::{ + Node, Schema as GraphqlSchema, + ast::{Definition, Document, OperationDefinition, OperationType, Selection, Type}, + parser::Parser, + schema::ExtendedType, +}; +use http::{HeaderMap, HeaderValue}; +use regex::Regex; +use rmcp::model::{ErrorCode, Tool, ToolAnnotations}; +use schemars::{Schema, json_schema}; +use serde::Serialize; +use serde_json::{Map, Value}; +use tracing::{debug, info, warn}; + +use crate::{ + custom_scalar_map::CustomScalarMap, + errors::{McpError, OperationError}, + graphql::{self, OperationDetails}, + schema_tree_shake::{DepthLimit, SchemaTreeShaker}, +}; + +use super::{MutationMode, RawOperation, schema_walker}; + +/// A valid GraphQL operation +#[derive(Debug, Clone, Serialize)] +pub struct Operation { + tool: Tool, + inner: RawOperation, + operation_name: String, +} + +impl AsRef for Operation { + fn as_ref(&self) -> &Tool { + &self.tool + } +} + +impl From for Tool { + fn from(value: Operation) -> Tool { + value.tool + } +} + +impl Operation { + pub(crate) fn into_inner(self) -> RawOperation { + self.inner + } + + #[tracing::instrument(skip(graphql_schema, custom_scalar_map))] + pub fn from_document( + raw_operation: RawOperation, + graphql_schema: &GraphqlSchema, + custom_scalar_map: Option<&CustomScalarMap>, + mutation_mode: MutationMode, + disable_type_description: bool, + disable_schema_description: bool, + ) -> Result, OperationError> { + if let Some((document, operation, comments)) = operation_defs( + &raw_operation.source_text, + mutation_mode != MutationMode::None, + raw_operation.source_path.clone(), + )? 
{ + let operation_name = match operation_name(&operation, raw_operation.source_path.clone()) + { + Ok(name) => name, + Err(OperationError::MissingName { + source_path, + operation, + }) => { + if let Some(path) = source_path { + warn!("Skipping unnamed operation in {path}: {operation}"); + } else { + warn!("Skipping unnamed operation: {operation}"); + } + return Ok(None); + } + Err(e) => return Err(e), + }; + let variable_description_overrides = + variable_description_overrides(&raw_operation.source_text, &operation); + let mut tree_shaker = SchemaTreeShaker::new(graphql_schema); + tree_shaker.retain_operation(&operation, &document, DepthLimit::Unlimited); + + let description = Self::tool_description( + comments, + &mut tree_shaker, + graphql_schema, + &operation, + disable_type_description, + disable_schema_description, + ); + + let mut object = serde_json::to_value(get_json_schema( + &operation, + tree_shaker.argument_descriptions(), + &variable_description_overrides, + graphql_schema, + custom_scalar_map, + raw_operation.variables.as_ref(), + ))?; + + // make sure that the properties field exists since schemas::ObjectValidation is + // configured to skip empty maps (in the case where there are no input args) + ensure_properties_exists(&mut object); + + let Value::Object(schema) = object else { + return Err(OperationError::Internal( + "Schemars should have returned an object".to_string(), + )); + }; + + let tool: Tool = Tool::new(operation_name.clone(), description, schema).annotate( + ToolAnnotations::new() + .read_only(operation.operation_type != OperationType::Mutation), + ); + let character_count = tool_character_length(&tool); + match character_count { + Ok(length) => info!( + "Tool {} loaded with a character count of {}. Estimated tokens: {}", + operation_name, + length, + length / 4 // We don't know the tokenization algorithm, so we just use 4 characters per token as a rough estimate. 
https://docs.anthropic.com/en/docs/resources/glossary#tokens + ), + Err(_) => info!( + "Tool {} loaded with an unknown character count", + operation_name + ), + } + Ok(Some(Operation { + tool, + inner: raw_operation, + operation_name, + })) + } else { + Ok(None) + } + } + + /// Generate a description for an operation based on documentation in the schema + #[tracing::instrument(skip(comments, tree_shaker, graphql_schema))] + fn tool_description( + comments: Option, + tree_shaker: &mut SchemaTreeShaker, + graphql_schema: &GraphqlSchema, + operation_def: &Node, + disable_type_description: bool, + disable_schema_description: bool, + ) -> String { + let comment_description = extract_and_format_comments(comments); + + match comment_description { + Some(description) => description, + None => { + // Add the tree-shaken types to the end of the tool description + let mut lines = vec![]; + if !disable_type_description { + let descriptions = operation_def + .selection_set + .iter() + .filter_map(|selection| { + match selection { + Selection::Field(field) => { + let field_name = field.name.to_string(); + let operation_type = operation_def.operation_type; + if let Some(root_name) = + graphql_schema.root_operation(operation_type) + { + // Find the root field referenced by the operation + let root = graphql_schema.get_object(root_name)?; + let field_definition = root + .fields + .iter() + .find(|(name, _)| { + let name = name.to_string(); + name == field_name + }) + .map(|(_, field_definition)| { + field_definition.node.clone() + }); + + // Add the root field description to the tool description + let field_description = field_definition + .clone() + .and_then(|field| field.description.clone()) + .map(|node| node.to_string()); + + // Add information about the return type + let ty = field_definition.map(|field| field.ty.clone()); + let type_description = + ty.as_ref().map(Self::type_description); + + Some( + vec![field_description, type_description] + .into_iter() + .flatten() + 
.collect::>() + .join("\n"), + ) + } else { + None + } + } + _ => None, + } + }) + .collect::>() + .join("\n---\n"); + + // Add the tree-shaken types to the end of the tool description + + lines.push(descriptions); + } + if !disable_schema_description { + let shaken_schema = + tree_shaker.shaken().unwrap_or_else(|schema| schema.partial); + + let mut types = shaken_schema + .types + .iter() + .filter(|(_name, extended_type)| { + !extended_type.is_built_in() + && matches!( + extended_type, + ExtendedType::Object(_) + | ExtendedType::Scalar(_) + | ExtendedType::Enum(_) + | ExtendedType::Interface(_) + | ExtendedType::Union(_) + ) + && graphql_schema + .root_operation(operation_def.operation_type) + .is_none_or(|op_name| extended_type.name() != op_name) + && graphql_schema + .root_operation(OperationType::Query) + .is_none_or(|op_name| extended_type.name() != op_name) + }) + .peekable(); + if types.peek().is_some() { + lines.push(String::from("---")); + } + + for ty in types { + lines.push(ty.1.serialize().to_string()); + } + } + lines.join("\n") + } + } + } + + fn type_description(ty: &Type) -> String { + let type_name = ty.inner_named_type(); + let mut lines = vec![]; + let optional = if ty.is_non_null() { + "" + } else { + "is optional and " + }; + let array = if ty.is_list() { + "is an array of type" + } else { + "has type" + }; + lines.push(format!( + "The returned value {optional}{array} `{type_name}`" + )); + + lines.join("\n") + } +} + +impl graphql::Executable for Operation { + fn persisted_query_id(&self) -> Option { + // TODO: id was being overridden, should we be returning? Should this be behind a flag? 
self.inner.persisted_query_id.clone() + None + } + + fn operation(&self, _input: Value) -> Result { + Ok(OperationDetails { + query: self.inner.source_text.clone(), + operation_name: Some(self.operation_name.clone()), + }) + } + + fn variables(&self, input_variables: Value) -> Result { + if let Some(raw_variables) = self.inner.variables.as_ref() { + let mut variables = match input_variables { + Value::Null => Ok(serde_json::Map::new()), + Value::Object(obj) => Ok(obj.clone()), + _ => Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "Invalid input".to_string(), + None, + )), + }?; + + raw_variables.iter().try_for_each(|(key, value)| { + if variables.contains_key(key) { + Err(McpError::new( + ErrorCode::INVALID_PARAMS, + "No such parameter: {key}", + None, + )) + } else { + variables.insert(key.clone(), value.clone()); + Ok(()) + } + })?; + + Ok(Value::Object(variables)) + } else { + Ok(input_variables) + } + } + + fn headers(&self, default_headers: &HeaderMap) -> HeaderMap { + match self.inner.headers.as_ref() { + None => default_headers.clone(), + Some(raw_headers) if default_headers.is_empty() => raw_headers.clone(), + Some(raw_headers) => { + let mut headers = default_headers.clone(); + raw_headers.iter().for_each(|(key, value)| { + if headers.contains_key(key) { + tracing::debug!( + "Header {} has a default value, overwriting with operation value", + key + ); + } + headers.insert(key, value.clone()); + }); + headers + } + } + } +} + +#[allow(clippy::type_complexity)] +#[tracing::instrument(skip_all)] +pub fn operation_defs( + source_text: &str, + allow_mutations: bool, + source_path: Option, +) -> Result, Option)>, OperationError> { + let source_path_clone = source_path.clone(); + let document = Parser::new() + .parse_ast( + source_text, + source_path_clone.unwrap_or_else(|| "operation.graphql".to_string()), + ) + .map_err(|e| OperationError::GraphQLDocument(Box::new(e)))?; + let mut last_offset: Option = Some(0); + let mut operation_defs = 
document.definitions.clone().into_iter().filter_map(|def| { + let description = match def.location() { + Some(source_span) => { + let description = last_offset + .map(|start_offset| &source_text[start_offset..source_span.offset()]); + last_offset = Some(source_span.end_offset()); + description + } + None => { + last_offset = None; + None + } + }; + + match def { + Definition::OperationDefinition(operation_def) => { + Some((operation_def, description)) + } + Definition::FragmentDefinition(_) => None, + _ => { + eprintln!("Schema definitions were passed in, but only operations and fragments are allowed"); + None + } + } + }); + + let (operation, comments) = match (operation_defs.next(), operation_defs.next()) { + (None, _) => { + return Err(OperationError::NoOperations { source_path }); + } + (_, Some(_)) => { + return Err(OperationError::TooManyOperations { + source_path, + count: 2 + operation_defs.count(), + }); + } + (Some(op), None) => op, + }; + + match operation.operation_type { + OperationType::Subscription => { + debug!( + "Skipping subscription operation {}", + operation_name(&operation, source_path)? + ); + return Ok(None); + } + OperationType::Mutation => { + if !allow_mutations { + warn!( + "Skipping mutation operation {}", + operation_name(&operation, source_path)? + ); + return Ok(None); + } + } + OperationType::Query => {} + } + + Ok(Some((document, operation, comments.map(|c| c.to_string())))) +} + +pub fn operation_name( + operation: &Node, + source_path: Option, +) -> Result { + Ok(operation + .name + .as_ref() + .ok_or_else(|| OperationError::MissingName { + source_path, + operation: operation.serialize().no_indent().to_string(), + })? 
+ .to_string()) +} + +#[tracing::instrument(skip(source_text))] +pub fn variable_description_overrides( + source_text: &str, + operation_definition: &Node, +) -> HashMap { + let mut argument_overrides_map: HashMap = HashMap::new(); + let mut last_offset = find_opening_parens_offset(source_text, operation_definition); + operation_definition + .variables + .iter() + .for_each(|v| match v.location() { + Some(source_span) => { + let comment = last_offset + .map(|start_offset| &source_text[start_offset..source_span.offset()]); + + if let Some(description) = comment.filter(|d| !d.is_empty() && d.contains('#')) + && let Some(description) = + extract_and_format_comments(Some(description.to_string())) + { + argument_overrides_map.insert(v.name.to_string(), description); + } + + last_offset = Some(source_span.end_offset()); + } + None => { + last_offset = None; + } + }); + + argument_overrides_map +} + +#[tracing::instrument(skip(source_text))] +pub fn find_opening_parens_offset( + source_text: &str, + operation_definition: &Node, +) -> Option { + let regex = match Regex::new(r"(?m)^\s*\(") { + Ok(regex) => regex, + Err(_) => return None, + }; + + operation_definition + .name + .as_ref() + .and_then(|n| n.location()) + .map(|span| { + regex + .find(source_text[span.end_offset()..].as_ref()) + .map(|m| m.start() + m.len() + span.end_offset()) + .unwrap_or(0) + }) +} + +pub fn extract_and_format_comments(comments: Option) -> Option { + comments.and_then(|comments| { + let content = Regex::new(r"(\n|^)(\s*,*)*#") + .ok()? 
+ .replace_all(comments.as_str(), "$1"); + let trimmed = content.trim(); + + if trimmed.is_empty() { + None + } else { + Some(trimmed.to_string()) + } + }) +} + +fn ensure_properties_exists(json_object: &mut Value) { + if let Some(obj_type) = json_object.get("type") + && obj_type == "object" + && let Some(obj_map) = json_object.as_object_mut() + { + let props = obj_map + .entry("properties") + .or_insert_with(|| Value::Object(serde_json::Map::new())); + if !props.is_object() { + *props = Value::Object(serde_json::Map::new()); + } + } +} + +fn tool_character_length(tool: &Tool) -> Result { + let tool_schema_string = serde_json::to_string_pretty(&serde_json::json!(tool.input_schema))?; + Ok(tool.name.len() + + tool.description.as_ref().map(|d| d.len()).unwrap_or(0) + + tool_schema_string.len()) +} + +#[tracing::instrument(skip_all)] +fn get_json_schema( + operation: &Node, + schema_argument_descriptions: &HashMap>, + argument_descriptions_overrides: &HashMap, + graphql_schema: &GraphqlSchema, + custom_scalar_map: Option<&CustomScalarMap>, + variable_overrides: Option<&HashMap>, +) -> Schema { + // Default initialize the schema with the bare minimum needed to be a valid object + let mut schema = json_schema!({"type": "object", "properties": {}}); + let mut definitions = Map::new(); + + // TODO: Can this be unwrapped to use `schema_walker::walk` instead? This functionality is doubled + // in some cases. 
+ operation.variables.iter().for_each(|variable| { + let variable_name = variable.name.to_string(); + if !variable_overrides + .map(|o| o.contains_key(&variable_name)) + .unwrap_or_default() + { + // use overridden description if there is one, otherwise use the schema description + let description = argument_descriptions_overrides + .get(&variable_name) + .cloned() + .or_else(|| { + schema_argument_descriptions + .get(&variable_name) + .filter(|d| !d.is_empty()) + .map(|d| d.join("#")) + }); + + let nested = schema_walker::type_to_schema( + variable.ty.as_ref(), + graphql_schema, + &mut definitions, + custom_scalar_map, + description, + ); + schema + .ensure_object() + .entry("properties") + .or_insert(Value::Object(Default::default())) + .as_object_mut() + .get_or_insert(&mut Map::default()) + .insert(variable_name.clone(), nested.into()); + + if variable.ty.is_non_null() { + schema + .ensure_object() + .entry("required") + .or_insert(serde_json::Value::Array(Vec::new())) + .as_array_mut() + .get_or_insert(&mut Vec::default()) + .push(variable_name.into()); + } + } + }); + + // Add the definitions to the overall schema if needed + if !definitions.is_empty() { + schema + .ensure_object() + .insert("definitions".to_string(), definitions.into()); + } + + schema +} + +#[cfg(test)] +mod tests { + use std::{collections::HashMap, str::FromStr as _, sync::LazyLock}; + + use apollo_compiler::{Schema, parser::Parser, validation::Valid}; + use rmcp::model::Tool; + use serde_json::Value; + use tracing_test::traced_test; + + use crate::{ + custom_scalar_map::CustomScalarMap, + graphql::Executable as _, + operations::{MutationMode, Operation, RawOperation}, + }; + + // Example schema for tests + static SCHEMA: LazyLock> = LazyLock::new(|| { + Schema::parse( + r#" + type Query { + id: String + enum: RealEnum + customQuery(""" id description """ id: ID!, """ a flag """ flag: Boolean): OutputType + testOp: OpResponse + } + type Mutation {id: String } + + """ + RealCustomScalar 
exists + """ + scalar RealCustomScalar + input RealInputObject { + """ + optional is a input field that is optional + """ + optional: String + + """ + required is a input field that is required + """ + required: String! + } + + type OpResponse { + id: String + } + + """ + the description for the enum + """ + enum RealEnum { + """ + ENUM_VALUE_1 is a value + """ + ENUM_VALUE_1 + + """ + ENUM_VALUE_2 is a value + """ + ENUM_VALUE_2 + } + + """ + custom output type + """ + type OutputType { + id: ID! + } + "#, + "operation.graphql", + ) + .expect("schema should parse") + .validate() + .expect("schema should be valid") + }); + + /// Serializes the input to JSON, sorting the object keys + macro_rules! to_sorted_json { + ($json:expr) => {{ + let mut j = serde_json::json!($json); + j.sort_all_objects(); + + j + }}; + } + + #[test] + fn nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r#" + { + "properties": { + "id": { + "type": "string" + } + }, + "type": "object" + } + "#); + } + + #[test] + fn 
non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID!) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("string"), + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "string" + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn non_nullable_list_of_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn non_nullable_list_of_non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID!]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "type": String("string"), + }, + }, + }, + "required": Array [ + String("id"), + ], + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r###" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "type": "string" + } + } + }, + "required": [ + "id" + ] + } + "###); + } + + #[test] + fn nullable_list_of_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + }, + output_schema: None, 
+ annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + } + } + } + "#); + } + + #[test] + fn nullable_list_of_non_nullable_named_type() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [ID!]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "type": String("string"), + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "type": "string" + } + } + } + } + "#); + } + + #[test] + fn nullable_list_of_nullable_lists_of_nullable_named_types() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: [[ID]]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + 
source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "type": String("string"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "id": { + "type": "array", + "items": { + "oneOf": [ + { + "type": "array", + "items": { + "oneOf": [ + { + "type": "string" + }, + { + "type": "null" + } + ] + } + }, + { + "type": "null" + } + ] + } + } + } + } + "#); + } + + #[test] + fn nullable_input_object() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealInputObject) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": 
String("#/definitions/RealInputObject"), + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn non_nullable_enum() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealEnum!) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealEnum"), + }, + }, + "required": Array [ + String("id"), + ], + "definitions": Object { + "RealEnum": Object { + "description": String("the description for the enum\n\nValues:\nENUM_VALUE_1: ENUM_VALUE_1 is a value\nENUM_VALUE_2: ENUM_VALUE_2 is a value"), + "type": String("string"), + "enum": Array [ + String("ENUM_VALUE_1"), + String("ENUM_VALUE_2"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + 
"###); + } + + #[test] + fn multiple_operations_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { id } query QueryName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r#" + Err( + TooManyOperations { + source_path: Some( + "operation.graphql", + ), + count: 2, + }, + ) + "#); + } + + #[test] + #[traced_test] + fn unnamed_operations_should_be_skipped() { + let operation = Operation::from_document( + RawOperation { + source_text: "query { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + assert!(operation.unwrap().is_none()); + + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| { + line.contains("Skipping unnamed operation in operation.graphql: { id }") + }) + .then_some(()) + .ok_or("Expected warning about unnamed operation in logs".to_string()) + }); + } + + #[test] + fn no_operations_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "fragment Test on Query { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: Some("operation.graphql".to_string()), + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r#" + Err( + NoOperations { + source_path: Some( + "operation.graphql", + ), + }, + ) + "#); + } + + #[test] + fn schema_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "type Query { id: String }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, 
+ None, + MutationMode::None, + false, + false, + ); + insta::assert_debug_snapshot!(operation, @r" + Err( + NoOperations { + source_path: None, + }, + ) + "); + } + + #[test] + #[traced_test] + fn unknown_type_should_be_any() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: FakeType) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| line.contains("Type not found in schema name=\"FakeType\"")) + .then_some(()) + .ok_or("Expected warning about unknown type in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object {}, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + #[traced_test] + fn custom_scalar_without_map_should_be_any() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| line.contains("custom scalars aren't currently 
supported without a custom_scalar_map name=\"RealCustomScalar\"")) + .then_some(()) + .ok_or("Expected warning about custom scalar without map in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + #[traced_test] + fn custom_scalar_with_map_but_not_found_should_error() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + Some(&CustomScalarMap::from_str("{}").unwrap()), + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + // Verify that a warning was logged + logs_assert(|lines: &[&str]| { + lines + .iter() + .filter(|line| line.contains("WARN")) + .any(|line| { + line.contains( + "custom scalar missing from custom_scalar_map name=\"RealCustomScalar\"", + ) + }) + .then_some(()) + .ok_or("Expected warning about custom scalar missing in logs".to_string()) + }); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": 
String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn custom_scalar_with_map() { + let custom_scalar_map = + CustomScalarMap::from_str("{ \"RealCustomScalar\": { \"type\": \"string\" }}"); + + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: RealCustomScalar) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + custom_scalar_map.ok().as_ref(), + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "id": Object { + "$ref": String("#/definitions/RealCustomScalar"), + }, + }, + "definitions": Object { + "RealCustomScalar": Object { + "description": String("RealCustomScalar exists"), + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn test_tool_description() { + const SCHEMA: &str = r#" + type Query { + """ + Get a list of A + """ + a(input: String!): [A]! 
+ + """ + Get a B + """ + b: B + + """ + Get a Z + """ + z: Z + } + + """ + A + """ + type A { + c: String + d: D + } + + """ + B + """ + type B { + d: D + u: U + } + + """ + D + """ + type D { + e: E + f: String + g: String + } + + """ + E + """ + enum E { + """ + one + """ + ONE + """ + two + """ + TWO + } + + """ + F + """ + scalar F + + """ + U + """ + union U = M | W + + """ + M + """ + type M { + m: Int + } + + """ + W + """ + type W { + w: Int + } + + """ + Z + """ + type Z { + z: Int + zz: Int + zzz: Int + } + "#; + + let document = Parser::new().parse_ast(SCHEMA, "schema.graphql").unwrap(); + let schema = document.to_schema().unwrap(); + + let operation = Operation::from_document( + RawOperation { + source_text: r###" + query GetABZ($state: String!) { + a(input: $input) { + d { + e + } + } + b { + d { + ...JustF + } + u { + ... on M { + m + } + ... on W { + w + } + } + } + z { + ...JustZZZ + } + } + + fragment JustF on D { + f + } + + fragment JustZZZ on Z { + zzz + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &schema, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r#" + Get a list of A + The returned value is an array of type `A` + --- + Get a B + The returned value is optional and has type `B` + --- + Get a Z + The returned value is optional and has type `Z` + --- + """A""" + type A { + d: D + } + + """B""" + type B { + d: D + u: U + } + + """D""" + type D { + e: E + f: String + } + + """E""" + enum E { + """one""" + ONE + """two""" + TWO + } + + """U""" + union U = M | W + + """M""" + type M { + m: Int + } + + """W""" + type W { + w: Int + } + + """Z""" + type Z { + zzz: Int + } + "# + ); + } + + #[test] + fn tool_comment_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###" + # Overridden tool #description + query GetABZ($state: String!) 
{ + b { + d { + f + } + } + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"Overridden tool #description" + ); + } + + #[test] + fn tool_empty_comment_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###" + # + + # + query GetABZ($state: String!) { + id + } + "### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"The returned value is optional and has type `String`" + ); + } + + #[test] + fn no_schema_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r" + The returned value is optional and has type `String` + --- + The returned value is optional and has type `RealEnum` + " + ); + } + + #[test] + fn no_type_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) 
{ id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + true, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @r#" + --- + """the description for the enum""" + enum RealEnum { + """ENUM_VALUE_1 is a value""" + ENUM_VALUE_1 + """ENUM_VALUE_2 is a value""" + ENUM_VALUE_2 + } + "# + ); + } + + #[test] + fn no_type_description_or_schema_description() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query GetABZ($state: String!) { id enum }"###.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + true, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_snapshot!( + operation.tool.description.unwrap(), + @"" + ); + } + + #[test] + fn recursive_inputs() { + let operation = Operation::from_document( + RawOperation { + source_text: r###"query Test($filter: Filter){ + field(filter: $filter) { + id + } + }"### + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &Schema::parse( + r#" + """the filter input""" + input Filter { + """the filter.field field""" + field: String + """the filter.filter field""" + filter: Filter + } + type Query { + """the Query.field field""" + field( + """the filter argument""" + filter: Filter + ): String + } + "#, + "operation.graphql", + ) + .unwrap(), + None, + MutationMode::None, + true, + true, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation.tool, @r###" + Tool { + name: "Test", + title: None, + description: Some( + "", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "filter": Object { + "description": String("the filter argument"), + "$ref": String("#/definitions/Filter"), + }, + }, + "definitions": Object { + "Filter": Object { + 
"description": String("the filter input"), + "type": String("object"), + "properties": Object { + "field": Object { + "description": String("the filter.field field"), + "type": String("string"), + }, + "filter": Object { + "description": String("the filter.filter field"), + "$ref": String("#/definitions/Filter"), + }, + }, + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn with_variable_overrides() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($id: ID, $name: String) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: Some(HashMap::from([( + "id".to_string(), + serde_json::Value::String("v".to_string()), + )])), + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "name": Object { + "type": String("string"), + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + } + + #[test] + fn input_schema_includes_variable_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID) { customQuery(id: $idArg) { id } }" + .to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + 
.unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id description", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn input_schema_includes_joined_variable_descriptions_if_multiple() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID, $flag: Boolean) { customQuery(id: $idArg, flag: $flag) { id @skip(if: $flag) } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "Skipped when true.#a flag", + "type": "boolean" + }, + "idArg": { + "description": "id description", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn input_schema_includes_directive_variable_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($idArg: ID, $skipArg: Boolean) { customQuery(id: $idArg) { id @skip(if: $skipArg) } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": { + "idArg": { + "description": "id description", + "type": "string" + }, + "skipArg": { + "description": "Skipped when true.", + "type": "boolean" + } + } + } + "#); + } + + #[test] + fn 
test_operation_name_with_named_query() { + let source_text = "query GetUser($id: ID!) { user(id: $id) { name email } }"; + let raw_op = RawOperation { + source_text: source_text.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }; + let operation = + Operation::from_document(raw_op, &SCHEMA, None, MutationMode::None, false, false) + .unwrap() + .unwrap(); + + let op_details = operation.operation(Value::Null).unwrap(); + assert_eq!(op_details.operation_name, Some(String::from("GetUser"))); + } + + #[test] + fn test_operation_name_with_named_mutation() { + let source_text = + "mutation CreateUser($input: UserInput!) { createUser(input: $input) { id name } }"; + let raw_op = RawOperation { + source_text: source_text.to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }; + let operation = + Operation::from_document(raw_op, &SCHEMA, None, MutationMode::Explicit, false, false) + .unwrap() + .unwrap(); + + let op_details = operation.operation(Value::Null).unwrap(); + assert_eq!(op_details.operation_name, Some(String::from("CreateUser"))); + } + + #[test] + fn operation_variable_comments_override_schema_descriptions() { + let operation = Operation::from_document( + RawOperation { + source_text: "# operation description\nquery QueryName(# id comment override\n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn operation_variable_comment_override_supports_multiline_comments() { + 
let operation = Operation::from_document( + RawOperation { + source_text: "# operation description\nquery QueryName(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn comment_with_parens_has_comments_extracted_correctly() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName # a comment (with parens)\n(# id comment override\n # multi-line comment \n$idArg: ID) { customQuery(id: $idArg) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn multiline_comment_with_odd_spacing_and_parens_has_comments_extracted_correctly() { + let operation = Operation::from_document( + RawOperation { + source_text: "# operation comment\n\nquery QueryName # a comment \n# extra space\n\n\n# blank lines (with parens)\n\n# another (paren)\n(# id comment override\n # multi-line comment \n$idArg: ID\n, \n# a flag\n$flag: Boolean) { customQuery(id: $idArg, skip: $flag) { id } }".to_string(), + 
persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "a flag", + "type": "boolean" + }, + "idArg": { + "description": "id comment override\n multi-line comment", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn operation_with_no_variables_is_handled_properly() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { customQuery(id: \"123\") { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": {}, + "type": "object" + } + "###); + } + + #[test] + fn commas_between_variables_are_ignored() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName(# id arg\n $idArg: ID,,\n,,\n # a flag\n $flag: Boolean, ,,) { customQuery(id: $idArg, flag: $flag) { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "properties": { + "flag": { + "description": "a flag", + "type": "boolean" + }, + "idArg": { + "description": "id arg", + "type": "string" + } + }, + "type": "object" + } + "###); + } + + 
#[test] + fn input_schema_include_properties_field_even_when_operation_has_no_input_args() { + let operation = Operation::from_document( + RawOperation { + source_text: "query TestOp { testOp { id } }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r#" + { + "properties": {}, + "type": "object" + } + "#); + } + + #[test] + fn nullable_list_of_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject]) { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "oneOf": Array [ + Object { + "$ref": String("#/definitions/RealInputObject"), + }, + Object { + "type": String("null"), + }, + ], + }, + }, + }, + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + 
title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "definitions": { + "RealInputObject": { + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + }, + "required": [ + "required" + ], + "type": "object" + } + }, + "properties": { + "objects": { + "items": { + "oneOf": [ + { + "$ref": "#/definitions/RealInputObject" + }, + { + "type": "null" + } + ] + }, + "type": "array" + } + }, + "type": "object" + } + "###); + } + + #[test] + fn non_nullable_list_of_non_nullable_input_objects() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName($objects: [RealInputObject!]!) 
{ id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object { + "objects": Object { + "type": String("array"), + "items": Object { + "$ref": String("#/definitions/RealInputObject"), + }, + }, + }, + "required": Array [ + String("objects"), + ], + "definitions": Object { + "RealInputObject": Object { + "type": String("object"), + "properties": Object { + "optional": Object { + "description": String("optional is a input field that is optional"), + "type": String("string"), + }, + "required": Object { + "description": String("required is a input field that is required"), + "type": String("string"), + }, + }, + "required": Array [ + String("required"), + ], + }, + }, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + + let json = to_sorted_json!(tool.input_schema); + insta::assert_snapshot!(serde_json::to_string_pretty(&json).unwrap(), @r###" + { + "definitions": { + "RealInputObject": { + "properties": { + "optional": { + "description": "optional is a input field that is optional", + "type": "string" + }, + "required": { + "description": "required is a input field that is required", + "type": "string" + } + }, + "required": [ + "required" + ], + "type": "object" + } + }, + "properties": { + "objects": { + "items": { + "$ref": "#/definitions/RealInputObject" + }, + "type": "array" + } + }, + "required": [ + "objects" + ], + "type": "object" + } + "###); + } + + #[test] + fn 
subscriptions() { + assert!( + Operation::from_document( + RawOperation { + source_text: "subscription SubscriptionName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .is_none() + ); + } + + #[test] + fn mutation_mode_none() { + assert!( + Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .ok() + .unwrap() + .is_none() + ); + } + + #[test] + fn mutation_mode_explicit() { + let operation = Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::Explicit, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation, @r###" + Operation { + tool: Tool { + name: "MutationName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + false, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + }, + inner: RawOperation { + source_text: "mutation MutationName { id }", + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + operation_name: "MutationName", + } + "###); + } + + #[test] + fn mutation_mode_all() { + let operation = Operation::from_document( + RawOperation { + source_text: "mutation MutationName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + 
&SCHEMA, + None, + MutationMode::All, + false, + false, + ) + .unwrap() + .unwrap(); + + insta::assert_debug_snapshot!(operation, @r###" + Operation { + tool: Tool { + name: "MutationName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + false, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + }, + inner: RawOperation { + source_text: "mutation MutationName { id }", + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + operation_name: "MutationName", + } + "###); + } + + #[test] + fn no_variables() { + let operation = Operation::from_document( + RawOperation { + source_text: "query QueryName { id }".to_string(), + persisted_query_id: None, + headers: None, + variables: None, + source_path: None, + }, + &SCHEMA, + None, + MutationMode::None, + false, + false, + ) + .unwrap() + .unwrap(); + let tool = Tool::from(operation); + + insta::assert_debug_snapshot!(tool, @r###" + Tool { + name: "QueryName", + title: None, + description: Some( + "The returned value is optional and has type `String`", + ), + input_schema: { + "type": String("object"), + "properties": Object {}, + }, + output_schema: None, + annotations: Some( + ToolAnnotations { + title: None, + read_only_hint: Some( + true, + ), + destructive_hint: None, + idempotent_hint: None, + open_world_hint: None, + }, + ), + icons: None, + } + "###); + insta::assert_snapshot!(serde_json::to_string_pretty(&serde_json::json!(tool.input_schema)).unwrap(), @r#" + { + "type": "object", + "properties": {} + } + "#); + } +} diff --git a/crates/apollo-mcp-server/src/operations/operation_source.rs b/crates/apollo-mcp-server/src/operations/operation_source.rs new file mode 100644 index 00000000..65f49890 
--- /dev/null +++ b/crates/apollo-mcp-server/src/operations/operation_source.rs @@ -0,0 +1,178 @@ +use std::{ + collections::HashMap, + fs, + path::PathBuf, + sync::{Arc, Mutex}, +}; + +use apollo_mcp_registry::{ + files, + platform_api::operation_collections::{ + collection_poller::CollectionSource, event::CollectionEvent, + }, + uplink::persisted_queries::{ManifestSource, event::Event as ManifestEvent}, +}; +use futures::{Stream, StreamExt as _}; +use tracing::warn; + +use crate::event::Event; + +use super::RawOperation; + +const OPERATION_DOCUMENT_EXTENSION: &str = "graphql"; + +/// The source of the operations exposed as MCP tools +#[derive(Clone, Debug)] +pub enum OperationSource { + /// GraphQL document files + Files(Vec), + + /// Persisted Query manifest + Manifest(ManifestSource), + + /// Operation collection + Collection(CollectionSource), + + /// No operations provided + None, +} + +impl OperationSource { + #[tracing::instrument(skip_all, fields(operation_source = ?self))] + pub async fn into_stream(self) -> impl Stream { + match self { + OperationSource::Files(paths) => Self::stream_file_changes(paths).boxed(), + OperationSource::Manifest(manifest_source) => manifest_source + .into_stream() + .await + .map(|event| { + let ManifestEvent::UpdateManifest(operations) = event; + Event::OperationsUpdated( + operations.into_iter().map(RawOperation::from).collect(), + ) + }) + .boxed(), + OperationSource::Collection(collection_source) => collection_source + .into_stream() + .map(|event| match event { + CollectionEvent::UpdateOperationCollection(operations) => { + match operations + .iter() + .map(RawOperation::try_from) + .collect::, _>>() + { + Ok(operations) => Event::OperationsUpdated(operations), + Err(e) => Event::CollectionError(e), + } + } + CollectionEvent::CollectionError(error) => Event::CollectionError(error), + }) + .boxed(), + OperationSource::None => { + futures::stream::once(async { Event::OperationsUpdated(vec![]) }).boxed() + } + } + } + + 
#[tracing::instrument] + fn stream_file_changes(paths: Vec) -> impl Stream { + let path_count = paths.len(); + let state = Arc::new(Mutex::new(HashMap::>::new())); + futures::stream::select_all(paths.into_iter().map(|path| { + let state = Arc::clone(&state); + files::watch(path.as_ref()) + .filter_map(move |_| { + let path = path.clone(); + let state = Arc::clone(&state); + async move { + let mut operations = Vec::new(); + if path.is_dir() { + // Handle a directory + if let Ok(entries) = fs::read_dir(&path) { + for entry in entries.flatten() { + let entry_path = entry.path(); + if entry_path.extension().and_then(|e| e.to_str()) + == Some(OPERATION_DOCUMENT_EXTENSION) + { + match fs::read_to_string(&entry_path) { + Ok(content) => { + // Be forgiving of empty files in the directory case. + // It likely means a new file was created in an editor, + // but the operation hasn't been written yet. + if !content.trim().is_empty() { + operations.push(RawOperation::from(( + content, + entry_path.to_str().map(|s| s.to_string()), + ))); + } + } + Err(e) => { + return Some(Event::OperationError( + e, + path.to_str().map(|s| s.to_string()), + )); + } + } + } + } + } + } else { + // Handle a single file + match fs::read_to_string(&path) { + Ok(content) => { + if !content.trim().is_empty() { + operations.push(RawOperation::from(( + content, + path.to_str().map(|s| s.to_string()), + ))); + } else { + warn!(?path, "Empty operation file"); + } + } + Err(e) => { + return Some(Event::OperationError( + e, + path.to_str().map(|s| s.to_string()), + )); + } + } + } + match state.lock() { + Ok(mut state) => { + state.insert(path.clone(), operations); + // All paths send an initial event on startup. To avoid repeated + // operation events on startup, wait until all paths have been + // loaded, then send a single event with the operations for all + // paths. 
+ if state.len() == path_count { + Some(Event::OperationsUpdated( + state.values().flatten().cloned().collect::>(), + )) + } else { + None + } + } + Err(_) => Some(Event::OperationError( + std::io::Error::other("State mutex poisoned"), + path.to_str().map(|s| s.to_string()), + )), + } + } + }) + .boxed() + })) + .boxed() + } +} + +impl From for OperationSource { + fn from(manifest_source: ManifestSource) -> Self { + OperationSource::Manifest(manifest_source) + } +} + +impl From> for OperationSource { + fn from(paths: Vec) -> Self { + OperationSource::Files(paths) + } +} diff --git a/crates/apollo-mcp-server/src/operations/raw_operation.rs b/crates/apollo-mcp-server/src/operations/raw_operation.rs new file mode 100644 index 00000000..c850319c --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/raw_operation.rs @@ -0,0 +1,144 @@ +use std::{collections::HashMap, str::FromStr as _}; + +use apollo_compiler::validation::Valid; +use apollo_mcp_registry::platform_api::operation_collections::{ + collection_poller::OperationData, error::CollectionError, +}; +use http::{HeaderMap, HeaderName, HeaderValue}; +use serde_json::Value; + +use crate::{custom_scalar_map::CustomScalarMap, errors::OperationError}; + +use super::{MutationMode, operation::Operation}; + +#[derive(Debug, Clone)] +pub struct RawOperation { + pub(super) source_text: String, + pub(super) persisted_query_id: Option, + pub(super) headers: Option>, + pub(super) variables: Option>, + pub(super) source_path: Option, +} + +impl RawOperation { + pub(crate) fn into_operation( + self, + schema: &Valid, + custom_scalars: Option<&CustomScalarMap>, + mutation_mode: MutationMode, + disable_type_description: bool, + disable_schema_description: bool, + ) -> Result, OperationError> { + Operation::from_document( + self, + schema, + custom_scalars, + mutation_mode, + disable_type_description, + disable_schema_description, + ) + } +} + +impl From<(String, Option)> for RawOperation { + fn from((source_text, source_path): 
(String, Option)) -> Self { + Self { + persisted_query_id: None, + source_text, + headers: None, + variables: None, + source_path, + } + } +} + +impl From<(String, String)> for RawOperation { + fn from((persisted_query_id, source_text): (String, String)) -> Self { + Self { + persisted_query_id: Some(persisted_query_id), + source_text, + headers: None, + variables: None, + source_path: None, + } + } +} + +impl TryFrom<&OperationData> for RawOperation { + type Error = CollectionError; + + fn try_from(operation_data: &OperationData) -> Result { + let variables = if let Some(variables) = operation_data.variables.as_ref() { + if variables.trim().is_empty() { + Some(HashMap::new()) + } else { + Some( + serde_json::from_str::>(variables) + .map_err(|_| CollectionError::InvalidVariables(variables.clone()))?, + ) + } + } else { + None + }; + + let headers = if let Some(headers) = operation_data.headers.as_ref() { + let mut header_map = HeaderMap::new(); + for header in headers { + header_map.insert( + HeaderName::from_str(&header.0).map_err(CollectionError::HeaderName)?, + HeaderValue::from_str(&header.1).map_err(CollectionError::HeaderValue)?, + ); + } + Some(header_map) + } else { + None + }; + + Ok(Self { + persisted_query_id: None, + source_text: operation_data.source_text.clone(), + headers, + variables, + source_path: None, + }) + } +} + +// TODO: This can be greatly simplified by using `serde::serialize_with` on the specific field that does not +// implement `Serialize`. 
+// Custom Serialize implementation for RawOperation +// This is needed because reqwest HeaderMap/HeaderValue/HeaderName don't derive Serialize +impl serde::Serialize for RawOperation { + fn serialize(&self, serializer: S) -> Result + where + S: serde::Serializer, + { + use serde::ser::SerializeStruct; + let mut state = serializer.serialize_struct("RawOperation", 4)?; + state.serialize_field("source_text", &self.source_text)?; + if let Some(ref id) = self.persisted_query_id { + state.serialize_field("persisted_query_id", id)?; + } + if let Some(ref variables) = self.variables { + state.serialize_field("variables", variables)?; + } + if let Some(ref headers) = self.headers { + state.serialize_field( + "headers", + headers + .iter() + .map(|(name, value)| { + format!("{}: {}", name, value.to_str().unwrap_or_default()) + }) + .collect::>() + .join("\n") + .as_str(), + )?; + } + if let Some(ref path) = self.source_path { + state.serialize_field("source_path", path)?; + } + + state.end() + } +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker.rs b/crates/apollo-mcp-server/src/operations/schema_walker.rs new file mode 100644 index 00000000..0229d255 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker.rs @@ -0,0 +1,46 @@ +//! JSON Schema generation utilities +//! +//! The types in this module generate JSON schemas for GraphQL types by walking +//! the types recursively. + +use apollo_compiler::{Schema as GraphQLSchema, ast::Type}; +use schemars::Schema; +use serde_json::{Map, Value}; + +use crate::custom_scalar_map::CustomScalarMap; + +mod name; +mod r#type; + +/// Convert a GraphQL type into a JSON Schema. +/// +/// Note: This is recursive, which might cause a stack overflow if the type is +/// sufficiently nested / complex. 
+pub fn type_to_schema( + r#type: &Type, + schema: &GraphQLSchema, + definitions: &mut Map, + custom_scalar_map: Option<&CustomScalarMap>, + description: Option, +) -> Schema { + r#type::Type { + cache: definitions, + custom_scalar_map, + description: &description, + schema, + r#type, + } + .into() +} + +/// Modifies a schema to include an optional description +fn with_desc(mut schema: Schema, description: &Option) -> Schema { + if let Some(desc) = description { + schema + .ensure_object() + .entry("description") + .or_insert(desc.clone().into()); + } + + schema +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/name.rs b/crates/apollo-mcp-server/src/operations/schema_walker/name.rs new file mode 100644 index 00000000..c6452bda --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker/name.rs @@ -0,0 +1,199 @@ +use apollo_compiler::{Name as GraphQLName, Node, Schema as GraphQLSchema, schema::ExtendedType}; +use schemars::{Schema as JSONSchema, json_schema}; +use serde_json::{Map, Value}; +use tracing::warn; + +use crate::custom_scalar_map::CustomScalarMap; + +use super::{r#type::Type, with_desc}; + +/// A GraphQL Named Walker +pub(super) struct Name<'a> { + /// The definition cache which contains full schemas for nested types + pub(super) cache: &'a mut Map, + + /// Custom scalar map for supplementing information from the GraphQL schema + pub(super) custom_scalar_map: Option<&'a CustomScalarMap>, + + /// The optional description of the named type, from comments in the schema + pub(super) description: &'a Option, + + /// The actual named type to translate into a JSON schema + pub(super) name: &'a GraphQLName, + + /// The original GraphQL schema with all type information + pub(super) schema: &'a GraphQLSchema, +} + +impl From> for JSONSchema { + fn from( + Name { + cache, + custom_scalar_map, + description, + name, + schema, + }: Name, + ) -> Self { + let unknown_type = json_schema!({}); + + let result = match name.as_str() { + // 
Basic types map nicely + "String" | "ID" => json_schema!({"type": "string"}), + "Int" | "Float" => json_schema!({"type": "number"}), + "Boolean" => json_schema!({"type": "boolean"}), + + // If we've already cached it, then return the reference immediately + cached if cache.contains_key(cached) => { + JSONSchema::new_ref(format!("#/definitions/{cached}")) + } + + // Otherwise generate the dependent type + other => match schema.types.get(other) { + // Enums need to collect descriptions per field while also enumerating + // all possible values + Some(ExtendedType::Enum(r#enum)) => { + // Collect all fields such that each field is shown as + // : + let values = r#enum + .values + .iter() + .map(|(name, value)| { + format!( + "{}: {}", + name, + value + .description + .as_ref() + .map(|d| d.to_string()) + .unwrap_or_default() + ) + }) + .collect::>() + .join("\n"); + + // Consolidate all of the values such that we get a high-level + // description (from the schema) followed by its values + let description = format!( + "{}\n\nValues:\n{}", + r#enum + .description + .as_ref() + .map(Node::as_str) + .unwrap_or_default(), + values + ); + + cache.insert( + other.to_string(), + with_desc(json_schema!({ + "type": "string", + "enum": r#enum.values.iter().map(|(_, value)| serde_json::json!(value.value)).collect::>(), + }), + &Some(description), + ).into(), + ); + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Input types need to be traversed over their fields to ensure that they copy over + // nested structure. + Some(ExtendedType::InputObject(input)) => { + // Insert temporary value into map so any recursive references will not try to also create it. 
+ cache.insert(other.to_string(), Default::default()); + + let mut input_schema = with_desc( + json_schema!({"type": "object", "properties": {}}), + &input.description.as_ref().map(Node::to_string), + ); + for (name, field) in input.fields.iter() { + let field_description = field.description.as_ref().map(|n| n.to_string()); + input_schema + .ensure_object() + .entry("properties") + .or_insert(Value::Object(Default::default())) + .as_object_mut() + .get_or_insert(&mut Map::default()) + .insert( + name.to_string(), + JSONSchema::from(Type { + cache, + custom_scalar_map, + description: &field_description, + schema, + r#type: &field.ty, + }) + .into(), + ); + + // Mark any non-nullable fields as being required + if field.is_required() { + input_schema + .ensure_object() + .entry("required") + .or_insert(Value::Array(Default::default())) + .as_array_mut() + .get_or_insert(&mut Vec::default()) + .push(name.to_string().into()); + } + } + + cache.insert(other.to_string(), input_schema.into()); + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Custom scalars need to be opaquely copied over as types with no further processing + Some(ExtendedType::Scalar(scalar)) => { + // The default scalar description should always be from the scalar in the schema itself + let default_scalar_description = + scalar.description.as_ref().map(Node::to_string); + + if let Some(custom_scalar_map) = custom_scalar_map { + if let Some(custom_scalar_schema_object) = custom_scalar_map.get(other) { + // The custom scalar schema might have an override for the description, so we extract it here. 
+ let mut scalar_schema = custom_scalar_schema_object.clone(); + let description = scalar_schema + .ensure_object() + .get("description") + .and_then(Value::as_str) + .map(str::to_string); + + cache.insert( + other.to_string(), + with_desc( + custom_scalar_schema_object.clone(), + // The description could have been overridden by the custom schema, so we prioritize it here + &description.or(default_scalar_description), + ) + .into(), + ); + } else { + warn!(name=?other, "custom scalar missing from custom_scalar_map"); + cache.insert( + other.to_string(), + with_desc(JSONSchema::default(), &default_scalar_description) + .into(), + ); + } + } else { + warn!(name=?other, "custom scalars aren't currently supported without a custom_scalar_map"); + cache.insert( + other.to_string(), + with_desc(JSONSchema::default(), &default_scalar_description).into(), + ); + } + + JSONSchema::new_ref(format!("#/definitions/{other}")) + } + + // Anything else is unhandled + _ => { + warn!(name=?other, "Type not found in schema"); + unknown_type + } + }, + }; + + with_desc(result, description) + } +} diff --git a/crates/apollo-mcp-server/src/operations/schema_walker/type.rs b/crates/apollo-mcp-server/src/operations/schema_walker/type.rs new file mode 100644 index 00000000..e38df4a0 --- /dev/null +++ b/crates/apollo-mcp-server/src/operations/schema_walker/type.rs @@ -0,0 +1,74 @@ +use apollo_compiler::{Schema as GraphQLSchema, ast::Type as GraphQLType}; +use schemars::{Schema as JSONSchema, json_schema}; +use serde_json::{Map, Value}; + +use crate::custom_scalar_map::CustomScalarMap; + +use super::name::Name; + +pub(super) struct Type<'a> { + /// The definition cache which contains full schemas for nested types + pub(super) cache: &'a mut Map, + + /// Custom scalar map for supplementing information from the GraphQL schema + pub(super) custom_scalar_map: Option<&'a CustomScalarMap>, + + /// The optional description of the type, from comments in the schema + pub(super) description: &'a 
Option, + + /// The original GraphQL schema with all type information + pub(super) schema: &'a GraphQLSchema, + + /// The actual type to translate into a JSON schema + pub(super) r#type: &'a GraphQLType, +} + +impl From> for JSONSchema { + fn from( + Type { + cache, + custom_scalar_map, + description, + schema, + r#type, + }: Type, + ) -> Self { + // JSON Schema assumes that all properties are nullable unless there is a + // required field, so we treat cases the same here. + match r#type { + GraphQLType::List(list) | GraphQLType::NonNullList(list) => { + let nested_schema: JSONSchema = Type { + cache, + custom_scalar_map, + description, + schema, + r#type: list, + } + .into(); + + // Arrays, however, do need to specify that fields can be null + let nested_schema = if list.is_non_null() { + nested_schema + } else { + json_schema!({"oneOf": [ + nested_schema, + {"type": "null"}, + ]}) + }; + + json_schema!({ + "type": "array", + "items": nested_schema, + }) + } + + GraphQLType::Named(name) | GraphQLType::NonNullNamed(name) => JSONSchema::from(Name { + cache, + custom_scalar_map, + description, + name, + schema, + }), + } + } +} diff --git a/crates/apollo-mcp-server/src/runtime.rs b/crates/apollo-mcp-server/src/runtime.rs index bc63f7af..71a39684 100644 --- a/crates/apollo-mcp-server/src/runtime.rs +++ b/crates/apollo-mcp-server/src/runtime.rs @@ -5,6 +5,7 @@ mod config; mod endpoint; +mod filtering_exporter; mod graphos; mod introspection; pub mod logging; @@ -12,6 +13,7 @@ mod operation_source; mod overrides; mod schema_source; mod schemas; +pub mod telemetry; use std::path::Path; @@ -130,8 +132,13 @@ mod test { let config = " endpoint: http://from_file:4000/ "; + let saved_path = std::env::var("PATH").unwrap_or_default(); + let workspace = env!("CARGO_MANIFEST_DIR"); figment::Jail::expect_with(move |jail| { + jail.clear_env(); + jail.set_env("PATH", &saved_path); + jail.set_env("INSTA_WORKSPACE_ROOT", workspace); let path = "config.yaml"; jail.create_file(path, 
config)?; @@ -144,6 +151,33 @@ mod test { insta::assert_debug_snapshot!(config, @r#" Config { + cors: CorsConfig { + enabled: false, + origins: [], + match_origins: [], + allow_any_origin: false, + allow_credentials: false, + allow_methods: [ + "GET", + "POST", + "DELETE", + ], + allow_headers: [ + "content-type", + "mcp-protocol-version", + "mcp-session-id", + "traceparent", + "tracestate", + ], + expose_headers: [ + "mcp-session-id", + "traceparent", + "tracestate", + ], + max_age: Some( + 7200, + ), + }, custom_scalars: None, endpoint: Endpoint( Url { @@ -242,6 +276,11 @@ mod test { path: None, rotation: Hourly, }, + telemetry: Telemetry { + exporters: None, + service_name: None, + version: None, + }, operations: Infer, overrides: Overrides { disable_type_description: false, diff --git a/crates/apollo-mcp-server/src/runtime/config.rs b/crates/apollo-mcp-server/src/runtime/config.rs index 8f64f518..598462bd 100644 --- a/crates/apollo-mcp-server/src/runtime/config.rs +++ b/crates/apollo-mcp-server/src/runtime/config.rs @@ -1,6 +1,6 @@ use std::path::PathBuf; -use apollo_mcp_server::{health::HealthCheckConfig, server::Transport}; +use apollo_mcp_server::{cors::CorsConfig, health::HealthCheckConfig, server::Transport}; use reqwest::header::HeaderMap; use schemars::JsonSchema; use serde::Deserialize; @@ -8,13 +8,16 @@ use url::Url; use super::{ OperationSource, SchemaSource, endpoint::Endpoint, graphos::GraphOSConfig, - introspection::Introspection, logging::Logging, overrides::Overrides, + introspection::Introspection, logging::Logging, overrides::Overrides, telemetry::Telemetry, }; /// Configuration for the MCP server #[derive(Debug, Default, Deserialize, JsonSchema)] #[serde(default)] pub struct Config { + /// CORS configuration + pub cors: CorsConfig, + /// Path to a custom scalar map pub custom_scalars: Option, @@ -40,6 +43,9 @@ pub struct Config { /// Logging configuration pub logging: Logging, + /// Telemetry configuration + pub telemetry: Telemetry, + /// 
Operations pub operations: OperationSource, diff --git a/crates/apollo-mcp-server/src/runtime/filtering_exporter.rs b/crates/apollo-mcp-server/src/runtime/filtering_exporter.rs new file mode 100644 index 00000000..e56fc3eb --- /dev/null +++ b/crates/apollo-mcp-server/src/runtime/filtering_exporter.rs @@ -0,0 +1,232 @@ +use opentelemetry::{Key, KeyValue}; +use opentelemetry_sdk::Resource; +use opentelemetry_sdk::error::OTelSdkResult; +use opentelemetry_sdk::trace::{SpanData, SpanExporter}; +use std::collections::HashSet; +use std::fmt::Debug; + +#[derive(Debug)] +pub struct FilteringExporter { + inner: E, + omitted: HashSet, +} + +impl FilteringExporter { + pub fn new(inner: E, omitted: impl IntoIterator) -> Self { + Self { + inner, + omitted: omitted.into_iter().collect(), + } + } +} + +impl SpanExporter for FilteringExporter +where + E: SpanExporter + Send + Sync, +{ + fn export(&self, mut batch: Vec) -> impl Future + Send { + for span in &mut batch { + span.attributes + .retain(|kv| filter_omitted_apollo_attributes(kv, &self.omitted)); + } + + self.inner.export(batch) + } + + fn shutdown(&mut self) -> OTelSdkResult { + self.inner.shutdown() + } + fn force_flush(&mut self) -> OTelSdkResult { + self.inner.force_flush() + } + fn set_resource(&mut self, r: &Resource) { + self.inner.set_resource(r) + } +} + +fn filter_omitted_apollo_attributes(kv: &KeyValue, omitted_attributes: &HashSet) -> bool { + !kv.key.as_str().starts_with("apollo.") || !omitted_attributes.contains(&kv.key) +} + +#[cfg(test)] +mod tests { + use crate::runtime::filtering_exporter::FilteringExporter; + use opentelemetry::trace::{SpanContext, SpanKind, Status, TraceState}; + use opentelemetry::{InstrumentationScope, Key, KeyValue, SpanId, TraceFlags, TraceId}; + use opentelemetry_sdk::Resource; + use opentelemetry_sdk::error::{OTelSdkError, OTelSdkResult}; + use opentelemetry_sdk::trace::{SpanData, SpanEvents, SpanExporter, SpanLinks}; + use std::collections::HashSet; + use std::fmt::Debug; + use 
std::future::ready; + use std::time::SystemTime; + + #[cfg_attr(coverage_nightly, coverage(off))] + fn create_mock_span_data() -> SpanData { + let span_context: SpanContext = SpanContext::new( + TraceId::from_u128(1), + SpanId::from_u64(12345), + TraceFlags::default(), + true, // is_remote + TraceState::default(), + ); + + SpanData { + span_context, + parent_span_id: SpanId::from_u64(54321), + span_kind: SpanKind::Internal, + name: "test-span".into(), + start_time: SystemTime::UNIX_EPOCH, + end_time: SystemTime::UNIX_EPOCH, + attributes: vec![ + KeyValue::new("http.method", "GET"), + KeyValue::new("apollo.mock", "mock"), + ], + dropped_attributes_count: 0, + events: SpanEvents::default(), + links: SpanLinks::default(), + status: Status::Ok, + instrumentation_scope: InstrumentationScope::builder("test-service") + .with_version("1.0.0") + .build(), + } + } + + #[tokio::test] + async fn filtering_exporter_filters_omitted_apollo_attributes() { + #[derive(Debug)] + struct TestExporter {} + + #[cfg_attr(coverage_nightly, coverage(off))] + impl SpanExporter for TestExporter { + fn export(&self, batch: Vec) -> impl Future + Send { + batch.into_iter().for_each(|span| { + if span + .attributes + .iter() + .any(|kv| kv.key.as_str().starts_with("apollo.")) + { + panic!("Omitted attributes were not filtered"); + } + }); + + ready(Ok(())) + } + + fn shutdown(&mut self) -> OTelSdkResult { + Ok(()) + } + + fn force_flush(&mut self) -> OTelSdkResult { + Ok(()) + } + + fn set_resource(&mut self, _resource: &Resource) {} + } + + let mut omitted = HashSet::new(); + omitted.insert(Key::from_static_str("apollo.mock")); + let mock_exporter = TestExporter {}; + let mock_span_data = create_mock_span_data(); + + let filtering_exporter = FilteringExporter::new(mock_exporter, omitted); + filtering_exporter + .export(vec![mock_span_data]) + .await + .expect("Export error"); + } + + #[tokio::test] + async fn filtering_exporter_calls_inner_exporter_on_shutdown() { + #[derive(Debug)] + struct 
TestExporter {} + + #[cfg_attr(coverage_nightly, coverage(off))] + impl SpanExporter for TestExporter { + fn export(&self, _batch: Vec) -> impl Future + Send { + ready(Err(OTelSdkError::InternalFailure( + "unexpected call".to_string(), + ))) + } + + fn shutdown(&mut self) -> OTelSdkResult { + Ok(()) + } + + fn force_flush(&mut self) -> OTelSdkResult { + Err(OTelSdkError::InternalFailure("unexpected call".to_string())) + } + + fn set_resource(&mut self, _resource: &Resource) { + unreachable!("should not be called"); + } + } + + let mock_exporter = TestExporter {}; + + let mut filtering_exporter = FilteringExporter::new(mock_exporter, HashSet::new()); + assert!(filtering_exporter.shutdown().is_ok()); + } + + #[tokio::test] + async fn filtering_exporter_calls_inner_exporter_on_force_flush() { + #[derive(Debug)] + struct TestExporter {} + + #[cfg_attr(coverage_nightly, coverage(off))] + impl SpanExporter for TestExporter { + fn export(&self, _batch: Vec) -> impl Future + Send { + ready(Err(OTelSdkError::InternalFailure( + "unexpected call".to_string(), + ))) + } + + fn shutdown(&mut self) -> OTelSdkResult { + Err(OTelSdkError::InternalFailure("unexpected call".to_string())) + } + + fn force_flush(&mut self) -> OTelSdkResult { + Ok(()) + } + + fn set_resource(&mut self, _resource: &Resource) { + unreachable!("should not be called"); + } + } + + let mock_exporter = TestExporter {}; + + let mut filtering_exporter = FilteringExporter::new(mock_exporter, HashSet::new()); + assert!(filtering_exporter.force_flush().is_ok()); + } + + #[tokio::test] + async fn filtering_exporter_calls_inner_exporter_on_set_resource() { + #[derive(Debug)] + struct TestExporter {} + + #[cfg_attr(coverage_nightly, coverage(off))] + impl SpanExporter for TestExporter { + fn export(&self, _batch: Vec) -> impl Future + Send { + ready(Err(OTelSdkError::InternalFailure( + "unexpected call".to_string(), + ))) + } + + fn shutdown(&mut self) -> OTelSdkResult { + 
Err(OTelSdkError::InternalFailure("unexpected call".to_string())) + } + + fn force_flush(&mut self) -> OTelSdkResult { + Err(OTelSdkError::InternalFailure("unexpected call".to_string())) + } + + fn set_resource(&mut self, _resource: &Resource) {} + } + + let mock_exporter = TestExporter {}; + + let mut filtering_exporter = FilteringExporter::new(mock_exporter, HashSet::new()); + filtering_exporter.set_resource(&Resource::builder_empty().build()); + } +} diff --git a/crates/apollo-mcp-server/src/runtime/logging.rs b/crates/apollo-mcp-server/src/runtime/logging.rs index 70a47e30..45c9d256 100644 --- a/crates/apollo-mcp-server/src/runtime/logging.rs +++ b/crates/apollo-mcp-server/src/runtime/logging.rs @@ -12,14 +12,10 @@ use schemars::JsonSchema; use serde::Deserialize; use std::path::PathBuf; use tracing::Level; -use tracing_appender::non_blocking::WorkerGuard; use tracing_appender::rolling::RollingFileAppender; use tracing_subscriber::EnvFilter; +use tracing_subscriber::fmt::Layer; use tracing_subscriber::fmt::writer::BoxMakeWriter; -use tracing_subscriber::layer::SubscriberExt; -use tracing_subscriber::util::SubscriberInitExt; - -use super::Config; /// Logging related options #[derive(Debug, Deserialize, JsonSchema)] @@ -52,31 +48,43 @@ impl Default for Logging { } } +type LoggingLayerResult = ( + Layer< + tracing_subscriber::Registry, + tracing_subscriber::fmt::format::DefaultFields, + tracing_subscriber::fmt::format::Format, + BoxMakeWriter, + >, + Option, +); + impl Logging { - pub fn setup(config: &Config) -> Result, anyhow::Error> { - let mut env_filter = - EnvFilter::from_default_env().add_directive(config.logging.level.into()); + pub fn env_filter(logging: &Logging) -> Result { + let mut env_filter = EnvFilter::from_default_env().add_directive(logging.level.into()); - if config.logging.level == Level::INFO { + if logging.level == Level::INFO { env_filter = env_filter .add_directive("rmcp=warn".parse()?) 
.add_directive("tantivy=warn".parse()?); } + Ok(env_filter) + } + pub fn logging_layer(logging: &Logging) -> Result { macro_rules! log_error { () => { |e| eprintln!("Failed to setup logging: {e:?}") }; } - let (writer, guard, with_ansi) = match config.logging.path.clone() { + let (writer, guard, with_ansi) = match logging.path.clone() { Some(path) => std::fs::create_dir_all(&path) .map(|_| path) .inspect_err(log_error!()) .ok() .and_then(|path| { RollingFileAppender::builder() - .rotation(config.logging.rotation.clone().into()) + .rotation(logging.rotation.clone().into()) .filename_prefix("apollo_mcp_server") .filename_suffix("log") .build(path) @@ -98,17 +106,13 @@ impl Logging { None => (BoxMakeWriter::new(std::io::stdout), None, true), }; - tracing_subscriber::registry() - .with(env_filter) - .with( - tracing_subscriber::fmt::layer() - .with_writer(writer) - .with_ansi(with_ansi) - .with_target(false), - ) - .init(); - - Ok(guard) + Ok(( + tracing_subscriber::fmt::layer() + .with_writer(writer) + .with_ansi(with_ansi) + .with_target(false), + guard, + )) } } diff --git a/crates/apollo-mcp-server/src/runtime/telemetry.rs b/crates/apollo-mcp-server/src/runtime/telemetry.rs new file mode 100644 index 00000000..d5d0688b --- /dev/null +++ b/crates/apollo-mcp-server/src/runtime/telemetry.rs @@ -0,0 +1,408 @@ +mod sampler; + +use crate::runtime::Config; +use crate::runtime::filtering_exporter::FilteringExporter; +use crate::runtime::logging::Logging; +use crate::runtime::telemetry::sampler::SamplerOption; +use apollo_mcp_server::generated::telemetry::TelemetryAttribute; +use opentelemetry::{Key, KeyValue, global, trace::TracerProvider as _}; +use opentelemetry_otlp::WithExportConfig; +use opentelemetry_sdk::metrics::{Instrument, Stream}; +use opentelemetry_sdk::{ + Resource, + metrics::{MeterProviderBuilder, PeriodicReader, SdkMeterProvider}, + propagation::TraceContextPropagator, + trace::{RandomIdGenerator, SdkTracerProvider}, +}; +use 
opentelemetry_semantic_conventions::{ + SCHEMA_URL, + attribute::{DEPLOYMENT_ENVIRONMENT_NAME, SERVICE_VERSION}, +}; +use schemars::JsonSchema; +use serde::Deserialize; +use std::collections::HashSet; +use tracing_opentelemetry::{MetricsLayer, OpenTelemetryLayer}; +use tracing_subscriber::{layer::SubscriberExt, util::SubscriberInitExt}; + +/// Telemetry related options +#[derive(Debug, Deserialize, JsonSchema, Default)] +pub struct Telemetry { + exporters: Option, + service_name: Option, + version: Option, +} + +#[derive(Debug, Deserialize, JsonSchema)] +pub struct Exporters { + metrics: Option, + tracing: Option, +} + +#[derive(Debug, Deserialize, JsonSchema)] +pub struct MetricsExporters { + otlp: Option, + omitted_attributes: Option>, +} + +#[derive(Debug, Deserialize, JsonSchema)] +pub struct OTLPMetricExporter { + endpoint: String, + protocol: String, +} + +impl Default for OTLPMetricExporter { + fn default() -> Self { + Self { + endpoint: "http://localhost:4317".into(), + protocol: "grpc".into(), + } + } +} + +#[derive(Debug, Deserialize, JsonSchema)] +pub struct TracingExporters { + otlp: Option, + sampler: Option, + omitted_attributes: Option>, +} + +#[derive(Debug, Deserialize, JsonSchema)] +pub struct OTLPTracingExporter { + endpoint: String, + protocol: String, +} + +impl Default for OTLPTracingExporter { + fn default() -> Self { + Self { + endpoint: "http://localhost:4317".into(), + protocol: "grpc".into(), + } + } +} + +fn resource(telemetry: &Telemetry) -> Resource { + let service_name = telemetry + .service_name + .clone() + .unwrap_or_else(|| env!("CARGO_PKG_NAME").to_string()); + + let service_version = telemetry + .version + .clone() + .unwrap_or_else(|| env!("CARGO_PKG_VERSION").to_string()); + + let deployment_env = std::env::var("ENVIRONMENT").unwrap_or_else(|_| "development".to_string()); + + Resource::builder() + .with_service_name(service_name) + .with_schema_url( + [ + KeyValue::new(SERVICE_VERSION, service_version), + 
KeyValue::new(DEPLOYMENT_ENVIRONMENT_NAME, deployment_env), + ], + SCHEMA_URL, + ) + .build() +} + +fn init_meter_provider(telemetry: &Telemetry) -> Result { + let metrics_exporters = telemetry + .exporters + .as_ref() + .and_then(|exporters| exporters.metrics.as_ref()); + + let otlp = metrics_exporters + .and_then(|metrics_exporters| metrics_exporters.otlp.as_ref()) + .ok_or_else(|| { + anyhow::anyhow!("No metrics exporters configured, at least one is required") + })?; + + let exporter = match otlp.protocol.as_str() { + "grpc" => opentelemetry_otlp::MetricExporter::builder() + .with_tonic() + .with_endpoint(otlp.endpoint.clone()) + .build()?, + "http/protobuf" => opentelemetry_otlp::MetricExporter::builder() + .with_http() + .with_endpoint(otlp.endpoint.clone()) + .build()?, + other => { + return Err(anyhow::anyhow!( + "Unsupported OTLP protocol: {other}. Supported protocols are: grpc, http/protobuf" + )); + } + }; + + let omitted_attributes: HashSet = metrics_exporters + .and_then(|exporters| exporters.omitted_attributes.clone()) + .unwrap_or_default(); + let included_attributes: Vec = TelemetryAttribute::included_attributes(omitted_attributes) + .iter() + .map(|a| a.to_key()) + .collect(); + + let reader = PeriodicReader::builder(exporter) + .with_interval(std::time::Duration::from_secs(30)) + .build(); + + let filtered_view = move |i: &Instrument| { + if i.name().starts_with("apollo.") { + Stream::builder() + .with_allowed_attribute_keys(included_attributes.clone()) // if available in your version + .build() + .ok() + } else { + None + } + }; + + let meter_provider = MeterProviderBuilder::default() + .with_resource(resource(telemetry)) + .with_reader(reader) + .with_view(filtered_view) + .build(); + + Ok(meter_provider) +} + +fn init_tracer_provider(telemetry: &Telemetry) -> Result { + let tracer_exporters = telemetry + .exporters + .as_ref() + .and_then(|exporters| exporters.tracing.as_ref()); + + let otlp = tracer_exporters + .and_then(|tracing_exporters| 
tracing_exporters.otlp.as_ref()) + .ok_or_else(|| { + anyhow::anyhow!("No tracing exporters configured, at least one is required") + })?; + + let exporter = match otlp.protocol.as_str() { + "grpc" => opentelemetry_otlp::SpanExporter::builder() + .with_tonic() + .with_endpoint(otlp.endpoint.clone()) + .build()?, + "http/protobuf" => opentelemetry_otlp::SpanExporter::builder() + .with_http() + .with_endpoint(otlp.endpoint.clone()) + .build()?, + other => { + return Err(anyhow::anyhow!( + "Unsupported OTLP protocol: {other}. Supported protocols are: grpc, http/protobuf" + )); + } + }; + + let sampler: opentelemetry_sdk::trace::Sampler = tracer_exporters + .as_ref() + .and_then(|e| e.sampler.clone()) + .unwrap_or_default() + .into(); + + let omitted_attributes: HashSet = tracer_exporters + .and_then(|exporters| exporters.omitted_attributes.clone()) + .map(|set| set.iter().map(|a| a.to_key()).collect()) + .unwrap_or_default(); + + let filtering_exporter = FilteringExporter::new(exporter, omitted_attributes); + + let tracer_provider = SdkTracerProvider::builder() + .with_id_generator(RandomIdGenerator::default()) + .with_resource(resource(telemetry)) + .with_batch_exporter(filtering_exporter) + .with_sampler(sampler) + .build(); + + Ok(tracer_provider) +} + +/// Initialize tracing-subscriber and return TelemetryGuard for logging and opentelemetry-related termination processing +pub fn init_tracing_subscriber(config: &Config) -> Result { + let tracer_provider = if let Some(exporters) = &config.telemetry.exporters { + if let Some(_tracing_exporters) = &exporters.tracing { + init_tracer_provider(&config.telemetry)? + } else { + SdkTracerProvider::builder().build() + } + } else { + SdkTracerProvider::builder().build() + }; + let meter_provider = if let Some(exporters) = &config.telemetry.exporters { + if let Some(_metrics_exporters) = &exporters.metrics { + init_meter_provider(&config.telemetry)? 
+ } else { + SdkMeterProvider::builder().build() + } + } else { + SdkMeterProvider::builder().build() + }; + let env_filter = Logging::env_filter(&config.logging)?; + let (logging_layer, logging_guard) = Logging::logging_layer(&config.logging)?; + + let tracer = tracer_provider.tracer("apollo-mcp-trace"); + + global::set_meter_provider(meter_provider.clone()); + global::set_text_map_propagator(TraceContextPropagator::new()); + global::set_tracer_provider(tracer_provider.clone()); + + tracing_subscriber::registry() + .with(logging_layer) + .with(env_filter) + .with(MetricsLayer::new(meter_provider.clone())) + .with(OpenTelemetryLayer::new(tracer)) + .try_init()?; + + Ok(TelemetryGuard { + tracer_provider, + meter_provider, + logging_guard, + }) +} + +pub struct TelemetryGuard { + tracer_provider: SdkTracerProvider, + meter_provider: SdkMeterProvider, + logging_guard: Option, +} + +impl Drop for TelemetryGuard { + fn drop(&mut self) { + if let Err(err) = self.tracer_provider.shutdown() { + tracing::error!("{err:?}"); + } + if let Err(err) = self.meter_provider.shutdown() { + tracing::error!("{err:?}"); + } + drop(self.logging_guard.take()); + } +} + +#[cfg(test)] +mod tests { + use super::*; + + fn test_config( + service_name: Option<&str>, + version: Option<&str>, + metrics: Option, + tracing: Option, + ) -> Config { + Config { + telemetry: Telemetry { + exporters: Some(Exporters { metrics, tracing }), + service_name: service_name.map(|s| s.to_string()), + version: version.map(|v| v.to_string()), + }, + ..Default::default() + } + } + + #[tokio::test] + async fn guard_is_provided_when_tracing_configured() { + let mut ommitted = HashSet::new(); + ommitted.insert(TelemetryAttribute::RequestId); + + let config = test_config( + Some("test-config"), + Some("1.0.0"), + Some(MetricsExporters { + otlp: Some(OTLPMetricExporter::default()), + omitted_attributes: None, + }), + Some(TracingExporters { + otlp: Some(OTLPTracingExporter::default()), + sampler: Default::default(), + 
omitted_attributes: Some(ommitted), + }), + ); + // init_tracing_subscriber can only be called once in the test suite to avoid + // panic when calling global::set_tracer_provider multiple times + let guard = init_tracing_subscriber(&config); + assert!(guard.is_ok()); + } + + #[tokio::test] + async fn unknown_protocol_raises_meter_provider_error() { + let config = test_config( + None, + None, + Some(MetricsExporters { + otlp: Some(OTLPMetricExporter { + protocol: "bogus".to_string(), + endpoint: "http://localhost:4317".to_string(), + }), + omitted_attributes: None, + }), + None, + ); + let result = init_meter_provider(&config.telemetry); + assert!( + result + .err() + .map(|e| e.to_string().contains("Unsupported OTLP protocol")) + .unwrap_or(false) + ); + } + + #[tokio::test] + async fn http_protocol_returns_valid_meter_provider() { + let config = test_config( + None, + None, + Some(MetricsExporters { + otlp: Some(OTLPMetricExporter { + protocol: "http/protobuf".to_string(), + endpoint: "http://localhost:4318/v1/metrics".to_string(), + }), + omitted_attributes: None, + }), + None, + ); + let result = init_meter_provider(&config.telemetry); + assert!(result.is_ok()); + } + + #[tokio::test] + async fn unknown_protocol_raises_tracer_provider_error() { + let config = test_config( + None, + None, + None, + Some(TracingExporters { + otlp: Some(OTLPTracingExporter { + protocol: "bogus".to_string(), + endpoint: "http://localhost:4317".to_string(), + }), + sampler: Default::default(), + omitted_attributes: None, + }), + ); + let result = init_tracer_provider(&config.telemetry); + assert!( + result + .err() + .map(|e| e.to_string().contains("Unsupported OTLP protocol")) + .unwrap_or(false) + ); + } + + #[tokio::test] + async fn http_protocol_returns_valid_tracer_provider() { + let config = test_config( + None, + None, + None, + Some(TracingExporters { + otlp: Some(OTLPTracingExporter { + protocol: "http/protobuf".to_string(), + endpoint: 
"http://localhost:4318/v1/traces".to_string(), + }), + sampler: Default::default(), + omitted_attributes: None, + }), + ); + let result = init_tracer_provider(&config.telemetry); + assert!(result.is_ok()); + } +} diff --git a/crates/apollo-mcp-server/src/runtime/telemetry/sampler.rs b/crates/apollo-mcp-server/src/runtime/telemetry/sampler.rs new file mode 100644 index 00000000..873bccd7 --- /dev/null +++ b/crates/apollo-mcp-server/src/runtime/telemetry/sampler.rs @@ -0,0 +1,98 @@ +use schemars::JsonSchema; +use serde::Deserialize; + +#[derive(Clone, Debug, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields, untagged)] +pub(crate) enum SamplerOption { + /// Sample a given fraction. Fractions >= 1 will always sample. + RatioBased(f64), + Always(Sampler), +} + +#[derive(Clone, Debug, Deserialize, JsonSchema)] +#[serde(deny_unknown_fields, rename_all = "snake_case")] +pub(crate) enum Sampler { + /// Always sample + AlwaysOn, + /// Never sample + AlwaysOff, +} + +impl From for opentelemetry_sdk::trace::Sampler { + fn from(s: Sampler) -> Self { + match s { + Sampler::AlwaysOn => opentelemetry_sdk::trace::Sampler::AlwaysOn, + Sampler::AlwaysOff => opentelemetry_sdk::trace::Sampler::AlwaysOff, + } + } +} + +impl From for opentelemetry_sdk::trace::Sampler { + fn from(s: SamplerOption) -> Self { + match s { + SamplerOption::Always(s) => s.into(), + SamplerOption::RatioBased(ratio) => { + opentelemetry_sdk::trace::Sampler::TraceIdRatioBased(ratio) + } + } + } +} + +impl Default for SamplerOption { + fn default() -> Self { + SamplerOption::Always(Sampler::AlwaysOn) + } +} + +#[cfg(test)] +mod tests { + use super::*; + + #[test] + fn sampler_always_on_maps_to_otel_always_on() { + assert!(matches!( + Sampler::AlwaysOn.into(), + opentelemetry_sdk::trace::Sampler::AlwaysOn + )); + } + + #[test] + fn sampler_always_off_maps_to_otel_always_off() { + assert!(matches!( + Sampler::AlwaysOff.into(), + opentelemetry_sdk::trace::Sampler::AlwaysOff + )); + } + + #[test] + fn 
sampler_option_always_on_maps_to_otel_always_on() { + assert!(matches!( + SamplerOption::Always(Sampler::AlwaysOn).into(), + opentelemetry_sdk::trace::Sampler::AlwaysOn + )); + } + + #[test] + fn sampler_option_always_off_maps_to_otel_always_off() { + assert!(matches!( + SamplerOption::Always(Sampler::AlwaysOff).into(), + opentelemetry_sdk::trace::Sampler::AlwaysOff + )); + } + + #[test] + fn sampler_option_ratio_based_maps_to_otel_ratio_based_sampler() { + assert!(matches!( + SamplerOption::RatioBased(0.5).into(), + opentelemetry_sdk::trace::Sampler::TraceIdRatioBased(0.5) + )); + } + + #[test] + fn default_sampler_option_is_always_on() { + assert!(matches!( + SamplerOption::default(), + SamplerOption::Always(Sampler::AlwaysOn) + )); + } +} diff --git a/crates/apollo-mcp-server/src/schema_tree_shake.rs b/crates/apollo-mcp-server/src/schema_tree_shake.rs index 5d2fa7f7..d2896b3a 100644 --- a/crates/apollo-mcp-server/src/schema_tree_shake.rs +++ b/crates/apollo-mcp-server/src/schema_tree_shake.rs @@ -532,16 +532,15 @@ fn retain_argument_descriptions( ) { let operation_argument_name = operation_arguments.get(arg.name.as_str()); - if let Some(op_arg_name) = operation_argument_name { - if let Some(description) = arg.description.as_deref() { - if !description.trim().is_empty() { - let descriptions = tree_shaker - .arguments_descriptions - .entry(op_arg_name.to_string()) - .or_default(); - descriptions.push(description.trim().to_string()) - } - } + if let Some(op_arg_name) = operation_argument_name + && let Some(description) = arg.description.as_deref() + && !description.trim().is_empty() + { + let descriptions = tree_shaker + .arguments_descriptions + .entry(op_arg_name.to_string()) + .or_default(); + descriptions.push(description.trim().to_string()) } } diff --git a/crates/apollo-mcp-server/src/server.rs b/crates/apollo-mcp-server/src/server.rs index 96c0d772..cdbd72e3 100644 --- a/crates/apollo-mcp-server/src/server.rs +++ b/crates/apollo-mcp-server/src/server.rs @@ 
-8,6 +8,7 @@ use serde::Deserialize; use url::Url; use crate::auth; +use crate::cors::CorsConfig; use crate::custom_scalar_map::CustomScalarMap; use crate::errors::ServerError; use crate::event::Event as ServerEvent; @@ -36,9 +37,11 @@ pub struct Server { mutation_mode: MutationMode, disable_type_description: bool, disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, + cors: CorsConfig, } #[derive(Debug, Clone, Deserialize, Default, JsonSchema)] @@ -79,6 +82,9 @@ pub enum Transport { /// The port to bind to #[serde(default = "Transport::default_port")] port: u16, + + #[serde(default = "Transport::default_stateful_mode")] + stateful_mode: bool, }, } @@ -90,6 +96,10 @@ impl Transport { fn default_port() -> u16 { 5000 } + + fn default_stateful_mode() -> bool { + true + } } #[bon] @@ -112,9 +122,11 @@ impl Server { mutation_mode: MutationMode, disable_type_description: bool, disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, + cors: CorsConfig, ) -> Self { let headers = { let mut headers = headers.clone(); @@ -138,9 +150,11 @@ impl Server { mutation_mode, disable_type_description, disable_schema_description, + disable_auth_token_passthrough, search_leaf_depth, index_memory_bytes, health_check, + cors, } } diff --git a/crates/apollo-mcp-server/src/server/states.rs b/crates/apollo-mcp-server/src/server/states.rs index 81211cda..c89f3a63 100644 --- a/crates/apollo-mcp-server/src/server/states.rs +++ b/crates/apollo-mcp-server/src/server/states.rs @@ -6,6 +6,7 @@ use reqwest::header::HeaderMap; use url::Url; use crate::{ + cors::CorsConfig, custom_scalar_map::CustomScalarMap, errors::{OperationError, ServerError}, health::HealthCheckConfig, @@ -44,9 +45,11 @@ struct Config { mutation_mode: MutationMode, disable_type_description: bool, 
disable_schema_description: bool, + disable_auth_token_passthrough: bool, search_leaf_depth: usize, index_memory_bytes: usize, health_check: HealthCheckConfig, + cors: CorsConfig, } impl StateMachine { @@ -76,9 +79,11 @@ impl StateMachine { mutation_mode: server.mutation_mode, disable_type_description: server.disable_type_description, disable_schema_description: server.disable_schema_description, + disable_auth_token_passthrough: server.disable_auth_token_passthrough, search_leaf_depth: server.search_leaf_depth, index_memory_bytes: server.index_memory_bytes, health_check: server.health_check, + cors: server.cors, }, }); @@ -151,10 +156,10 @@ impl StateMachine { #[allow(clippy::result_large_err)] fn sdl_to_api_schema(schema_state: SchemaState) -> Result, ServerError> { - match Supergraph::new(&schema_state.sdl) { + match Supergraph::new_with_router_specs(&schema_state.sdl) { Ok(supergraph) => Ok(supergraph .to_api_schema(ApiSchemaOptions::default()) - .map_err(ServerError::Federation)? + .map_err(|e| ServerError::Federation(Box::new(e)))? 
.schema() .clone()), Err(_) => Schema::parse_and_validate(schema_state.sdl, "schema.graphql") diff --git a/crates/apollo-mcp-server/src/server/states/running.rs b/crates/apollo-mcp-server/src/server/states/running.rs index b1b69495..2bdede77 100644 --- a/crates/apollo-mcp-server/src/server/states/running.rs +++ b/crates/apollo-mcp-server/src/server/states/running.rs @@ -3,6 +3,8 @@ use std::sync::Arc; use apollo_compiler::{Schema, validation::Valid}; use headers::HeaderMapExt as _; +use opentelemetry::trace::FutureExt; +use opentelemetry::{Context, KeyValue}; use reqwest::header::HeaderMap; use rmcp::model::Implementation; use rmcp::{ @@ -19,6 +21,8 @@ use tokio_util::sync::CancellationToken; use tracing::{debug, error}; use url::Url; +use crate::generated::telemetry::{TelemetryAttribute, TelemetryMetric}; +use crate::meter; use crate::{ auth::ValidToken, custom_scalar_map::CustomScalarMap, @@ -52,6 +56,7 @@ pub(super) struct Running { pub(super) mutation_mode: MutationMode, pub(super) disable_type_description: bool, pub(super) disable_schema_description: bool, + pub(super) disable_auth_token_passthrough: bool, pub(super) health_check: Option, } @@ -100,6 +105,7 @@ impl Running { Ok(self) } + #[tracing::instrument(skip_all)] pub(super) async fn update_operations( self, operations: Vec, @@ -141,6 +147,7 @@ impl Running { } /// Notify any peers that tools have changed. Drops unreachable peers from the list. + #[tracing::instrument(skip_all)] async fn notify_tool_list_changed(peers: Arc>>>) { let mut peers = peers.write().await; if !peers.is_empty() { @@ -169,41 +176,51 @@ impl Running { } impl ServerHandler for Running { + #[tracing::instrument(skip(self, _request))] async fn initialize( &self, _request: InitializeRequestParam, context: RequestContext, ) -> Result { + let meter = &meter::METER; + meter + .u64_counter(TelemetryMetric::InitializeCount.as_str()) + .build() + .add(1, &[]); // TODO: how to remove these? 
let mut peers = self.peers.write().await; peers.push(context.peer); Ok(self.get_info()) } + #[tracing::instrument(skip(self, context, request), fields(apollo.mcp.tool_name = request.name.as_ref(), apollo.mcp.request_id = %context.id.clone()))] async fn call_tool( &self, request: CallToolRequestParam, context: RequestContext, ) -> Result { - let result = match request.name.as_ref() { + let meter = &meter::METER; + let start = std::time::Instant::now(); + let tool_name = request.name.clone(); + let result = match tool_name.as_ref() { INTROSPECT_TOOL_NAME => { self.introspect_tool .as_ref() - .ok_or(tool_not_found(&request.name))? + .ok_or(tool_not_found(&tool_name))? .execute(convert_arguments(request)?) .await } SEARCH_TOOL_NAME => { self.search_tool .as_ref() - .ok_or(tool_not_found(&request.name))? + .ok_or(tool_not_found(&tool_name))? .execute(convert_arguments(request)?) .await } EXPLORER_TOOL_NAME => { self.explorer_tool .as_ref() - .ok_or(tool_not_found(&request.name))? + .ok_or(tool_not_found(&tool_name))? .execute(convert_arguments(request)?) .await } @@ -211,7 +228,9 @@ impl ServerHandler for Running { let mut headers = self.headers.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present - if let Some(token) = axum_parts.extensions.get::() { + if !self.disable_auth_token_passthrough + && let Some(token) = axum_parts.extensions.get::() + { headers.typed_insert(token.deref().clone()); } @@ -223,7 +242,7 @@ impl ServerHandler for Running { self.execute_tool .as_ref() - .ok_or(tool_not_found(&request.name))? + .ok_or(tool_not_found(&tool_name))? .execute(graphql::Request { input: Value::from(request.arguments.clone()), endpoint: &self.endpoint, @@ -234,7 +253,7 @@ impl ServerHandler for Running { VALIDATE_TOOL_NAME => { self.validate_tool .as_ref() - .ok_or(tool_not_found(&request.name))? + .ok_or(tool_not_found(&tool_name))? .execute(convert_arguments(request)?) 
.await } @@ -242,7 +261,9 @@ impl ServerHandler for Running { let mut headers = self.headers.clone(); if let Some(axum_parts) = context.extensions.get::() { // Optionally extract the validated token and propagate it to upstream servers if present - if let Some(token) = axum_parts.extensions.get::() { + if !self.disable_auth_token_passthrough + && let Some(token) = axum_parts.extensions.get::() + { headers.typed_insert(token.deref().clone()); } @@ -261,9 +282,10 @@ impl ServerHandler for Running { .lock() .await .iter() - .find(|op| op.as_ref().name == request.name) - .ok_or(tool_not_found(&request.name))? + .find(|op| op.as_ref().name == tool_name) + .ok_or(tool_not_found(&tool_name))? .execute(graphql_request) + .with_context(Context::current()) .await } }; @@ -273,14 +295,37 @@ impl ServerHandler for Running { health_check.record_rejection(); } + let attributes = vec![ + KeyValue::new( + TelemetryAttribute::Success.to_key(), + result.as_ref().is_ok_and(|r| r.is_error != Some(true)), + ), + KeyValue::new(TelemetryAttribute::ToolName.to_key(), tool_name), + ]; + // Record response time and status + meter + .f64_histogram(TelemetryMetric::ToolDuration.as_str()) + .build() + .record(start.elapsed().as_millis() as f64, &attributes); + meter + .u64_counter(TelemetryMetric::ToolCount.as_str()) + .build() + .add(1, &attributes); + result } + #[tracing::instrument(skip_all)] async fn list_tools( &self, _request: Option, _context: RequestContext, ) -> Result { + let meter = &meter::METER; + meter + .u64_counter(TelemetryMetric::ListToolsCount.as_str()) + .build() + .add(1, &[]); Ok(ListToolsResult { next_cursor: None, tools: self @@ -299,10 +344,20 @@ impl ServerHandler for Running { } fn get_info(&self) -> ServerInfo { + let meter = &meter::METER; + meter + .u64_counter(TelemetryMetric::GetInfoCount.as_str()) + .build() + .add(1, &[]); ServerInfo { server_info: Implementation { name: "Apollo MCP Server".to_string(), + icons: None, + title: Some("Apollo MCP 
Server".to_string()), version: env!("CARGO_PKG_VERSION").to_string(), + website_url: Some( + "https://www.apollographql.com/docs/apollo-mcp-server".to_string(), + ), }, capabilities: ServerCapabilities::builder() .enable_tools() @@ -355,6 +410,7 @@ mod tests { mutation_mode: MutationMode::None, disable_type_description: false, disable_schema_description: false, + disable_auth_token_passthrough: false, health_check: None, }; diff --git a/crates/apollo-mcp-server/src/server/states/starting.rs b/crates/apollo-mcp-server/src/server/states/starting.rs index a23b137b..c377da5a 100644 --- a/crates/apollo-mcp-server/src/server/states/starting.rs +++ b/crates/apollo-mcp-server/src/server/states/starting.rs @@ -2,8 +2,10 @@ use std::{net::SocketAddr, sync::Arc}; use apollo_compiler::{Name, Schema, ast::OperationType, validation::Valid}; use axum::{Router, extract::Query, http::StatusCode, response::Json, routing::get}; -use rmcp::transport::StreamableHttpService; +use axum_otel_metrics::HttpMetricsLayerBuilder; +use axum_tracing_opentelemetry::middleware::{OtelAxumLayer, OtelInResponseLayer}; use rmcp::transport::streamable_http_server::session::local::LocalSessionManager; +use rmcp::transport::{StreamableHttpServerConfig, StreamableHttpService}; use rmcp::{ ServiceExt as _, transport::{SseServer, sse_server::SseServerConfig, stdio}, @@ -11,6 +13,7 @@ use rmcp::{ use serde_json::json; use tokio::sync::{Mutex, RwLock}; use tokio_util::sync::CancellationToken; +use tower_http::trace::TraceLayer; use tracing::{Instrument as _, debug, error, info, trace}; use crate::{ @@ -126,6 +129,7 @@ impl Starting { auth: _, address: _, port: _, + stateful_mode: _, }, true, ) => Some(HealthCheck::new(self.config.health_check.clone())), @@ -148,6 +152,7 @@ impl Starting { mutation_mode: self.config.mutation_mode, disable_type_description: self.config.disable_type_description, disable_schema_description: self.config.disable_schema_description, + disable_auth_token_passthrough: 
self.config.disable_auth_token_passthrough, health_check: health_check.clone(), }; @@ -162,11 +167,32 @@ impl Starting { router }}; } + + // Helper to enable CORS + macro_rules! with_cors { + ($router:expr, $config:expr) => {{ + let mut router = $router; + if $config.enabled { + match $config.build_cors_layer() { + Ok(cors_layer) => { + router = router.layer(cors_layer); + } + Err(e) => { + error!("Failed to build CORS layer: {}", e); + return Err(e); + } + } + } + router + }}; + } + match self.config.transport { Transport::StreamableHttp { auth, address, port, + stateful_mode, } => { info!(port = ?port, address = ?address, "Starting MCP server in Streamable HTTP mode"); let running = running.clone(); @@ -174,16 +200,48 @@ impl Starting { let service = StreamableHttpService::new( move || Ok(running.clone()), LocalSessionManager::default().into(), - Default::default(), + StreamableHttpServerConfig { + stateful_mode, + ..Default::default() + }, + ); + let mut router = with_cors!( + with_auth!(axum::Router::new().nest_service("/mcp", service), auth), + self.config.cors + ) + .layer(HttpMetricsLayerBuilder::new().build()) + // include trace context as header into the response + .layer(OtelInResponseLayer) + //start OpenTelemetry trace on incoming request + .layer(OtelAxumLayer::default()) + // Add tower-http tracing layer for additional HTTP-level tracing + .layer( + TraceLayer::new_for_http() + .make_span_with(|request: &axum::http::Request<_>| { + tracing::info_span!( + "mcp_server", + method = %request.method(), + uri = %request.uri(), + status_code = tracing::field::Empty, + ) + }) + .on_response( + |response: &axum::http::Response<_>, + _latency: std::time::Duration, + span: &tracing::Span| { + span.record("status", tracing::field::display(response.status())); + }, + ), ); - let mut router = - with_auth!(axum::Router::new().nest_service("/mcp", service), auth); // Add health check endpoint if configured if let Some(health_check) = health_check.filter(|h| 
h.config().enabled) { - let health_router = Router::new() - .route(&health_check.config().path, get(health_endpoint)) - .with_state(health_check.clone()); + let health_router = with_cors!( + Router::new() + .route(&health_check.config().path, get(health_endpoint)) + .with_state(health_check.clone()), + self.config.cors + ); router = router.merge(health_router); } @@ -245,9 +303,14 @@ impl Starting { } Transport::Stdio => { info!("Starting MCP server in stdio mode"); - let service = running.clone().serve(stdio()).await.inspect_err(|e| { - error!("serving error: {:?}", e); - })?; + let service = running + .clone() + .serve(stdio()) + .await + .inspect_err(|e| { + error!("serving error: {:?}", e); + }) + .map_err(Box::new)?; service.waiting().await.map_err(ServerError::StartupError)?; } } @@ -268,3 +331,53 @@ async fn health_endpoint( Ok((status_code, Json(json!(health)))) } + +#[cfg(test)] +mod tests { + use http::HeaderMap; + use url::Url; + + use crate::health::HealthCheckConfig; + + use super::*; + + #[tokio::test] + async fn start_basic_server() { + let starting = Starting { + config: Config { + transport: Transport::StreamableHttp { + auth: None, + address: "127.0.0.1".parse().unwrap(), + port: 7799, + stateful_mode: false, + }, + endpoint: Url::parse("http://localhost:4000").expect("valid url"), + mutation_mode: MutationMode::All, + execute_introspection: true, + headers: HeaderMap::new(), + validate_introspection: true, + introspect_introspection: true, + search_introspection: true, + introspect_minify: false, + search_minify: false, + explorer_graph_ref: None, + custom_scalar_map: None, + disable_type_description: false, + disable_schema_description: false, + disable_auth_token_passthrough: false, + search_leaf_depth: 5, + index_memory_bytes: 1024 * 1024 * 1024, + health_check: HealthCheckConfig { + enabled: true, + ..Default::default() + }, + cors: Default::default(), + }, + schema: Schema::parse_and_validate("type Query { hello: String }", "test.graphql") + 
.expect("Valid schema"), + operations: vec![], + }; + let running = starting.start(); + assert!(running.await.is_ok()); + } +} diff --git a/crates/apollo-mcp-server/src/telemetry_attributes.rs b/crates/apollo-mcp-server/src/telemetry_attributes.rs new file mode 100644 index 00000000..49e6ab5f --- /dev/null +++ b/crates/apollo-mcp-server/src/telemetry_attributes.rs @@ -0,0 +1,36 @@ +use crate::generated::telemetry::{ALL_ATTRS, TelemetryAttribute}; +use opentelemetry::Key; +use std::collections::HashSet; + +impl TelemetryAttribute { + pub const fn to_key(self) -> Key { + match self { + TelemetryAttribute::ToolName => { + Key::from_static_str(TelemetryAttribute::ToolName.as_str()) + } + TelemetryAttribute::OperationId => { + Key::from_static_str(TelemetryAttribute::OperationId.as_str()) + } + TelemetryAttribute::OperationSource => { + Key::from_static_str(TelemetryAttribute::OperationSource.as_str()) + } + TelemetryAttribute::Success => { + Key::from_static_str(TelemetryAttribute::Success.as_str()) + } + TelemetryAttribute::RequestId => { + Key::from_static_str(TelemetryAttribute::RequestId.as_str()) + } + TelemetryAttribute::RawOperation => { + Key::from_static_str(TelemetryAttribute::RawOperation.as_str()) + } + } + } + + pub fn included_attributes(omitted: HashSet) -> Vec { + ALL_ATTRS + .iter() + .copied() + .filter(|a| !omitted.contains(a)) + .collect() + } +} diff --git a/crates/apollo-mcp-server/telemetry.toml b/crates/apollo-mcp-server/telemetry.toml new file mode 100644 index 00000000..8135456a --- /dev/null +++ b/crates/apollo-mcp-server/telemetry.toml @@ -0,0 +1,16 @@ +[attributes.apollo.mcp] +tool_name = "The tool name" +operation_id = "The operation id - either persisted query id, operation name, or unknown" +operation_source = "The operation source - either operation (local file/op collection), persisted query, or LLM generated" +request_id = "The request id" +success = "Success flag indicator" +raw_operation = "GraphQL operation text and metadata used
for Tool generation" + +[metrics.apollo.mcp] +"initialize.count" = "Number of times initialize has been called" +"tool.count" = "Number of times call_tool has been called" +"tool.duration" = "Duration of call_tool" +"list_tools.count" = "Number of times list_tools has been called" +"get_info.count" = "Number of times get_info has been called" +"operation.duration" = "Duration of graphql execute" +"operation.count" = "Number of times graphql execute has been called" diff --git a/crates/apollo-schema-index/Cargo.toml b/crates/apollo-schema-index/Cargo.toml index e2be15b6..86f8389f 100644 --- a/crates/apollo-schema-index/Cargo.toml +++ b/crates/apollo-schema-index/Cargo.toml @@ -1,10 +1,12 @@ [package] name = "apollo-schema-index" -edition = "2024" authors.workspace = true +edition.workspace = true +license-file.workspace = true +repository.workspace = true +rust-version.workspace = true version.workspace = true -license-file = "../LICENSE" -repository = "https://github.com/apollographql/apollo-mcp-server" + description = "GraphQL schema indexing" [dependencies] diff --git a/crates/apollo-schema-index/src/traverse.rs b/crates/apollo-schema-index/src/traverse.rs index fa0137e9..03139345 100644 --- a/crates/apollo-schema-index/src/traverse.rs +++ b/crates/apollo-schema-index/src/traverse.rs @@ -53,67 +53,65 @@ impl SchemaExt for Schema { ); let cloned = current_path.clone(); - if let Some(extended_type) = self.types.get(named_type) { - if !extended_type.is_built_in() { - if traverse_children { - match extended_type { - ExtendedType::Object(obj) => { - stack.extend(obj.fields.values().map(|field| { - let field_type = field.ty.inner_named_type(); - let field_args = field - .arguments - .iter() - .map(|arg| arg.ty.inner_named_type().clone()) - .collect::>(); - ( - field_type, - current_path.clone().add_child( - Some(field.name.clone()), - field_args, - field_type.clone(), - ), - ) - })); - } - ExtendedType::Interface(interface) => { - 
stack.extend(interface.fields.values().map(|field| { - let field_type = field.ty.inner_named_type(); - let field_args = field - .arguments - .iter() - .map(|arg| arg.ty.inner_named_type().clone()) - .collect::>(); + if let Some(extended_type) = self.types.get(named_type) + && !extended_type.is_built_in() + { + if traverse_children { + match extended_type { + ExtendedType::Object(obj) => { + stack.extend(obj.fields.values().map(|field| { + let field_type = field.ty.inner_named_type(); + let field_args = field + .arguments + .iter() + .map(|arg| arg.ty.inner_named_type().clone()) + .collect::>(); + ( + field_type, + current_path.clone().add_child( + Some(field.name.clone()), + field_args, + field_type.clone(), + ), + ) + })); + } + ExtendedType::Interface(interface) => { + stack.extend(interface.fields.values().map(|field| { + let field_type = field.ty.inner_named_type(); + let field_args = field + .arguments + .iter() + .map(|arg| arg.ty.inner_named_type().clone()) + .collect::>(); + ( + field_type, + current_path.clone().add_child( + Some(field.name.clone()), + field_args, + field_type.clone(), + ), + ) + })); + } + ExtendedType::Union(union) => { + stack.extend(union.members.iter().map(|member| &member.name).map( + |next_type| { ( - field_type, + next_type, current_path.clone().add_child( - Some(field.name.clone()), - field_args, - field_type.clone(), + None, + vec![], + next_type.clone(), ), ) - })); - } - ExtendedType::Union(union) => { - stack.extend( - union.members.iter().map(|member| &member.name).map( - |next_type| { - ( - next_type, - current_path.clone().add_child( - None, - vec![], - next_type.clone(), - ), - ) - }, - ), - ); - } - _ => {} + }, + )); } + _ => {} } - return Some((extended_type, cloned)); } + return Some((extended_type, cloned)); } } None diff --git a/docs/source/_sidebar.yaml b/docs/source/_sidebar.yaml index 4b8bf4cc..531eaceb 100644 --- a/docs/source/_sidebar.yaml +++ b/docs/source/_sidebar.yaml @@ -28,8 +28,12 @@ items: href: 
"./deploy" - label: "Health Checks" href: "./health-checks" + - label: "CORS" + href: "./cors" - label: "Authorization" href: "./auth" + - label: "Telemetry" + href: "./telemetry" - label: "Best Practices" href: "./best-practices" - label: "Licensing" diff --git a/docs/source/config-file.mdx b/docs/source/config-file.mdx index ba7c45dc..54021eaa 100644 --- a/docs/source/config-file.mdx +++ b/docs/source/config-file.mdx @@ -17,6 +17,7 @@ All fields are optional. | Option | Type | Default | Description | | :--------------- | :-------------------- | :----------------------- | :--------------------------------------------------------------- | +| `cors` | `Cors` | | CORS configuration | | `custom_scalars` | `FilePath` | | Path to a [custom scalar map](/apollo-mcp-server/custom-scalars) | | `endpoint` | `URL` | `http://localhost:4000/` | The target GraphQL endpoint | | `graphos` | `GraphOS` | | Apollo-specific credential overrides | @@ -28,6 +29,8 @@ All fields are optional. | `overrides` | `Overrides` | | Overrides for server behavior | | `schema` | `SchemaSource` | | Schema configuration | | `transport` | `Transport` | | The type of server transport to use | +| `telemetry` | `Telemetry` | | Configuration to export metrics and traces via OTLP | + ### GraphOS @@ -40,6 +43,22 @@ These fields are under the top-level `graphos` key and define your GraphOS graph | `apollo_registry_url` | `URL` | | The URL to use for Apollo's registry | | `apollo_uplink_endpoints` | `URL` | | List of uplink URL overrides. You can also provide this with the `APOLLO_UPLINK_ENDPOINTS` environment variable | +### CORS + +These fields are under the top-level `cors` key and configure Cross-Origin Resource Sharing (CORS) for browser-based MCP clients. 
+ +| Option | Type | Default | Description | +| :------------------ | :------------- | :---------------------------------------------------------------------------------------- | :------------------------------------------------------------------------------------------------------- | +| `enabled` | `bool` | `false` | Enable CORS support | +| `origins` | `List` | `[]` | List of allowed origins (exact matches). Use `["*"]` to allow any origin (not recommended in production) | +| `match_origins` | `List` | `[]` | List of regex patterns to match allowed origins (e.g., `"^https://localhost:[0-9]+$"`) | +| `allow_any_origin` | `bool` | `false` | Allow requests from any origin. Cannot be used with `allow_credentials: true` | +| `allow_credentials` | `bool` | `false` | Allow credentials (cookies, authorization headers) in CORS requests | +| `allow_methods` | `List` | `["GET", "POST", "OPTIONS"]` | List of allowed HTTP methods | +| `allow_headers` | `List` | `["content-type", "mcp-protocol-version", "mcp-session-id", "traceparent", "tracestate"]` | List of allowed request headers | +| `expose_headers` | `List` | `["mcp-session-id", "traceparent", "tracestate"]` | List of response headers exposed to the browser (includes MCP and W3C Trace Context headers) | +| `max_age` | `number` | `86400` | Maximum age (in seconds) for preflight cache | + ### Health checks These fields are under the top-level `health_check` key. Learn more about [health checks](/apollo-mcp-server/health-checks). @@ -147,11 +166,12 @@ The available fields depend on the value of the nested `type` key: ##### Streamable HTTP -| Option | Value | Value Type | Description | -| :-------- | :-------------------- | :--------- | :------------------------------------------------------------------------ | -| `type` | `"streamable_http"` | | Host the MCP server on the configuration, using streamable HTTP messages. 
| -| `address` | `127.0.0.1` (default) | `IpAddr` | The IP address to bind to | -| `port` | `5000` (default) | `u16` | The port to bind to | +| Option | Value | Value Type | Description | +| :-------------- | :-------------------- | :--------- | :------------------------------------------------------------------------ | +| `type` | `"streamable_http"` | | Host the MCP server on the configuration, using streamable HTTP messages. | +| `address` | `127.0.0.1` (default) | `IpAddr` | The IP address to bind to | +| `port` | `5000` (default) | `u16` | The port to bind to | +| `stateful_mode` | `true` (default) | `bool` | Flag to enable or disable stateful mode and session management. | ##### SSE (Deprecated, use StreamableHTTP) @@ -165,13 +185,14 @@ The available fields depend on the value of the nested `type` key: These fields are under the top-level `transport` key, nested under the `auth` key. Learn more about [authorization and authentication](/apollo-mcp-server/auth). -| Option | Type | Default | Description | -| :----------------------- | :------------- | :------ | :------------------------------------------------------------------------------------------------- | -| `servers` | `List` | | List of upstream delegated OAuth servers (must support OIDC metadata discovery endpoint) | -| `audiences` | `List` | | List of accepted audiences from upstream signed JWTs | -| `resource` | `string` | | The externally available URL pointing to this MCP server. Can be `localhost` when testing locally. 
| -| `resource_documentation` | `string` | | Optional link to more documentation relating to this MCP server | -| `scopes` | `List` | | List of queryable OAuth scopes from the upstream OAuth servers | +| Option | Type | Default | Description | +| :------------------------------- | :------------- | :------ | :------------------------------------------------------------------------------------------------- | +| `servers` | `List` | | List of upstream delegated OAuth servers (must support OIDC metadata discovery endpoint) | +| `audiences` | `List` | | List of accepted audiences from upstream signed JWTs | +| `resource` | `string` | | The externally available URL pointing to this MCP server. Can be `localhost` when testing locally. | +| `resource_documentation` | `string` | | Optional link to more documentation relating to this MCP server | +| `scopes` | `List` | | List of queryable OAuth scopes from the upstream OAuth servers | +| `disable_auth_token_passthrough` | `bool` | `false` | Optional flag to disable passing validated Authorization header to downstream API | Below is an example configuration using `StreamableHTTP` transport with authentication: @@ -205,6 +226,52 @@ transport: - profile ``` +### Telemetry + +| Option | Type | Default | Description | +| :-------------- | :---------- | :-------------------------- | :--------------------------------------- | +| `service_name` | `string` | "apollo-mcp-server" | The service name in telemetry data. | +| `version` | `string` | Current crate version | The service version in telemetry data. | +| `exporters` | `Exporters` | `null` (Telemetry disabled) | Configuration for telemetry exporters. | + +#### Exporters + +| Option | Type | Default | Description | +| :--------- | :---------- | :-------------------------- | :--------------------------------------- | +| `metrics` | `Metrics` | `null` (Metrics disabled) | Configuration for exporting metrics. 
| `tracing`  | `Tracing`   | `null` (Tracing disabled)   | Configuration for exporting traces.      |
+
+
+#### Metrics
+
+| Option                | Type             | Default                     | Description                                    |
+| :-------------------- | :--------------- | :-------------------------- | :--------------------------------------------- |
+| `otlp`                | `OTLP Exporter`  | `null` (Exporting disabled) | Configuration for exporting metrics via OTLP.  |
+| `omitted_attributes`  | `List`           |                             | List of attributes to be omitted from metrics. |
+
+#### Traces
+
+| Option                | Type             | Default                     | Description                                    |
+| :-------------------- | :--------------- | :-------------------------- | :--------------------------------------------- |
+| `otlp`                | `OTLP Exporter`  | `null` (Exporting disabled) | Configuration for exporting traces via OTLP.   |
+| `sampler`             | `SamplerOption`  | `ALWAYS_ON`                 | Configuration to control sampling of traces.   |
+| `omitted_attributes`  | `List`           |                             | List of attributes to be omitted from traces.  |
+
+#### OTLP Exporter
+
+| Option     | Type      | Default                     | Description                                                      |
+| :--------- | :-------- | :-------------------------- | :--------------------------------------------------------------- |
+| `endpoint` | `URL`     | `http://localhost:4317`     | URL to export data to. Requires full path.                       |
+| `protocol` | `string`  | `grpc`                      | Protocol for export. `grpc` and `http/protobuf` are supported.   |
+
+#### SamplerOption
+
+| Option       | Type      | Description                                              |
+| :----------- | :-------- | :------------------------------------------------------- |
+| `always_on`  | `string`  | All traces will be exported.                             |
+| `always_off` | `string`  | Sampling is turned off, no traces will be exported.      |
+| `0.0-1.0`    | `f64`     | Percentage of traces to export.                          |
+
 ## Example config file
 
 The following example file sets your endpoint to `localhost:4001`, configures transport over Streamable HTTP, enables introspection, and provides two local MCP operations for the server to expose. 
@@ -239,4 +306,4 @@ For example, to override the `transport.auth.servers` option, you can set the `A ```sh APOLLO_MCP_TRANSPORT__AUTH__SERVERS='[server_url_1,server_url_2]' -``` \ No newline at end of file +``` diff --git a/docs/source/cors.mdx b/docs/source/cors.mdx new file mode 100644 index 00000000..aeaea24b --- /dev/null +++ b/docs/source/cors.mdx @@ -0,0 +1,244 @@ +--- +title: Configuring CORS +--- + +## Configuring CORS + +Control browser access to your MCP server + +--- + +**This article describes CORS configuration that's specific to Apollo MCP Server**. For a more general introduction to CORS and common considerations, see [MDN's CORS documentation](https://developer.mozilla.org/en-US/docs/Web/HTTP/CORS). + +By default, Apollo MCP Server has CORS disabled. If your MCP server serves tools to browser-based applications, you need to enable CORS and configure one of the following in the `cors` section of your server's YAML config file: + +- Add the origins of those web applications to the server's list of allowed `origins`. + - Use this option if there is a known, finite list of web applications that consume your MCP server. +- Add a regex that matches the origins of those web applications to the server's list of allowed `match_origins`. + - This option comes in handy if you want to match origins against a pattern, see the example below that matches subdomains of a specific namespace. +- Enable the `allow_any_origin` option. + - Use this option if your MCP server is a public API with arbitrarily many web app consumers. + - With this option enabled, the server sends the wildcard (\*) value for the `Access-Control-Allow-Origin` header. This enables _any_ website to initiate browser connections to it (but they can't provide cookies or other credentials). +- If clients need to authenticate their requests with cookies, you _must_ use either `origins`, `match_origins`, or the combination of both options. 
When using both options, note that `origins` is evaluated before `match_origins`. + +The following snippet includes an example of each option (use either `allow_any_origin`, or `origins` and/or `match_origins`): + +```yaml title="mcp.yaml" +transport: + type: streamable_http + port: 5000 + +cors: + # Enable CORS support + enabled: true + + # Set to true to allow any origin + # (Defaults to false) + allow_any_origin: true + + # List of accepted origins + # (Ignored if allow_any_origin is true) + # + # An origin is a combination of scheme, hostname and port. + # It does not have any path section, so no trailing slash. + origins: + - https://www.your-app.example.com + + # List of origin patterns (regex matching) + match_origins: + - "^https://([a-z0-9]+[.])*api[.]example[.]com$" # any host that uses https and ends with .api.example.com +``` + +You can also disable CORS entirely by setting `enabled` to `false` or omitting the `cors` section: + +```yaml title="mcp.yaml" +cors: + enabled: false +``` + +If your MCP server serves exclusively _non_-browser-based clients, you probably don't need to enable CORS configuration. + +### Passing credentials + +If your MCP server requires requests to include a user's credentials (e.g., via cookies), you need to modify your CORS configuration to tell the browser those credentials are allowed. + +You can enable credentials with CORS by setting the Access-Control-Allow-Credentials HTTP header to `true`. + +To allow browsers to pass credentials to the server, set `allow_credentials` to `true`, like so: + +```yaml title="mcp.yaml" +cors: + enabled: true + origins: + - https://www.your-app.example.com + allow_credentials: true +``` + +**To support credentialed requests, your server's config file must specify individual `origins` or `match_origins`**. If your server enables `allow_any_origin`, your browser will refuse to send credentials. 
+ +### All `cors` options + +The following snippet shows all CORS configuration defaults for Apollo MCP Server: + +```yaml title="mcp.yaml" +# +# CORS (Cross Origin Resource Sharing) +# +cors: + # Enable CORS support + enabled: false + + # Set to true to allow any origin + allow_any_origin: false + + # List of accepted origins + # (Ignored if allow_any_origin is set to true) + # + # An origin is a combination of scheme, hostname and port. + # It does not have any path section, so no trailing slash. + origins: [] + + # List of origin patterns (regex matching) + # Useful for matching dynamic ports or subdomains + match_origins: [] + + # Set to true to add the `Access-Control-Allow-Credentials` header + allow_credentials: false + + # Allowed request methods + allow_methods: + - GET + - POST + + # The headers to allow. + # These are the default headers required for MCP protocol and trace context + allow_headers: + - accept + - content-type + - mcp-protocol-version + - mcp-session-id + - traceparent # W3C Trace Context + - tracestate # W3C Trace Context + + # Which response headers are available to scripts running in the + # browser in response to a cross-origin request. + # The mcp-session-id header should be exposed for MCP session management. + # Trace context headers are exposed for distributed tracing. 
+ expose_headers: + - mcp-session-id + - traceparent # W3C Trace Context + - tracestate # W3C Trace Context + + # Adds the Access-Control-Max-Age header + # Maximum age (in seconds) for preflight cache + max_age: 7200 # 2 hours +``` + +### Origin matching + +Apollo MCP Server supports two types of origin matching: + +#### Exact origins + +Use the `origins` array for exact origin matches: + +```yaml +cors: + enabled: true + origins: + - http://localhost:3000 + - https://myapp.example.com +``` + +#### Pattern matching + +Use the `match_origins` array for regex pattern matching: + +```yaml +cors: + enabled: true + match_origins: + - "^https://localhost:[0-9]+$" # Any localhost HTTPS port + - "^http://localhost:[0-9]+$" # Any localhost HTTP port + - "^https://.*\\.example\\.com$" # Any subdomain of example.com +``` + +### Common configurations + +#### Development setup + +For local development with hot reloading and various ports: + +```yaml title="mcp.yaml" +cors: + enabled: true + match_origins: + - "^http://localhost:[0-9]+$" + allow_credentials: true +``` + +#### Production setup + +For production with specific known origins: + +```yaml title="mcp.yaml" +cors: + enabled: true + origins: + - https://myapp.example.com + allow_credentials: true + max_age: 86400 # 24 hours +``` + +#### Public API setup + +For public APIs that don't require credentials: + +```yaml title="mcp.yaml" +cors: + enabled: true + allow_any_origin: true + allow_credentials: false # Cannot use credentials with any origin +``` + +### Browser integration example + +Here's a simple example of connecting to Apollo MCP Server from a browser: + +```javascript +async function connectToMCP() { + const response = await fetch("http://127.0.0.1:5000/mcp", { + method: "POST", + headers: { + Accept: "application/json, text/event-stream", + "Content-Type": "application/json", + "MCP-Protocol-Version": "2025-06-18", + }, + body: JSON.stringify({ + jsonrpc: "2.0", + method: "initialize", + params: { + 
protocolVersion: "2025-06-18", + capabilities: {}, + clientInfo: { name: "Browser Client", version: "1.0" }, + }, + id: 1, + }), + }); + + // Extract session ID from response headers (automatically exposed) + const sessionId = response.headers.get("mcp-session-id"); + + // Handle SSE format response (starts with "data: ") + const responseText = await response.text(); + const jsonData = responseText.startsWith("data: ") + ? responseText.slice(6) // Remove "data: " prefix + : responseText; + + const result = JSON.parse(jsonData); + console.log("Connected:", result); + console.log("Session ID:", sessionId); +} + +connectToMCP(); +``` diff --git a/docs/source/define-tools.mdx b/docs/source/define-tools.mdx index 2fd6c6cc..6c7f61db 100644 --- a/docs/source/define-tools.mdx +++ b/docs/source/define-tools.mdx @@ -48,24 +48,56 @@ You can also use the `operations` option to specify a directory. The server then Files and directories specified with `operations` are hot reloaded. When you specify a file, the MCP tool is updated when the file contents are modified. When you specify a directory, operations exposed as MCP tools are updated when files are added, modified, or removed from the directory. -### From Operation Collection +### From operation collections -For graphs managed by GraphOS, Apollo MCP Server can get operations from an [Operation Collection](https://www.apollographql.com/docs/graphos/platform/explorer/operation-collections). +For graphs managed by GraphOS, Apollo MCP Server can retrieve operations from an [operation collection](/graphos/platform/explorer/operation-collections). -To use a GraphOS Operation Collection, you must: +Use GraphOS Studio Explorer to create and manage operation collections. 
-- Set `APOLLO_GRAPH_REF` and `APOLLO_KEY` environment variables for a GraphOS graph +#### Configuring the MCP Server to use a GraphOS operation collection + +To use a GraphOS operation collection, you must set your graph credentials (`APOLLO_GRAPH_REF` and `APOLLO_KEY`) when configuring the MCP Server. + +Each graph variant has its own default MCP Tools Collection, but you can specify any shared collection by using `operations.source: collection`. -Each variant will have its own default MCP Tools Collection, but you can specify any shared collection by using `operations` with `operations.source: collection`. Apollo MCP Server automatically fetches the default collection if no ID is specified. -```yaml title="Example config file for using a GraphOS Operation Collection" +```yaml title="Example config file for using a GraphOS operation collection" operations: source: collection id: ``` -The MCP Server supports hot reloading of the GraphOS Operation Collection, so it can automatically pick up changes from GraphOS without restarting. +The MCP Server supports hot reloading of the GraphOS operation collection, so it automatically picks up changes from GraphOS without restarting. + +#### Setting operation collection variables + +When saving operation collections, remove any dynamic variables from the **Variables** panel of Explorer. This enables the LLM to modify the variables when calling the operation. + +Any variables set to any valid value (even `null`) in the Variables panel of a saved operation are used as a hardcoded override for that operation's variable. + +For example, if you create the following operation for an operation collection: + +```graphql +query GetProduct($productId: ID!) { + product(id: $productId) { + id + description + } +} +``` + +And the Variables panel has `productId` set to `1234`: + +```json +{ + "productId": "1234" +} +``` + +Then, every time the LLM calls the `GetProduct` operation, the `productId` variable is always set to `1234`. 
The same is true if `productId` is set to `null`. + +If you want to use dynamic variables that the LLM can modify, remove any variables from the Variables panel and save that operation to the collection. ### From persisted query manifests @@ -156,6 +188,7 @@ Both the `introspect` and `search` tools support minification of their results t - **Type prefixes**: `T=type`, `I=input`, `E=enum`, `U=union`, `F=interface` - **Scalar abbreviations**: `s=String`, `i=Int`, `f=Float`, `b=Boolean`, `d=ID` +- **Directive abbreviations**: `@D=deprecated` - **Type modifiers**: `!=required`, `[]=list`, `<>=implements` Example comparison: diff --git a/docs/source/deploy.mdx b/docs/source/deploy.mdx index 0cac85ca..4a03a633 100644 --- a/docs/source/deploy.mdx +++ b/docs/source/deploy.mdx @@ -56,3 +56,13 @@ docker run \ ``` To learn more, review the [Apollo Runtime container documentation](/graphos/routing/self-hosted/containerization/docker). + +### Using a load balancer + +Because [MCP is a stateful protocol](https://modelcontextprotocol.io/docs/learn/architecture#lifecycle-management), you need to configure your load balancer to keep each session on the _same server instance_. + +When the MCP client initializes a session with Apollo MCP Server, it receives a session identifier unique to that server instance through the `mcp-session-id` header. You must enable session affinity ("sticky sessions") in your load balancer so that all requests that share the same `mcp-session-id` are routed to the same backend instance. + +If the load balancer routes subsequent requests to a different instance, Apollo MCP Server rejects the request because it doesn't recognize the session id. + +Many load balancers offered by major cloud vendors don't support header-based session affinity. If yours does not, use software such as Nginx, HAProxy, or Envoy/Istio in front of the Apollo MCP Server instances. 
diff --git a/docs/source/guides/auth-auth0.mdx b/docs/source/guides/auth-auth0.mdx index 79c046dd..5768e551 100644 --- a/docs/source/guides/auth-auth0.mdx +++ b/docs/source/guides/auth-auth0.mdx @@ -225,6 +225,30 @@ Before continuing, you need to set up the Auth0 client to accept an additional c +### Step 5: Make requests with an MCP Client (Goose) + +We'll use [Goose](https://block.github.io/goose/) as our MCP Client. Goose allows you to choose between many different LLMs and provides some built-in functionality for connecting to MCP servers, called [Extensions](https://block.github.io/goose/docs/getting-started/using-extensions). + +[Install the Goose CLI](https://block.github.io/goose/docs/getting-started/installation), following the instructions for your operating system. Set up the LLM provider of your choice with `goose configure` --> **Configure Providers**. Each provider has its own set of instructions, rate limiting and pricing. + +Then, continue with the following steps: + +1. In your terminal, run `goose configure`. +1. Select or enter the following answers at the prompts: + +| Prompt | Answer | +| ----------------------------------------------------------- | ------------------------------------------ | +| "What would you like to configure?" | "Add Extension" | +| "What type of extension would you like to add?" | "Command Line Extension" | +| "What's the name of this extension?" | "mcp-auth-quickstart" | +| "What command should be run?" | `npx mcp-remote http://127.0.0.1:5000/mcp` | +| Other prompts (timeout, description, environment variables) | Use the default values | + +1. To start Goose, type `goose`. This will open a browser window and send you through the auth flow. +1. Log in to your Auth0 instance and authorize your MCP server to gain access to your tools. +1. In Goose, ask "What astronauts are in space right now?". 
This question is similar to the `GetAstronautsCurrentlyInSpace` operation from earlier, which fails as unauthenticated without the proper token. +1. Goose will select the `GetAstronautsCurrentlyInSpace` tool and respond with information about the astronauts found in TheSpaceDevs. + ## Troubleshooting ### Common Issues diff --git a/docs/source/index.mdx b/docs/source/index.mdx index 4dd1b288..42203d4d 100644 --- a/docs/source/index.mdx +++ b/docs/source/index.mdx @@ -3,6 +3,7 @@ title: Apollo MCP Server subtitle: Enable graph-based API orchestration with AI redirectFrom: - /apollo-mcp-server/user-guide + - /apollo-mcp-server/guides --- @@ -88,8 +89,8 @@ The architecture enables intelligent API orchestration through these components: * Persisted Query Manifests: Pre-approved operation lists from Apollo GraphOS * Schema Introspection: Dynamic operation discovery for flexible AI exploration -Secure Execution: When invoked, the server executes GraphQL operations against your API endpoint, respecting all existing authentication, headers, and security policies. -Existing Infrastructure: Your GraphQL API handles requests normally, with Apollo MCP Server acting as a controlled gateway rather than requiring any changes to your graph. +* Secure Execution: When invoked, the server executes GraphQL operations against your API endpoint, respecting all existing authentication, headers, and security policies. +* Existing Infrastructure: Your GraphQL API handles requests normally, with Apollo MCP Server acting as a controlled gateway rather than requiring any changes to your graph. This design lets you expose precise GraphQL capabilities to AI while maintaining complete control over data access and security. 
diff --git a/docs/source/install.mdx b/docs/source/install.mdx index 9ec643e1..b33cc5e7 100644 --- a/docs/source/install.mdx +++ b/docs/source/install.mdx @@ -26,14 +26,14 @@ To download a **specific version** of Apollo MCP Server (recommended for CI envi ```bash # Note the `v` prefixing the version number -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.5 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.9.0 ``` To download a specific version of Apollo MCP Server that is a release candidate: ```bash # Note the `v` prefixing the version number and the `-rc` suffix -docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.7.5-rc.1 +docker image pull ghcr.io/apollographql/apollo-mcp-server:v0.9.0-rc.1 ``` @@ -65,7 +65,7 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -curl -sSL https://mcp.apollo.dev/download/nix/v0.7.5 | sh +curl -sSL https://mcp.apollo.dev/download/nix/v0.9.0 | sh ``` If your machine doesn't have the `curl` command, you can get the latest version from the [`curl` downloads page](https://curl.se/download.html). @@ -82,5 +82,5 @@ To install or upgrade to a **specific version** of Apollo MCP Server (recommende ```bash # Note the `v` prefixing the version number -iwr 'https://mcp.apollo.dev/download/win/v0.7.5' | iex +iwr 'https://mcp.apollo.dev/download/win/v0.9.0' | iex ``` diff --git a/docs/source/telemetry.mdx b/docs/source/telemetry.mdx new file mode 100644 index 00000000..9b02f395 --- /dev/null +++ b/docs/source/telemetry.mdx @@ -0,0 +1,168 @@ +--- +title: OpenTelemetry Integration +--- + +AI agents create unpredictable usage patterns and complex request flows that are hard to monitor with traditional methods. The Apollo MCP Server's OpenTelemetry integration provides the visibility you need to run a reliable service for AI agents. 
+ +## What you can monitor + +- **Agent behavior**: Which tools and operations are used most frequently +- **Performance**: Response times and bottlenecks across tool executions and GraphQL operations +- **Reliability**: Error rates, failed operations, and request success patterns +- **Distributed request flows**: Complete traces from agent request through your Apollo Router and subgraphs, with automatic trace context propagation + +## How it works + +The server exports metrics, traces, and events using the OpenTelemetry Protocol (OTLP), ensuring compatibility with your existing observability stack and seamless integration with other instrumented Apollo services. + +## Usage guide + +### Quick start: Local development + +The fastest way to see Apollo MCP Server telemetry in action is with a local setup that requires only Docker. + +#### 5-minute setup +1. Start local observability stack: +docker run -p 3000:3000 -p 4317:4317 -p 4318:4318 --rm -ti grafana/otel-lgtm +1. Add telemetry config to your `config.yaml`: + ```yaml + telemetry: + exporters: + metrics: + otlp: + endpoint: "http://localhost:4318/v1/metrics" + protocol: "http/protobuf" + tracing: + otlp: + endpoint: "http://localhost:4318/v1/traces" + protocol: "http/protobuf" + ``` +1. Restart your MCP server with the updated config +1. Open Grafana at `http://localhost:3000` and explore your telemetry data. Default credentials are username `admin` with password `admin`. + +For detailed steps and dashboard examples, see the [complete Grafana setup guide](guides/telemetry-grafana.mdx). + +### Production deployment + +For production environments, configure your MCP server to send telemetry to any OTLP-compatible backend. The Apollo MCP Server uses standard OpenTelemetry protocols, ensuring compatibility with all major observability platforms. 
+ +#### Configuration example + +```yaml +telemetry: + service_name: "mcp-server-prod" # Custom service name + exporters: + metrics: + otlp: + endpoint: "https://your-metrics-endpoint" + protocol: "http/protobuf" # or "grpc" + tracing: + otlp: + endpoint: "https://your-traces-endpoint" + protocol: "http/protobuf" +``` + +#### Observability platform integration + +The MCP server works with any OTLP-compatible backend. Consult your provider's documentation for specific endpoint URLs and authentication: + +- [Datadog OTLP Integration](https://docs.datadoghq.com/opentelemetry/setup/otlp_ingest_in_the_agent/) - Native OTLP support +- [New Relic OpenTelemetry](https://docs.newrelic.com/docs/opentelemetry/best-practices/opentelemetry-otlp/) - Direct OTLP ingestion +- [AWS Observability](https://aws-otel.github.io/docs/introduction) - Via AWS Distro for OpenTelemetry +- [Grafana Cloud](https://grafana.com/docs/grafana-cloud/send-data/otlp/) - Hosted Grafana with OTLP +- [Honeycomb](https://docs.honeycomb.io/getting-data-in/opentelemetry/) - OpenTelemetry-native platform +- [Jaeger](https://www.jaegertracing.io/docs/1.50/deployment/) - Self-hosted tracing +- [OpenTelemetry Collector](https://opentelemetry.io/docs/collector/deployment/) - Self-hosted with flexible routing + +#### Production configuration best practices + +##### Environment and security +```yaml +# Set via environment variable +export ENVIRONMENT=production + +telemetry: + service_name: "apollo-mcp-server" + version: "1.0.0" # Version for correlation + exporters: + metrics: + otlp: + endpoint: "https://secure-endpoint" # Always use HTTPS + protocol: "http/protobuf" # Generally more reliable than gRPC +``` + +##### Performance considerations +- **Protocol choice**: `http/protobuf` is often more reliable through firewalls and load balancers than `grpc` +- **Batch export**: OpenTelemetry automatically batches telemetry data for efficiency +- **Network timeouts**: Default timeouts are usually appropriate, but 
monitor for network issues + +##### Resource correlation +- The `ENVIRONMENT` variable automatically tags all telemetry with `deployment.environment.name` +- Use consistent `service_name` across all your Apollo infrastructure (Router, subgraphs, MCP server) +- Set `version` to track releases and correlate issues with deployments + +#### Troubleshooting + +##### Common issues +- **Connection refused**: Verify endpoint URL and network connectivity +- **Authentication errors**: Check if your provider requires API keys or special headers +- **Missing data**: Confirm your observability platform supports OTLP and is configured to receive data +- **High memory usage**: Monitor telemetry export frequency and consider sampling for high-volume environments + +##### Verification +```bash +# Check if telemetry is being exported (look for connection attempts) +curl -v https://your-endpoint/v1/metrics + +# Monitor server logs for OpenTelemetry export errors +./apollo-mcp-server --config config.yaml 2>&1 | grep -i "otel\|telemetry" +``` + +## Configuration Reference + +The OpenTelemetry integration is configured via the `telemetry` section of the [configuration reference page](/apollo-mcp-server/config-file#telemetry). + +## Emitted Metrics + +The server emits the following metrics, which are invaluable for monitoring and alerting. All duration metrics are in milliseconds. + +| Metric Name | Type | Description | Attributes | +|---|---|---|---| +| `apollo.mcp.initialize.count` | Counter | Incremented for each `initialize` request. | (none) | +| `apollo.mcp.list_tools.count` | Counter | Incremented for each `list_tools` request. | (none) | +| `apollo.mcp.get_info.count` | Counter | Incremented for each `get_info` request. | (none) | +| `apollo.mcp.tool.count` | Counter | Incremented for each tool call. | `tool_name`, `success` (bool) | +| `apollo.mcp.tool.duration` | Histogram | Measures the execution duration of each tool call. 
| `tool_name`, `success` (bool) |
+| `apollo.mcp.operation.count`| Counter | Incremented for each downstream GraphQL operation executed by a tool. | `operation.id`, `operation.type` ("persisted_query" or "operation"), `success` (bool) |
+| `apollo.mcp.operation.duration`| Histogram | Measures the round-trip duration of each downstream GraphQL operation. | `operation.id`, `operation.type`, `success` (bool) |
+
+In addition to these metrics, the server also emits standard [HTTP server metrics](https://opentelemetry.io/docs/specs/semconv/http/http-metrics/) (e.g., `http.server.duration`, `http.server.active_requests`) courtesy of the `axum-otel-metrics` library.
+
+
+## Emitted Traces
+
+Spans are generated for the following actions:
+
+- **Incoming HTTP Requests**: A root span is created for every HTTP request to the MCP server.
+- **MCP Handler Methods**: Nested spans are created for each of the main MCP protocol methods (`initialize`, `call_tool`, `list_tools`).
+- **Tool Execution**: `call_tool` spans contain nested spans for the specific tool being executed (e.g., `introspect`, `search`, or a custom GraphQL operation).
+- **Downstream GraphQL Calls**: The `execute` tool and custom operation tools create child spans for their outgoing `reqwest` HTTP calls, capturing the duration of the downstream request. The `traceparent` and `tracestate` headers are propagated automatically, enabling distributed traces.
+
+### Cardinality Control
+
+High-cardinality metrics can occur in MCP Servers with a large number of tools or when clients are allowed to generate freeform operations.
+To prevent performance issues and reduce costs, the Apollo MCP Server provides two mechanisms to control metric cardinality: trace sampling and attribute filtering. 
+ +#### Trace Sampling + +Configure the Apollo MCP Server to sample traces sent to your OpenTelemetry Collector using the `sampler` field in the `telemetry.tracing` configuration: + +- **always_on** - Send every trace +- **always_off** - Disable trace collection entirely +- **0.0-1.0** - Send a specified percentage of traces + +#### Attribute Filtering + +The Apollo MCP Server configuration also allows for omitting attributes such as `tool_name` or `operation_id` that can often lead to high cardinality metrics in systems that treat each collected attribute value as a new metric. +Both traces and metrics have an `omitted_attributes` option that takes a list of strings. Any attribute name in the list will be filtered out and not sent to the collector. +For detailed configuration options, see the [telemetry configuration reference](/apollo-mcp-server/config-file#telemetry). diff --git a/e2e/mcp-server-tester/run_tests.sh b/e2e/mcp-server-tester/run_tests.sh index 91647ef5..95c2a6bf 100755 --- a/e2e/mcp-server-tester/run_tests.sh +++ b/e2e/mcp-server-tester/run_tests.sh @@ -71,4 +71,4 @@ safe_dir="${safe_dir//|/\\|}" sed "s||$safe_dir|g" "$TEMPLATE_PATH" > "$GEN_CONFIG" # Run the command -npx -y mcp-server-tester tools "$TESTS" --server-config "$GEN_CONFIG" \ No newline at end of file +npx -y mcp-server-tester@1.4.0 tools "$TESTS" --server-config "$GEN_CONFIG" \ No newline at end of file diff --git a/flake.lock b/flake.lock index 5e82287f..7e811193 100644 --- a/flake.lock +++ b/flake.lock @@ -3,11 +3,11 @@ "cache-nix-action": { "flake": false, "locked": { - "lastModified": 1746350578, - "narHash": "sha256-66auSJUldF+QLnMZEvOR9y9+P6doadeHmYl5UDFqVic=", + "lastModified": 1754213534, + "narHash": "sha256-4QgmQ8UAecAuu84hh5dYni1ahlvXvu2UdCDme6Jnh68=", "owner": "nix-community", "repo": "cache-nix-action", - "rev": "76f6697d63b7378f7161d52f3d81784130ecd90d", + "rev": "e2cf51da82e145785f5db595f553f7cbc2ca54df", "type": "github" }, "original": { @@ -18,11 +18,11 @@ }, "crane": 
{ "locked": { - "lastModified": 1751562746, - "narHash": "sha256-smpugNIkmDeicNz301Ll1bD7nFOty97T79m4GUMUczA=", + "lastModified": 1755993354, + "narHash": "sha256-FCRRAzSaL/+umLIm3RU3O/+fJ2ssaPHseI2SSFL8yZU=", "owner": "ipetkov", "repo": "crane", - "rev": "aed2020fd3dc26e1e857d4107a5a67a33ab6c1fd", + "rev": "25bd41b24426c7734278c2ff02e53258851db914", "type": "github" }, "original": { @@ -82,11 +82,11 @@ ] }, "locked": { - "lastModified": 1751510438, - "narHash": "sha256-m8PjOoyyCR4nhqtHEBP1tB/jF+gJYYguSZmUmVTEAQE=", + "lastModified": 1756089517, + "narHash": "sha256-KGinVKturJFPrRebgvyUB1BUNqf1y9FN+tSJaTPlnFE=", "owner": "oxalica", "repo": "rust-overlay", - "rev": "7f415261f298656f8164bd636c0dc05af4e95b6b", + "rev": "44774c8c83cd392c50914f86e1ff75ef8619f1cd", "type": "github" }, "original": { @@ -112,11 +112,11 @@ }, "unstable": { "locked": { - "lastModified": 1751498133, - "narHash": "sha256-QWJ+NQbMU+NcU2xiyo7SNox1fAuwksGlQhpzBl76g1I=", + "lastModified": 1756128520, + "narHash": "sha256-R94HxJBi+RK1iCm8Y4Q9pdrHZl0GZoDPIaYwjxRNPh4=", "owner": "nixos", "repo": "nixpkgs", - "rev": "d55716bb59b91ae9d1ced4b1ccdea7a442ecbfdb", + "rev": "c53baa6685261e5253a1c355a1b322f82674a824", "type": "github" }, "original": { diff --git a/rust-toolchain.toml b/rust-toolchain.toml index 03648c85..c8ad2110 100644 --- a/rust-toolchain.toml +++ b/rust-toolchain.toml @@ -1,4 +1,4 @@ [toolchain] -channel = "stable" +channel = "1.89.0" profile = "default" components = ["rust-analyzer", "rust-src"] diff --git a/scripts/nix/install.sh b/scripts/nix/install.sh index 40767447..bdc43c89 100755 --- a/scripts/nix/install.sh +++ b/scripts/nix/install.sh @@ -14,7 +14,7 @@ BINARY_DOWNLOAD_PREFIX="${APOLLO_MCP_SERVER_BINARY_DOWNLOAD_PREFIX:="https://git # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. 
-PACKAGE_VERSION="v0.7.5" +PACKAGE_VERSION="v0.9.0" download_binary_and_run_installer() { downloader --check diff --git a/scripts/windows/install.ps1 b/scripts/windows/install.ps1 index d8007236..6963bb75 100644 --- a/scripts/windows/install.ps1 +++ b/scripts/windows/install.ps1 @@ -8,7 +8,7 @@ # Apollo MCP Server version defined in apollo-mcp-server's Cargo.toml # Note: Change this line manually during the release steps. -$package_version = 'v0.7.5' +$package_version = 'v0.9.0' function Install-Binary($apollo_mcp_server_install_args) { $old_erroractionpreference = $ErrorActionPreference