diff --git a/.github/workflows/website-root.yml b/.github/workflows/website-root.yml index dc437b1ca92..bce0ee19d66 100644 --- a/.github/workflows/website-root.yml +++ b/.github/workflows/website-root.yml @@ -4,11 +4,11 @@ on: workflow_dispatch: push: branches: - - v1.15 + - v1.16 pull_request: types: [opened, synchronize, reopened, closed] branches: - - v1.15 + - v1.16 concurrency: # Cancel the previously triggered build for only PR build. @@ -50,18 +50,18 @@ jobs: if [ $GITHUB_EVENT_NAME == 'pull_request' ]; then STAGING_URL="https://${SWA_BASE}-${{github.event.number}}.westus2.azurestaticapps.net/" fi - hugo ${STAGING_URL+-b "$STAGING_URL"} + hugo ${STAGING_URL+-b "$STAGING_URL"} --minify - name: Deploy docs site uses: Azure/static-web-apps-deploy@v1 with: azure_static_web_apps_api_token: ${{ secrets.AZURE_STATIC_WEB_APPS_API_TOKEN_PROUD_BAY_0E9E0E81E }} repo_token: ${{ secrets.GITHUB_TOKEN }} action: "upload" - app_location: "/daprdocs/public/" + app_location: "/daprdocs/public" output_location: "/" skip_app_build: true skip_deploy_on_missing_secrets: true - - name: Upload Hugo artifacts + - name: Upload Hugo artifacts for Algolia uses: actions/upload-artifact@v4 with: name: hugo_build @@ -95,8 +95,8 @@ jobs: uses: actions/checkout@v4 with: submodules: false - - name: Download Hugo artifacts - uses: actions/download-artifact@v3 + - name: Download Hugo artifacts for Algolia + uses: actions/download-artifact@v4 with: name: hugo_build path: site/ diff --git a/README.md b/README.md index 3af7bb0833e..18d2f53b41f 100644 --- a/README.md +++ b/README.md @@ -16,8 +16,8 @@ The following branches are currently maintained: | Branch | Website | Description | | ------------------------------------------------------------ | -------------------------- | ------------------------------------------------------------------------------------------------ | -| [v1.15](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. | -| [v1.16](https://github.com/dapr/docs/tree/v1.16) (pre-release) | https://v1-16.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.15+ go here. | +| [v1.16](https://github.com/dapr/docs) (primary) | https://docs.dapr.io | Latest Dapr release documentation. Typo fixes, clarifications, and most documentation goes here. | +| [v1.17](https://github.com/dapr/docs/tree/v1.16) (pre-release) | https://v1-17.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.16+ go here. | For more information visit the [Dapr branch structure](https://docs.dapr.io/contributing/docs-contrib/contributing-docs/#branch-guidance) document. @@ -68,13 +68,9 @@ Continue with the [Run local server](#run-local-server) steps. 1. Ensure pre-requisites are installed. 1. [Fork](https://github.com/dapr/docs/fork) and clone this repository. -1. Change to daprdocs directory: +1. Make sure you are in the root folder for the docs repo. You should find a hugo.yaml file in this directory. -```sh -cd ./daprdocs -``` - -4. Update submodules: +1. Update submodules: ```sh git submodule update --init --recursive diff --git a/daprdocs/content/en/concepts/overview.md b/daprdocs/content/en/concepts/overview.md index 273ab87e698..881c5fd521d 100644 --- a/daprdocs/content/en/concepts/overview.md +++ b/daprdocs/content/en/concepts/overview.md @@ -141,6 +141,14 @@ Dapr can be used from any developer framework. 
Here are some that have been inte | [JavaScript](https://github.com/dapr/js-sdk) | [Express](https://expressjs.com/) | Build Express applications with Dapr APIs | [PHP]({{% ref php %}}) | | You can serve with Apache, Nginx, or Caddyserver. +#### Dapr Agents + +![Dapr Agents Overview](/images/dapr-agents/concepts-agents-overview.png) + + +[Dapr Agents]({{% ref "../developing-applications/dapr-agents" %}}) is a Python framework for building intelligent, durable agents powered by LLMs. It provides agent-centric capabilities such as tool calling, memory management, [MCP support](https://modelcontextprotocol.io/) and agent orchestration, while leveraging Dapr for durability, observability, and security, at scale. + + #### Integrations and extensions Visit the [integrations]({{% ref integrations %}}) page to learn about some of the first-class support Dapr has for various frameworks and external products, including: diff --git a/daprdocs/content/en/concepts/security-concept.md b/daprdocs/content/en/concepts/security-concept.md index fa284cde420..2008c38a039 100644 --- a/daprdocs/content/en/concepts/security-concept.md +++ b/daprdocs/content/en/concepts/security-concept.md @@ -244,7 +244,7 @@ The audit was a holistic security audit with the following goals: - Formalize a threat model of Dapr - Perform manual code review -- Evaluate Daprs fuzzing suite against the formalized threat model +- Evaluate Dapr's fuzzing suite against the formalized threat model - Carry out a SLSA review of Dapr. You can find the full report [here](/docs/Dapr-september-2023-security-audit-report.pdf). diff --git a/daprdocs/content/en/concepts/terminology.md b/daprdocs/content/en/concepts/terminology.md index 1c74bf22366..da9b4f08030 100644 --- a/daprdocs/content/en/concepts/terminology.md +++ b/daprdocs/content/en/concepts/terminology.md @@ -10,15 +10,17 @@ This page details all of the common terms you may come across in the Dapr docs. | Term | Definition | More information | |:-----|------------|------------------| -| App/Application | A running service/binary, usually one that you as the user create and run. +| App/Application | A running service/binary, usually one that you as the user create and run. | Building block | An API that Dapr provides to users to help in the creation of microservices and applications. | [Dapr building blocks]({{% ref building-blocks-concept %}}) | Component | Modular types of functionality that are used either individually or with a collection of other components, by a Dapr building block. | [Dapr components]({{% ref components-concept %}}) | Configuration | A YAML file declaring all of the settings for Dapr sidecars or the Dapr control plane. This is where you can configure control plane mTLS settings, or the tracing and middleware settings for an application instance. | [Dapr configuration]({{% ref configuration-concept %}}) | Dapr | Distributed Application Runtime. | [Dapr overview]({{% ref overview %}}) +| Dapr Actors | A Dapr building block that implements the virtual actor pattern for building stateful, single-threaded objects with identity, lifecycle, and concurrency management. | [Actors overview]({{% ref actors-overview %}}) +| Dapr Agents | A developer framework built on top of Dapr Python SDK for creating durable agentic applications powered by LLMs. | [Dapr Agents]({{% ref "../developing-applications/dapr-agents" %}}) | Dapr control plane | A collection of services that are part of a Dapr installation on a hosting platform such as a Kubernetes cluster. 
This allows Dapr-enabled applications to run on the platform and handles Dapr capabilities such as actor placement, Dapr sidecar injection, or certificate issuance/rollover. | [Self-hosted overview]({{% ref self-hosted-overview %}})
[Kubernetes overview]({{% ref kubernetes-overview %}}) +| Dapr Workflows | A Dapr building block for authoring code-first workflows with durable execution that survive crashes, support long-running processes, and enable human-in-the-loop interactions. | [Workflow overview]({{% ref workflow-overview %}}) | HTTPEndpoint | HTTPEndpoint is a Dapr resource use to identify non-Dapr endpoints to invoke via the service invocation API. | [Service invocation API]({{% ref service_invocation_api %}}) | Namespacing | Namespacing in Dapr provides isolation, and thus provides multi-tenancy. | Learn more about namespacing [components]({{% ref component-scopes %}}), [service invocation]({{% ref service-invocation-namespaces %}}), [pub/sub]({{% ref pubsub-namespaces %}}), and [actors]({{% ref namespaced-actors %}}) | Self-hosted | Windows/macOS/Linux machine(s) where you can run your applications with Dapr. Dapr provides the capability to run on machines in "self-hosted" mode. | [Self-hosted mode]({{% ref self-hosted-overview %}}) -| Service | A running application or binary. This can refer to your application or to a Dapr application. -| Sidecar | A program that runs alongside your application as a separate process or container. | [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar) - +| Service | A running application or binary. This can refer to your application or to a Dapr application. +| Sidecar | A program that runs alongside your application as a separate process or container. | [Sidecar pattern](https://docs.microsoft.com/azure/architecture/patterns/sidecar) diff --git a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md index 1ebdfb88172..7c72665b2f0 100644 --- a/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md +++ b/daprdocs/content/en/contributing/docs-contrib/maintainer-guide.md @@ -103,24 +103,22 @@ These steps will prepare the latest release branch for archival. git checkout -b release_v1.0 ``` -1. In VS Code, navigate to `/daprdocs/config.toml`. -1. Add the following TOML to the `# Versioning` section (around line 154): - - ```toml - version_menu = "v1.0" - version = "v1.0" - archived_version = true - url_latest_version = "https://docs.dapr.io" - - [[params.versions]] - version = "v1.2 (preview)" - url = "v1-2.docs.dapr.io" - [[params.versions]] - version = "v1.1 (latest)" - url = "#" - [[params.versions]] - version = "v1.0" - url = "https://v1-0.docs.dapr.io" +1. In VS Code, navigate to `hugo.yaml` located in the root. +1. Add the following configuration to the `# Versioning` section (around line 121 and onwards): + + ```yaml + version_menu: "v1.0" + version: "v1.0" + archived_version: true + url_latest_version: "https://docs.dapr.io" + + versions: + - version: v1.2 (preview) + url: https://v1-2.docs.dapr.io + - version: v1.1 (latest) + url: "#" + - version: v1.0 + url: https://v1-0.docs.dapr.io ``` 1. Delete `.github/workflows/website-root.yml`. @@ -146,26 +144,25 @@ These steps will prepare the upcoming release branch for promotion to latest rel git checkout -b release_v1.1 ``` -1. In VS Code, navigate to `/daprdocs/config.toml`. -1. Update line 1 to `baseURL - https://docs.dapr.io/`. -1. Update the `# Versioning` section (around line 154) to display the correct versions and tags: +1. In VS Code, navigate to `hugo.yaml` located in the root. +1. Update line 1 to `baseURL: https://docs.dapr.io/`. +1. 
Update the `# Versioning` section (around line 121 and onwards) to display the correct versions and tags: - ```toml + ```yaml # Versioning - version_menu = "v1.1 (latest)" - version = "v1.1" - archived_version = false - url_latest_version = "https://docs.dapr.io" - - [[params.versions]] - version = "v1.2 (preview)" - url = "v1-2.docs.dapr.io" - [[params.versions]] - version = "v1.1 (latest)" - url = "#" - [[params.versions]] - version = "v1.0" - url = "https://v1-0.docs.dapr.io" + version_menu: "v1.1 (latest)" + version: "v1.1" + archived_version: false + url_latest_version: https://docs.dapr.io + github_branch: v1.1 + + versions: + - version: v1.2 (preview) + url: https://v1-2.docs.dapr.io + - version: v1.1 (latest) + url: "#" + - version: v1.0 + url: https://v1-0.docs.dapr.io ``` 1. Navigate to `.github/workflows/website-root.yml`. @@ -194,6 +191,7 @@ These steps will prepare the upcoming release branch for promotion to latest rel | [v1.2](https://github.com/dapr/docs/tree/v1.2) (pre-release) | https://v1-2.docs.dapr.io/ | Pre-release documentation. Doc updates that are only applicable to v1.2+ go here. | ``` +1. Update the _Supported versions_ table in `support-release-policy.md`; add a new line at the top of the table with the new version of the runtime and SDKs. Change the releases which are older than n-2 to be `Unsupported`. 1. Update the `dapr-latest-version.html` shortcode partial to the new minor/patch version (in this example, `1.1.0` and `1.1`). 1. Commit the staged changes and push to your branch (`release_v1.1`). 1. Open a PR from `release/v1.1` to `v1.1`. diff --git a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md index b51ffc79f2b..3584e9a7160 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md +++ b/daprdocs/content/en/developing-applications/building-blocks/actors/namespaced-actors.md @@ -25,7 +25,7 @@ In self-hosted mode, you can specify the namespace for a Dapr instance by settin {{% /tab %}} {{% tab "Kubernetes" %}} -On Kubernetes, you can create and configure namepaces when deploying actor applications. For example, start with the following `kubectl` commands: +On Kubernetes, you can create and configure namespaces when deploying actor applications. For example, start with the following `kubectl` commands: ```bash kubectl create namespace namespace-actorA diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md index 4f98d726a12..7483a41c296 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/conversation-overview.md @@ -14,11 +14,16 @@ Dapr's conversation API reduces the complexity of securely and reliably interact Diagram showing the flow of a user's app communicating with Dapr's LLM components. 
-In additon to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), you can also pair the conversation API with Dapr functionalities, like: -- Resiliency circuit breakers and retries to circumvent limit and token errors, or -- Middleware to authenticate requests coming to and from the LLM +In addition to enabling critical performance and security functionality (like [prompt caching]({{% ref "#prompt-caching" %}}) and [PII scrubbing]({{% ref "#personally-identifiable-information-pii-obfuscation" %}})), the conversation API also provides: -Dapr provides observability by issuing metrics for your LLM interactions. +- **Tool calling capabilities** that allow LLMs to interact with external functions and APIs, enabling more sophisticated AI applications +- **OpenAI-compatible interface** for seamless integration with existing AI workflows and tools + +You can also pair the conversation API with Dapr functionalities, like: + +- Resiliency policies including circuit breakers to handle repeated errors, timeouts to safeguards from slow responses, and retries for temporary network failures +- Observability with metrics and distributed tracing using OpenTelemetry and Zipkin +- Middleware to authenticate requests to and from the LLM ## Features @@ -26,7 +31,7 @@ The following features are out-of-the-box for [all the supported conversation co ### Prompt caching -Prompt caching optimizes performance by storing and reusing prompts that are often repeated across multiple API calls. To significantly reduce latency and cost, Dapr stores frequent prompts in a local cache to be reused by your cluster, pod, or other, instead of reprocessing the information for every new request. +The Conversation API includes a built-in caching mechanism (enabled by the cacheTTL parameter) that optimizes both performance and cost by storing previous model responses for faster delivery to repetitive requests. This is particularly valuable in scenarios where similar prompt patterns occur frequently. When caching is enabled, Dapr creates a deterministic hash of the prompt text and all configuration parameters, checks if a valid cached response exists for this hash within the time period (for example, 10 minutes), and returns the cached response immediately if found. If no match exists, Dapr makes the API call and stores the result. This eliminates external API calls, lowers latency, and avoids provider charges for repeated requests. The cache exists entirely within your runtime environment, with each Dapr sidecar maintaining its own local cache. ### Personally identifiable information (PII) obfuscation @@ -45,13 +50,24 @@ The PII scrubber obfuscates the following user information: - SHA-256 hex - MD5 hex +### Tool calling support + +The conversation API supports advanced tool calling capabilities that allow LLMs to interact with external functions and APIs. This enables you to build sophisticated AI applications that can: + +- Execute custom functions based on user requests +- Integrate with external services and databases +- Provide dynamic, context-aware responses +- Create multi-step workflows and automation + +Tool calling follows [OpenAI's function calling format](https://platform.openai.com/docs/guides/function-calling), making it easy to integrate with existing AI development workflows and tools. 
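+
+As a rough, SDK-agnostic illustration, the sketch below builds a single tool definition in OpenAI's function-calling format. The `get_weather` function and its parameter schema are hypothetical, and the exact request field that carries tool definitions depends on the SDK and API version you use.
+
+```go
+package main
+
+import (
+	"encoding/json"
+	"fmt"
+)
+
+func main() {
+	// Hypothetical tool definition following OpenAI's function-calling format.
+	// The function name and parameters are examples only.
+	weatherTool := map[string]any{
+		"type": "function",
+		"function": map[string]any{
+			"name":        "get_weather",
+			"description": "Look up the current weather for a given city",
+			"parameters": map[string]any{
+				"type": "object",
+				"properties": map[string]any{
+					"city": map[string]any{"type": "string"},
+				},
+				"required": []string{"city"},
+			},
+		},
+	}
+
+	// Print the JSON payload that would accompany a conversation request.
+	b, _ := json.MarshalIndent(weatherTool, "", "  ")
+	fmt.Println(string(b))
+}
+```
+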
+ ## Demo Watch the demo presented during [Diagrid's Dapr v1.15 celebration](https://www.diagrid.io/videos/dapr-1-15-deep-dive) to see how the conversation API works using the .NET SDK. {{< youtube id=NTnwoDhHIcQ start=5444 >}} -## Try out conversation +## Try out conversation API ### Quickstarts and tutorials diff --git a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md index 99b7803b8d6..e79af9f190e 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md +++ b/daprdocs/content/en/developing-applications/building-blocks/conversation/howto-conversation-layer.md @@ -56,7 +56,7 @@ spec: ## Connect the conversation client -The following examples use an HTTP client to send a POST request to Dapr's sidecar HTTP endpoint. You can also use [the Dapr SDK client instead]({{% ref "#related-links" %}}). +The following examples use the Dapr SDK client to interact with LLMs. {{< tabpane text=true >}} @@ -83,7 +83,7 @@ var response = await conversationClient.ConverseAsync("conversation", DaprConversationRole.Generic) }); -Console.WriteLine("Received the following from the LLM:"); +Console.WriteLine("conversation output: "); foreach (var resp in response.Outputs) { Console.WriteLine($"\t{resp.Result}"); @@ -92,6 +92,77 @@ foreach (var resp in response.Outputs) {{% /tab %}} + +{{% tab "Java" %}} + +```java +//dependencies +import io.dapr.client.DaprClientBuilder; +import io.dapr.client.DaprPreviewClient; +import io.dapr.client.domain.ConversationInput; +import io.dapr.client.domain.ConversationRequest; +import io.dapr.client.domain.ConversationResponse; +import reactor.core.publisher.Mono; + +import java.util.List; + +public class Conversation { + + public static void main(String[] args) { + String prompt = "Please write a witty haiku about the Dapr distributed programming framework at dapr.io"; + + try (DaprPreviewClient client = new DaprClientBuilder().buildPreviewClient()) { + System.out.println("Input: " + prompt); + + ConversationInput daprConversationInput = new ConversationInput(prompt); + + // Component name is the name provided in the metadata block of the conversation.yaml file. 
+ Mono responseMono = client.converse(new ConversationRequest("echo", + List.of(daprConversationInput)) + .setContextId("contextId") + .setScrubPii(true).setTemperature(1.1d)); + ConversationResponse response = responseMono.block(); + System.out.printf("conversation output: %s", response.getConversationOutputs().get(0).getResult()); + } catch (Exception e) { + throw new RuntimeException(e); + } + } +} +``` + +{{% /tab %}} + + +{{% tab "Python" %}} + +```python +#dependencies +from dapr.clients import DaprClient +from dapr.clients.grpc._request import ConversationInput + +#code +with DaprClient() as d: + inputs = [ + ConversationInput(content="Please write a witty haiku about the Dapr distributed programming framework at dapr.io", role='user', scrub_pii=True), + ] + + metadata = { + 'model': 'modelname', + 'key': 'authKey', + 'cacheTTL': '10m', + } + + response = d.converse_alpha1( + name='echo', inputs=inputs, temperature=0.7, context_id='chat-123', metadata=metadata + ) + + for output in response.outputs: + print(f'conversation output: {output.result}') +``` + +{{% /tab %}} + + {{% tab "Go" %}} @@ -189,21 +260,40 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour {{% /tab %}} - -{{% tab "Go" %}} + +{{% tab "Java" %}} ```bash -dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go + +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- mvn spring-boot:run ``` -**Expected output** +{{% /tab %}} + + +{{% tab "Python" %}} + +```bash + +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- python3 app.py ``` - - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io' + +{{% /tab %}} + + + +{{% tab "Go" %}} + +```bash +dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resources-path ./config -- go run ./main.go ``` + {{% /tab %}} + + {{% tab "Rust" %}} @@ -211,17 +301,17 @@ dapr run --app-id conversation --dapr-grpc-port 50001 --log-level debug --resour dapr run --app-id=conversation --resources-path ./config --dapr-grpc-port 3500 -- cargo run --example conversation ``` +{{% /tab %}} + +{{< /tabpane >}} + + **Expected output** ``` - - 'conversation input: hello world' - - 'conversation output: hello world' + - '== APP == conversation output: Please write a witty haiku about the Dapr distributed programming framework at dapr.io' ``` -{{% /tab %}} - -{{< /tabpane >}} - ## Advanced features The conversation API supports the following features: @@ -230,9 +320,11 @@ The conversation API supports the following features: 1. **PII scrubbing:** Allows for the obfuscation of data going in and out of the LLM. +1. **Tool calling:** Allows LLMs to interact with external functions and APIs. + To learn how to enable these features, see the [conversation API reference guide]({{% ref conversation_api %}}). -## Related links +## Conversation API examples in Dapr SDK repositories Try out the conversation API using the full examples provided in the supported SDK repos. 
@@ -246,7 +338,23 @@ Try out the conversation API using the full examples provided in the supported S {{% /tab %}} - + + +{{% tab "Java" %}} + +[Dapr conversation example with the Java SDK](https://github.com/dapr/java-sdk/tree/master/examples/src/main/java/io/dapr/examples/conversation) + +{{% /tab %}} + + + +{{% tab "Python" %}} + +[Dapr conversation example with the Python SDK](https://github.com/dapr/python-sdk/tree/main/examples/conversation) + +{{% /tab %}} + + {{% tab "Go" %}} [Dapr conversation example with the Go SDK](https://github.com/dapr/go-sdk/tree/main/examples/conversation) @@ -264,6 +372,6 @@ Try out the conversation API using the full examples provided in the supported S ## Next steps - +- [Conversation quickstart]({{% ref conversation-quickstart %}}) - [Conversation API reference guide]({{% ref conversation_api %}}) - [Available conversation components]({{% ref supported-conversation %}}) diff --git a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md index 12e50dda952..2057d25ff9c 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md +++ b/daprdocs/content/en/developing-applications/building-blocks/jobs/howto-schedule-and-handle-triggered-jobs.md @@ -56,7 +56,7 @@ internal sealed record BackupJobData([property: JsonPropertyName("task")] string internal sealed record BackupMetadata([property: JsonPropertyName("DBName")]string DatabaseName, [property: JsonPropertyName("BackupLocation")] string BackupLocation); ``` -Next, set up a handler as part of your application setup that will be called anytime a job is triggered on your +Next, set up a handler as part of your application setup that will be called any time a job is triggered on your application. It's the responsibility of this handler to identify how jobs should be processed based on the job name provided. This works by registering a handler with ASP.NET Core at `/job/`, where `` is parameterized and diff --git a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md index f99f44c3b88..40fc8719475 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md +++ b/daprdocs/content/en/developing-applications/building-blocks/state-management/howto-outbox.md @@ -16,23 +16,72 @@ For example, you can use the outbox pattern to: With Dapr's outbox support, you can notify subscribers when an application's state is created or updated when calling Dapr's [transactions API]({{% ref "state_api.md#state-transactions" %}}). -The diagram below is an overview of how the outbox feature works: +The diagram below is an overview of how the outbox feature works at a high level: 1) Service A saves/updates state to the state store using a transaction. 2) A message is written to the broker under the same transaction. When the message is successfully delivered to the message broker, the transaction completes, ensuring the state and message are transacted together. 3) The message broker delivers the message topic to any subscribers - in this case, Service B. 
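+
+As a minimal sketch of step 1, the Go snippet below saves state with a transaction using the Go SDK. The state store name `mystatestore`, the key, and the payload are illustrative assumptions, and the component is assumed to already be configured for outbox publishing as described in the "Enable the outbox pattern" section below.
+
+```go
+package main
+
+import (
+	"context"
+	"log"
+
+	dapr "github.com/dapr/go-sdk/client"
+)
+
+func main() {
+	client, err := dapr.NewClient()
+	if err != nil {
+		log.Fatal(err)
+	}
+	defer client.Close()
+
+	// Upsert the order in a single transaction. Because the (assumed) state store
+	// component is configured with the outbox metadata, Dapr also publishes a
+	// message for this change to the configured pub/sub topic as part of the
+	// same transaction.
+	ops := []*dapr.StateOperation{
+		{
+			Type: dapr.StateOperationTypeUpsert,
+			Item: &dapr.SetStateItem{
+				Key:   "order-1",
+				Value: []byte(`{"orderId":"order-1","status":"created"}`),
+			},
+		},
+	}
+
+	if err := client.ExecuteStateTransaction(context.Background(), "mystatestore", nil, ops); err != nil {
+		log.Fatal(err)
+	}
+}
+```
+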
-Diagram showing the steps of the outbox pattern +Diagram showing the overview of outbox pattern +## How outbox works under the hood + +Dapr outbox processes requests in two flows: the user request flow and the background message flow. Together, they guarantee that state and events stay consistent. + +Diagram showing the steps of the outbox pattern + +This is the sequence of interactions: + +1. An application calls the Dapr State Management API to write state transactionally using the transactional methods. + This is the entry point where business data, such as an order or profile update, is submitted for persistence. + +2. Dapr publishes an intent message with a unique transaction ID to an internal outbox topic. + This durable record ensures the event intent exists before any database commit happens. + +3. The state and a transaction marker are written atomically in the same state store. + Both the business data and the marker are committed in the same transaction, preventing partial writes. + +4. The application receives a success response after the transaction commits. + At this point, the application can continue, knowing state is saved and the event intent is guaranteed. + +5. A background subscriber reads the intent message. + When outbox is enabled, Dapr starts consumers that process the internal outbox topic. + +6. The subscriber verifies the transaction marker in the state store. + This check confirms that the database commit was successful before publishing externally. + +7. Verified business event is published to the external pub/sub topic. + The event is sent to the configured broker (Kafka, RabbitMQ, etc.) where other services can consume it. + +8. The marker is cleaned up (deleted) from the state store. + This prevents unbounded growth in the database once the event has been successfully delivered. + +9. Message is acknowledged and removed from internal topic + If publishing or cleanup fails, Dapr retries, ensuring reliable at-least-once delivery. + ## Requirements -The outbox feature can be used with using any [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr. All [pub/sub brokers]({{% ref supported-pubsub %}}) are supported with the outbox feature. +1. The outbox feature requires a [transactional state store]({{% ref supported-state-stores %}}) supported by Dapr. + [Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}}) -[Learn more about the transactional methods you can use.]({{% ref "howto-get-save-state.md#perform-state-transactions" %}}) +2. Any [pub/sub broker]({{% ref supported-pubsub %}}) supported by Dapr can be used with the outbox feature. -{{% alert title="Note" color="primary" %}} -Message brokers that work with the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are encouraged to reduce the chances of duplicate events. -{{% /alert %}} + {{% alert title="Note" color="primary" %}} + Message brokers that support the competing consumer pattern (for example, [Apache Kafka]({{% ref setup-apache-kafka%}})) are recommended to reduce the chance of duplicate events. + {{% /alert %}} + +3. 
Internal outbox topic + When outbox is enabled, Dapr creates an internal topic using the following naming convention: `{namespace}{appID}{topic}outbox`, where: + + - `namespace`: the Dapr application namespace (if configured) + - `appID`: the Dapr application identifier + - `topic`: the value specified in the `outboxPublishTopic` metadata + + This way each outbox topic is uniquely identified per application and external topic, preventing routing conflicts in multi-tenant environments. + + {{% alert title="Note" color="primary" %}} + Ensure that the topic is created in advance, or Dapr has sufficient permissions to create the topic at startup time. + {{% /alert %}} ## Enable the outbox pattern @@ -682,3 +731,7 @@ The `data` CloudEvent field is reserved for Dapr's use only, and is non-customiz Watch [this video for an overview of the outbox pattern](https://youtu.be/rTovKpG0rhY?t=1338): {{< youtube id=rTovKpG0rhY start=1338 >}} + +## Next Steps + +[How Dapr Outbox Eliminates Dual Writes in Distributed Applications](https://www.diagrid.io/blog/how-dapr-outbox-eliminates-dual-writes-in-distributed-applications) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md index ddb92b63ac3..ca3777064b9 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/howto-author-workflow.md @@ -197,10 +197,12 @@ public class DemoWorkflowActivity implements WorkflowActivity { +### Define workflow activities + Define each workflow activity you'd like your workflow to perform. The Activity input can be unmarshalled from the context with `ctx.GetInput`. Activities should be defined as taking a `ctx workflow.ActivityContext` parameter and returning an interface and error. ```go -func TestActivity(ctx workflow.ActivityContext) (any, error) { +func BusinessActivity(ctx workflow.ActivityContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return "", err @@ -211,6 +213,87 @@ func TestActivity(ctx workflow.ActivityContext) (any, error) { } ``` +### Define the workflow + +Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow. + +```go +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { + var input int + if err := ctx.GetInput(&input); err != nil { + return nil, err + } + var output string + if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + return nil, err + } + if err := ctx.WaitForExternalEvent("businessEvent", time.Second*60).Await(&output); err != nil { + return nil, err + } + + if err := ctx.CreateTimer(time.Second).Await(nil); err != nil { + return nil, nil + } + return output, nil +} +``` + +### Register workflows and activities + +Before your application can execute workflows, you must register both the workflow orchestrator and its activities with a workflow registry. This ensures Dapr knows which functions to call when executing your workflow. 
+ +```go +func main() { + // Create a workflow registry + r := workflow.NewRegistry() + + // Register the workflow orchestrator + if err := r.AddWorkflow(BusinessWorkflow); err != nil { + log.Fatal(err) + } + fmt.Println("BusinessWorkflow registered") + + // Register the workflow activities + if err := r.AddActivity(BusinessActivity); err != nil { + log.Fatal(err) + } + fmt.Println("BusinessActivity registered") + + // Create workflow client and start worker + wclient, err := client.NewWorkflowClient() + if err != nil { + log.Fatal(err) + } + fmt.Println("Worker initialized") + + ctx, cancel := context.WithCancel(context.Background()) + if err = wclient.StartWorker(ctx, r); err != nil { + log.Fatal(err) + } + fmt.Println("runner started") + + // Your application logic continues here... + // Example: Start a workflow + instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInput(1)) + if err != nil { + log.Fatalf("failed to start workflow: %v", err) + } + fmt.Printf("workflow started with id: %v\n", instanceID) + + // Stop workflow worker when done + cancel() + fmt.Println("workflow worker successfully shutdown") +} +``` + +**Key points about registration:** +- Use `workflow.NewRegistry()` to create a workflow registry +- Use `r.AddWorkflow()` to register workflow functions +- Use `r.AddActivity()` to register activity functions +- Use `client.NewWorkflowClient()` to create a workflow client +- Call `wclient.StartWorker()` to begin processing workflows +- Use `wclient.ScheduleWorkflow` to schedule a named instance of a workflow + [See the Go SDK workflow activity example in context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md) {{% /tab %}} @@ -281,9 +364,9 @@ export default class WorkflowRuntime { // Register workflow activities public registerActivity(fn: TWorkflowActivity): WorkflowRuntime { const name = getFunctionName(fn); - const activityWrapper = (ctx: ActivityContext, intput: TInput): TOutput => { + const activityWrapper = (ctx: ActivityContext, input: TInput): TOutput => { const wfActivityContext = new WorkflowActivityContext(ctx); - return fn(wfActivityContext, intput); + return fn(wfActivityContext, input); }; this.worker.addNamedActivity(name, activityWrapper); return this; @@ -383,16 +466,16 @@ public class DemoWorkflowWorker { Define your workflow function with the parameter `ctx *workflow.WorkflowContext` and return any and error. Invoke your defined activities from within your workflow. ```go -func TestWorkflow(ctx *workflow.WorkflowContext) (any, error) { +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return nil, err } var output string - if err := ctx.CallActivity(TestActivity, workflow.ActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, workflow.ActivityInput(input)).Await(&output); err != nil { return nil, err } - if err := ctx.WaitForExternalEvent("testEvent", time.Second*60).Await(&output); err != nil { + if err := ctx.WaitForExternalEvent("businessEvent", time.Second*60).Await(&output); err != nil { return nil, err } @@ -864,7 +947,7 @@ public class DemoWorkflow extends Workflow { [As in the following example](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md), a hello-world application using the Go SDK and Dapr Workflow would include: - A Go package called `client` to receive the Go SDK client capabilities. 
-- The `TestWorkflow` method +- The `BusinessWorkflow` method - Creating the workflow with input and output. - API calls. In the example below, these calls start and call the workflow activities. @@ -874,86 +957,97 @@ package main import ( "context" + "errors" "fmt" "log" "time" - "github.com/dapr/durabletask-go/api" - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" + "github.com/dapr/durabletask-go/workflow" + "github.com/dapr/go-sdk/client" ) var stage = 0 - -const ( - workflowComponent = "dapr" -) +var failActivityTries = 0 func main() { - registry := task.NewTaskRegistry() + r := workflow.NewRegistry() - if err := registry.AddOrchestrator(TestWorkflow); err != nil { + if err := r.AddWorkflow(BusinessWorkflow); err != nil { log.Fatal(err) } - fmt.Println("TestWorkflow registered") + fmt.Println("BusinessWorkflow registered") - if err := registry.AddActivity(TestActivity); err != nil { + if err := r.AddActivity(BusinessActivity); err != nil { log.Fatal(err) } - fmt.Println("TestActivity registered") + fmt.Println("BusinessActivity registered") - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatalf("failed to create Dapr client: %v", err) + if err := r.AddActivity(FailActivity); err != nil { + log.Fatal(err) } + fmt.Println("FailActivity registered") - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(context.TODO(), registry); err != nil { - log.Fatalf("failed to start work item listener: %v", err) + wclient, err := client.NewWorkflowClient() + if err != nil { + log.Fatal(err) } + fmt.Println("Worker initialized") + ctx, cancel := context.WithCancel(context.Background()) + if err = wclient.StartWorker(ctx, r); err != nil { + log.Fatal(err) + } fmt.Println("runner started") - ctx := context.Background() - // Start workflow test - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow", api.WithInput(1)) + // Set the start time to the current time to not wait for the workflow to + // "start". This is useful for increasing the throughput of creating + // workflows. 
+ // workflow.WithStartTime(time.Now()) + instanceID, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput(1)) if err != nil { log.Fatalf("failed to start workflow: %v", err) } - fmt.Printf("workflow started with id: %v\n", id) + fmt.Printf("workflow started with id: %v\n", instanceID) // Pause workflow test - err = client.PurgeOrchestrationState(ctx, id) + err = wclient.SuspendWorkflow(ctx, instanceID, "") if err != nil { log.Fatalf("failed to pause workflow: %v", err) } - respGet, err := client.FetchOrchestrationMetadata(ctx, id) + respFetch, err := wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { - log.Fatalf("failed to get workflow: %v", err) + log.Fatalf("failed to fetch workflow: %v", err) + } + + if respFetch.RuntimeStatus != workflow.StatusSuspended { + log.Fatalf("workflow not paused: %s: %v", respFetch.RuntimeStatus, respFetch) } - fmt.Printf("workflow paused: %s\n", respGet.RuntimeStatus) + + fmt.Printf("workflow paused\n") // Resume workflow test - err = client.ResumeOrchestration(ctx, id, "") + err = wclient.ResumeWorkflow(ctx, instanceID, "") if err != nil { log.Fatalf("failed to resume workflow: %v", err) } - fmt.Printf("workflow running: %s\n", respGet.RuntimeStatus) - respGet, err = client.FetchOrchestrationMetadata(ctx, id) + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow resumed: %s\n", respGet.RuntimeStatus) + + if respFetch.RuntimeStatus != workflow.StatusRunning { + log.Fatalf("workflow not running") + } + + fmt.Println("workflow resumed") fmt.Printf("stage: %d\n", stage) - // Raise Event Test - err = client.RaiseEvent(ctx, id, "testEvent", api.WithEventPayload("testData")) + // Raise Event + err = wclient.RaiseEvent(ctx, instanceID, "businessEvent", workflow.WithEventPayload("testData")) if err != nil { fmt.Printf("failed to raise event: %v", err) } @@ -964,44 +1058,99 @@ func main() { fmt.Printf("stage: %d\n", stage) - respGet, err = client.FetchOrchestrationMetadata(ctx, id) + waitCtx, cancel := context.WithTimeout(ctx, 5*time.Second) + _, err = wclient.WaitForWorkflowCompletion(waitCtx, instanceID) + cancel() + if err != nil { + log.Fatalf("failed to wait for workflow: %v", err) + } + + fmt.Printf("fail activity executions: %d\n", failActivityTries) + + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow status: %v\n", respGet.RuntimeStatus) + fmt.Printf("workflow status: %v\n", respFetch.String()) // Purge workflow test - err = client.PurgeOrchestrationState(ctx, id) + err = wclient.PurgeWorkflowState(ctx, instanceID) + if err != nil { + log.Fatalf("failed to purge workflow: %v", err) + } + + respFetch, err = wclient.FetchWorkflowMetadata(ctx, instanceID, workflow.WithFetchPayloads(true)) + if err == nil || respFetch != nil { + log.Fatalf("failed to purge workflow: %v", err) + } + + fmt.Println("workflow purged") + + fmt.Printf("stage: %d\n", stage) + + // Terminate workflow test + id, err := wclient.ScheduleWorkflow(ctx, "BusinessWorkflow", workflow.WithInstanceID("a7a4168d-3a1c-41da-8a4f-e7f6d9c718d9"), workflow.WithInput(1)) + if err != nil { + log.Fatalf("failed to start workflow: %v", err) + } + fmt.Printf("workflow started with id: %v\n", instanceID) + + 
metadata, err := wclient.WaitForWorkflowStart(ctx, id) + if err != nil { + log.Fatalf("failed to get workflow: %v", err) + } + fmt.Printf("workflow status: %s\n", metadata.String()) + + err = wclient.TerminateWorkflow(ctx, id) + if err != nil { + log.Fatalf("failed to terminate workflow: %v", err) + } + fmt.Println("workflow terminated") + + err = wclient.PurgeWorkflowState(ctx, id) if err != nil { log.Fatalf("failed to purge workflow: %v", err) } fmt.Println("workflow purged") + + cancel() + + fmt.Println("workflow worker successfully shutdown") } -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return nil, err } var output string - if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, task.WithActivityInput(input)).Await(&output); err != nil { return nil, err } - err := ctx.WaitForSingleEvent("testEvent", time.Second*60).Await(&output) + err := ctx.WaitForSingleEvent("businessEvent", time.Second*60).Await(&output) if err != nil { return nil, err } - if err := ctx.CallActivity(TestActivity, task.WithActivityInput(input)).Await(&output); err != nil { + if err := ctx.CallActivity(BusinessActivity, task.WithActivityInput(input)).Await(&output); err != nil { return nil, err } + if err := ctx.CallActivity(FailActivity, workflow.WithActivityRetryPolicy(&workflow.RetryPolicy{ + MaxAttempts: 3, + InitialRetryInterval: 100 * time.Millisecond, + BackoffCoefficient: 2, + MaxRetryInterval: 1 * time.Second, + })).Await(nil); err == nil { + return nil, fmt.Errorf("unexpected no error executing fail activity") + } + return output, nil } -func TestActivity(ctx task.ActivityContext) (any, error) { +func BusinessActivity(ctx task.ActivityContext) (any, error) { var input int if err := ctx.GetInput(&input); err != nil { return "", err @@ -1011,6 +1160,11 @@ func TestActivity(ctx task.ActivityContext) (any, error) { return fmt.Sprintf("Stage: %d", stage), nil } + +func FailActivity(ctx workflow.ActivityContext) (any, error) { + failActivityTries += 1 + return nil, errors.New("dummy activity error") +} ``` [See the full Go SDK workflow example in context.](https://github.com/dapr/go-sdk/tree/main/examples/workflow/README.md) diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md index 2fdd37d1cf8..e5d77c2c18e 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-architecture.md @@ -175,6 +175,20 @@ Similarly, if a state store imposes restrictions on the size of a batch transact Workflow state can be purged from a state store, including all its history. Each Dapr SDK exposes APIs for purging all metadata related to specific workflow instances. +#### State store record count + +The number of records which are saved as history in the state store per workflow run is determined by its complexity or "shape". In other words, the number of activities, timers, sub-workflows etc. +The following table shows a general guide to the number of records that are saved by different workflow tasks. +This number may be larger or smaller depending on retries or concurrency. 
+ +| Task type | Number of records saved | +| ----------|-------------------------| +| Start workflow | 5 records | +| Call activity | 3 records | +| Timer | 3 records | +| Raise event | 3 records | +| Start child workflow | 8 records | + ## Workflow scalability Because Dapr Workflows are internally implemented using actors, Dapr Workflows have the same scalability characteristics as actors. diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md index 8eb2f439430..64f112c7018 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-multi-app.md @@ -6,8 +6,9 @@ weight: 7000 description: "Executing workflows across multiple applications" --- -It is often the case that a single workflow spans multiple applications, microservices, or programing languages. +It is often the case that a single workflow spans multiple applications, microservices, or programming languages. This is where an activity or a child workflow will be executed on a different application than the one hosting the parent workflow. + Some scenarios where this is useful include: - A Machine Learning (ML) training activity must be executed on GPU-enabled machines, while the rest of the workflow runs on CPU-only orchestration machines. @@ -15,85 +16,97 @@ Some scenarios where this is useful include: - Different parts of the workflow need to be executed in different trust zones or networks. - Different parts of the workflow need to be executed in different geographic regions due to data residency requirements. - An involved business process spans multiple teams or departments, each owning their own application. -- Implementation of a workflow spans different programming lanaguages based on team expertise or existing codebases. +- Implementation of a workflow spans different programming languages based on team expertise or existing codebases. +- Different team boundaries or microservice ownership. + +Diagram showing multi-application complex workflow + +The diagram below shows an example scenario of a complex workflow that orchestrates across multiple applications that are written in different languages. 
Each applications' main steps and activities are: + +• **App1: Main Workflow Service** - Top-level orchestrator that coordinates the entire ML pipeline +- Starts the process +- Calls data processing activities on App2 +- Calls ML training child workflow on App3 +- Calls model deployment on App4 +- Ends the complete workflow +- **Language: Java** + +• **App2: Data Processing Pipeline** - **GPU activities** only +- Data Ingesting Activity (GPU-accelerated) +- Feature Engineering Activity (GPU-accelerated) +- Returns completion signal to Main Workflow +- **Language: Go** + +• **App3: ML Training Child Workflow** - Contains a child workflow and activities +- Child workflow orchestrates: + - Data Processing Activity + - Model Training Activity (GPU-intensive) + - Model Validation Activity +- Triggered by App2's activities completing +- Returns completion signal to Main Workflow +- **Language: Java** + +• **App4: Model Serving Service** - **Beefy GPU app** with activities only +- Model Loading Activity (GPU memory intensive) +- Inference Setup Activity (GPU-accelerated inference) +- Triggered by App3's workflow completing +- Returns completion signal to Main Workflow +- **Language: Go** ## Multi-application workflows -Like all building blocks in Dapr, workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}). -By default, the full workflow execution is hosted on the app ID that started the workflow. -This workflow will be executed across all replicas of that app ID, not just the single replica which scheduled the workflow. +Workflow execution routing is based on the [App ID of the hosting Dapr application]({{% ref "security-concept.md#application-identity" %}}). +By default, the full workflow execution is hosted on the app ID that started the workflow. This workflow can be executed across any replicas of that app ID, not just the single replica which scheduled the workflow. + -It is possible to execute activities or child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code. -Upon execution, the target app ID will execute the activity or child workflow, and return the result to the parent workflow of the originating app ID. -Workflows being durable, if the target activity or child workflow app ID is not available or has not been defined, the parent workflow retry until the target app ID becomes available, indefinitely. -It is paramount that their is co-ordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed. +It is possible to execute activities and child workflows on different app IDs by specifying the target app ID parameter, inside the workflow execution code. +Upon execution, the target app ID executes the activity or child workflow, and returns the result to the parent workflow of the originating app ID. The entire Workflow execution may be distributed across multiple app IDs with no limit, with each activity or child workflow specifying the target app ID. The final history of the workflow will be saved by the app ID that hosts the very parent (or can consider it the root) workflow. {{% alert title="Restrictions" color="primary" %}} -Like other building blocks and resources in Dapr, workflows are scoped to a single namespace. +Like other API building blocks and resources in Dapr, workflows are scoped to a single namespace. 
This means that all app IDs involved in a multi-application workflow must be in the same namespace. -Similarly, all app IDs must use the same actor state store. -Finally, the target app ID must have the activity or child workflow defined, otherwise the parent workflow will retry indefinitely. +Similarly, all app IDs must use the same workflow (or actor) state store. +Finally, the target app ID must have the activity or child workflow defined and registered, otherwise the parent workflow retries indefinitely. {{% /alert %}} -## Multi-application activity examples - -The following examples show how to execute activities on different target app IDs. - -{{< tabpane text=true >}} - -{{% tab "Go" %}} +{{% alert title="Important Limitations" color="warning" %}} +**SDKs supporting multi-application workflows** - Multi-application workflows are used via the SDKs. +Currently the following are supported: +- **Java** (**only** activity calls) +- **Go** (**both** activities and child workflows calls) +- The Python, .NET, JavaScript SDKs support are planned for future releases +{{% /alert %}} -```go -package main +## Error handling -import ( - "context" - "log" +When calling multi-application activities or child workflows: +- If the target application does not exist, the call will be retried using the provided retry policy. +- If the target application exists but doesn't contain the specified activity or workflow, the call will return an error. +- Standard workflow retry policies apply to multi-application calls. - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" -) +It is paramount that there is coordination between the teams owning the different app IDs to ensure that the activities and child workflows are defined and available when needed. -func main() { - ctx := context.Background() +## Multi-application activity example - registry := task.NewTaskRegistry() - if err := registry.AddOrchestrator(TestWorkflow); err != nil { - log.Fatal(err) - } +Diagram showing multi-application call activity workflow pattern - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatal(err) - } +The following example shows how to execute the activity `ActivityA` on the target app `App2`. - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(ctx, registry); err != nil { - log.Fatal(err) - } - - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow") - if err != nil { - log.Fatal(err) - } +{{< tabpane text=true >}} - if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil { - log.Fatal(err) - } -} +{{% tab "Go" %}} -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { +```go +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var output string - err := ctx.CallActivity("my-other-activity", - task.WithActivityInput("my-input"), - // Here we set custom target app ID which will execute this activity. - task.WithActivityAppID("my-other-app-id"), + err := ctx.CallActivity("ActivityA", + workflow.WithActivityInput("my-input"), + workflow.WithActivityAppID("App2"), // Here we set the target app ID which will execute this activity. 
).Await(&output) + if err != nil { return nil, err } @@ -107,45 +120,18 @@ func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { {{% tab "Java" %}} ```java -public class CrossAppWorkflow implements Workflow { +public class BusinessWorkflow implements Workflow { @Override public WorkflowStub create() { return ctx -> { - var logger = ctx.getLogger(); - logger.info("=== WORKFLOW STARTING ==="); - logger.info("Starting CrossAppWorkflow: {}", ctx.getName()); - logger.info("Workflow name: {}", ctx.getName()); - logger.info("Workflow instance ID: {}", ctx.getInstanceId()); - - String input = ctx.getInput(String.class); - logger.info("CrossAppWorkflow received input: {}", input); - logger.info("Workflow input: {}", input); - - // Call an activity in another app by passing in an active appID to the WorkflowTaskOptions - logger.info("Calling cross-app activity in 'app2'..."); - logger.info("About to call cross-app activity in app2..."); - String crossAppResult = ctx.callActivity( - App2TransformActivity.class.getName(), - input, - new WorkflowTaskOptions("app2"), + String output = ctx.callActivity( + ActivityA.class.getName(), + "my-input", + new WorkflowTaskOptions("App2"), // Here we set the target app ID which will execute this activity. String.class ).await(); - // Call another activity in a different app - logger.info("Calling cross-app activity in 'app3'..."); - logger.info("About to call cross-app activity in app3..."); - String finalResult = ctx.callActivity( - App3FinalizeActivity.class.getName(), - crossAppResult, - new WorkflowTaskOptions("app3"), - String.class - ).await(); - logger.info("Final cross-app activity result: {}", finalResult); - logger.info("Final cross-app activity result: {}", finalResult); - - logger.info("CrossAppWorkflow finished with: {}", finalResult); - logger.info("=== WORKFLOW COMPLETING WITH: {} ===" , finalResult); - ctx.complete(finalResult); + ctx.complete(output); }; } } @@ -155,60 +141,24 @@ public class CrossAppWorkflow implements Workflow { {{< /tabpane >}} -The following examples show how to execute child workflows on different target app IDs. +## Multi-application child workflow example + +Diagram showing multi-application child workflow pattern + +The following example shows how to execute the child workflow `Workflow2` on the target app `App2`. 
{{< tabpane text=true >}} {{% tab "Go" %}} ```go -package main - -import ( - "context" - "log" - - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" -) - -func main() { - ctx := context.Background() - - registry := task.NewTaskRegistry() - if err := registry.AddOrchestrator(TestWorkflow); err != nil { - log.Fatal(err) - } - - daprClient, err := dapr.NewClient() - if err != nil { - log.Fatal(err) - } - - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(ctx, registry); err != nil { - log.Fatal(err) - } - - id, err := client.ScheduleNewOrchestration(ctx, "TestWorkflow") - if err != nil { - log.Fatal(err) - } - - if _, err = client.WaitForOrchestrationCompletion(ctx, id); err != nil { - log.Fatal(err) - } -} - -func TestWorkflow(ctx *task.OrchestrationContext) (any, error) { +func BusinessWorkflow(ctx *workflow.WorkflowContext) (any, error) { var output string - err := ctx.CallSubOrchestrator("my-sub-orchestration", - task.WithSubOrchestratorInput("my-input"), - // Here we set custom target app ID which will execute this child workflow. - task.WithSubOrchestratorAppID("my-sub-app-id"), + err := ctx.CallChildWorkflow("Workflow2", + workflow.WithChildWorkflowInput("my-input"), + workflow.WithChildWorkflowAppID("App2"), // Here we set the target app ID which will execute this child workflow. ).Await(&output) + if err != nil { return nil, err } diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md index 67e4941f880..5c79019fafd 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-overview.md @@ -46,6 +46,12 @@ Child workflow also supports automatic retry policies. [Learn more about child workflows.]({{% ref "workflow-features-concepts.md#child-workflows" %}}) +### Multi-application workflows + +Multi-application workflows, enable you to orchestrate complex business processes that span across multiple applications. This allows a workflow to call activities or start child workflows in different applications, distributing the workflow execution while maintaining the security, reliability and durability guarantees of Dapr's workflow engine. + +[Learn more about multi-application workflows.]({{% ref "workflow-multi-app.md" %}}) + ### Timers and reminders Same as Dapr actors, you can schedule reminder-like durable delays for any time range. @@ -110,7 +116,9 @@ Want to skip the quickstarts? Not a problem. You can try out the workflow buildi ## Limitations -- **State stores:** Due to underlying limitations in some database choices, more commonly NoSQL databases, you might run into limitations around storing internal states. For example, CosmosDB has a maximum single operation item limit of only 100 states in a single request. +- **State stores:** You can only use state stores which support workflows, as [described here]({{% ref supported-state-stores %}}). +- Azure Cosmos DB has [payload and workflow complexity limitations]({{% ref "setup-azure-cosmosdb.md#workflow-limitations" %}}). +- AWS DynamoDB has [workflow complexity limitations]({{% ref "setup-azure-cosmosdb.md#workflow-limitations" %}}). 
## Watch the demo diff --git a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md index faa92d946ae..a0f0e66bbd4 100644 --- a/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md +++ b/daprdocs/content/en/developing-applications/building-blocks/workflow/workflow-patterns.md @@ -615,8 +615,7 @@ await context.CallActivityAsync("PostResults", sum); {{< /tabpane >}} -With the release of 1.16, it's even easier to process workflow activities in parallel while putting an upper cap on -concurrency by using the following extension methods on the `WorkflowContext`: +You can process workflow activities in parallel while putting an upper cap on concurrency by using the following extension methods on the `WorkflowContext`: {{< tabpane text=true >}} @@ -1428,33 +1427,33 @@ The following diagram illustrates this flow. ```java public class PaymentProcessingWorkflow implements Workflow { - + @Override public WorkflowStub create() { return ctx -> { ctx.getLogger().info("Starting Workflow: " + ctx.getName()); var orderId = ctx.getInput(String.class); List compensations = new ArrayList<>(); - + try { // Step 1: Reserve inventory String reservationId = ctx.callActivity(ReserveInventoryActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Inventory reserved: {}", reservationId); compensations.add("ReleaseInventory"); - + // Step 2: Process payment String paymentId = ctx.callActivity(ProcessPaymentActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Payment processed: {}", paymentId); compensations.add("RefundPayment"); - + // Step 3: Ship order String shipmentId = ctx.callActivity(ShipOrderActivity.class.getName(), orderId, String.class).await(); ctx.getLogger().info("Order shipped: {}", shipmentId); compensations.add("CancelShipment"); - + } catch (TaskFailedException e) { ctx.getLogger().error("Activity failed: {}", e.getMessage()); - + // Execute compensations in reverse order Collections.reverse(compensations); for (String compensation : compensations) { @@ -1462,24 +1461,24 @@ public class PaymentProcessingWorkflow implements Workflow { switch (compensation) { case "CancelShipment": String shipmentCancelResult = ctx.callActivity( - CancelShipmentActivity.class.getName(), - orderId, + CancelShipmentActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Shipment cancellation completed: {}", shipmentCancelResult); break; - + case "RefundPayment": String refundResult = ctx.callActivity( - RefundPaymentActivity.class.getName(), - orderId, + RefundPaymentActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Payment refund completed: {}", refundResult); break; - + case "ReleaseInventory": String releaseResult = ctx.callActivity( - ReleaseInventoryActivity.class.getName(), - orderId, + ReleaseInventoryActivity.class.getName(), + orderId, String.class).await(); ctx.getLogger().info("Inventory release completed: {}", releaseResult); break; @@ -1494,7 +1493,7 @@ public class PaymentProcessingWorkflow implements Workflow { // Step 4: Send confirmation ctx.callActivity(SendConfirmationActivity.class.getName(), orderId, Void.class).await(); ctx.getLogger().info("Confirmation sent for order: {}", orderId); - + ctx.complete("Order processed successfully: " + orderId); }; } @@ -1597,7 +1596,7 @@ The compensation pattern ensures that your distributed 
workflows can maintain co - [Try out Dapr Workflows using the quickstart]({{% ref workflow-quickstart.md %}}) - [Workflow overview]({{% ref workflow-overview.md %}}) - [Workflow API reference]({{% ref workflow_api.md %}}) -- Try out the following examples: +- Try out the following examples: - [Python](https://github.com/dapr/python-sdk/tree/master/examples/demo_workflow) - [JavaScript](https://github.com/dapr/js-sdk/tree/main/examples/workflow) - [.NET](https://github.com/dapr/dotnet-sdk/tree/master/examples/Workflow) diff --git a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md index 00e56ae61a9..b66ce4bcb0f 100644 --- a/daprdocs/content/en/developing-applications/error-codes/errors-overview.md +++ b/daprdocs/content/en/developing-applications/error-codes/errors-overview.md @@ -6,7 +6,7 @@ weight: 10 description: "Overview of Dapr errors" --- -An error code is a numeric or alphamueric code that indicates the nature of an error and, when possible, why it occured. +An error code is a numeric or alphanumeric code that indicates the nature of an error and, when possible, why it occured. Dapr error codes are standardized strings for over 80+ common errors across HTTP and gRPC requests when using the Dapr APIs. These codes are both: - Returned in the JSON response body of the request. diff --git a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md index 7e527d74fe1..7cac1459d1b 100644 --- a/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md +++ b/daprdocs/content/en/developing-applications/local-development/multi-app-dapr-run/multi-app-template.md @@ -100,7 +100,7 @@ dapr list -k ## Stop the multi-app template -Stop the multi-app run template anytime with either of the following commands: +Stop the multi-app run template any time with either of the following commands: {{< tabpane text=true >}} diff --git a/daprdocs/content/en/developing-applications/sdks/_index.md b/daprdocs/content/en/developing-applications/sdks/_index.md index 079a06d3297..1982360b30e 100644 --- a/daprdocs/content/en/developing-applications/sdks/_index.md +++ b/daprdocs/content/en/developing-applications/sdks/_index.md @@ -31,6 +31,12 @@ Select your [preferred language below]({{% ref "#sdk-languages" %}}) to learn mo | [C++](https://github.com/dapr/cpp-sdk) | In development | ✔ | | | | [Rust]({{% ref rust %}}) | In development | ✔ | | ✔ | | + +## Frameworks + +| Framework | Language | Status | Description | +|----------------------------------------|:----------------------|:---------------|:-----------------:| +| [Dapr Agents]({{% ref "../dapr-agents" %}}) | Python | In development | A framework for building LLM-powered autonomous agents that leverages Dapr's distributed systems capabilities for durable execution, with built-in security, observability, and state management. 
| ## Further reading - [Serialization in the Dapr SDKs]({{% ref sdk-serialization.md %}}) diff --git a/daprdocs/content/en/getting-started/quickstarts/_index.md b/daprdocs/content/en/getting-started/quickstarts/_index.md index 102aae1f5b2..6b5d17f6553 100644 --- a/daprdocs/content/en/getting-started/quickstarts/_index.md +++ b/daprdocs/content/en/getting-started/quickstarts/_index.md @@ -25,6 +25,7 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim | [Service Invocation]({{% ref serviceinvocation-quickstart %}}) | Synchronous communication between two services using HTTP or gRPC. | | [Publish and Subscribe]({{% ref pubsub-quickstart %}}) | Asynchronous communication between two services using messaging. | | [Workflow]({{% ref workflow-quickstart %}}) | Orchestrate business workflow activities in long running, fault-tolerant, stateful applications. | +| [Agents]({{% ref dapr-agents-quickstarts.md %}}) | Build LLM-powered autonomous agentic applications. | | [State Management]({{% ref statemanagement-quickstart %}}) | Store a service's data as key/value pairs in supported state stores. | | [Bindings]({{% ref bindings-quickstart %}}) | Work with external systems using input bindings to respond to events and output bindings to call operations. | | [Actors]({{% ref actors-quickstart %}}) | Run a microservice and a simple console client to demonstrate stateful object patterns in Dapr Actors. | @@ -33,4 +34,4 @@ Hit the ground running with our Dapr quickstarts, complete with code samples aim | [Resiliency]({{% ref resiliency %}}) | Define and apply fault-tolerance policies to your Dapr API requests. | | [Cryptography]({{% ref cryptography-quickstart %}}) | Encrypt and decrypt data using Dapr's cryptographic APIs. | | [Jobs]({{% ref jobs-quickstart %}}) | Schedule, retrieve, and delete jobs using Dapr's jobs APIs. | -| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). | \ No newline at end of file +| [Conversation]({{% ref conversation-quickstart %}}) | Securely and reliably interact with Large Language Models (LLMs). 
| diff --git a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md index 0c6be63d31d..72473207839 100644 --- a/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md +++ b/daprdocs/content/en/getting-started/quickstarts/workflow-quickstart.md @@ -1756,11 +1756,8 @@ import ( "log" "time" - "github.com/dapr/durabletask-go/api" - "github.com/dapr/durabletask-go/backend" - "github.com/dapr/durabletask-go/client" - "github.com/dapr/durabletask-go/task" - dapr "github.com/dapr/go-sdk/client" + "github.com/dapr/durabletask-go/workflow" + "github.com/dapr/go-sdk/client" ) var ( @@ -1774,43 +1771,46 @@ func main() { fmt.Println("*** Welcome to the Dapr Workflow console app sample!") fmt.Println("*** Using this app, you can place orders that start workflows.") - registry := task.NewTaskRegistry() + r := workflow.NewRegistry() - if err := registry.AddOrchestrator(OrderProcessingWorkflow); err != nil { + if err := r.AddWorkflow(OrderProcessingWorkflow); err != nil { log.Fatal(err) } - if err := registry.AddActivity(NotifyActivity); err != nil { + if err := r.AddActivity(NotifyActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(RequestApprovalActivity); err != nil { + if err := r.AddActivity(RequestApprovalActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(VerifyInventoryActivity); err != nil { + if err := r.AddActivity(VerifyInventoryActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(ProcessPaymentActivity); err != nil { + if err := r.AddActivity(ProcessPaymentActivity); err != nil { log.Fatal(err) } - if err := registry.AddActivity(UpdateInventoryActivity); err != nil { + if err := r.AddActivity(UpdateInventoryActivity); err != nil { log.Fatal(err) } - daprClient, err := dapr.NewClient() + wfClient, err := client.NewWorkflowClient() if err != nil { - log.Fatalf("failed to create Dapr client: %v", err) + log.Fatalf("failed to initialise workflow client: %v", err) } - client := client.NewTaskHubGrpcClient(daprClient.GrpcClientConn(), backend.DefaultLogger()) - if err := client.StartWorkItemListener(context.TODO(), registry); err != nil { - log.Fatalf("failed to start work item listener: %v", err) + if err := wfClient.StartWorker(context.Background(), r); err != nil { + log.Fatal(err) } + dclient, err := client.NewClient() + if err != nil { + log.Fatal(err) + } inventory := []InventoryItem{ {ItemName: "paperclip", PerItemCost: 5, Quantity: 100}, {ItemName: "cars", PerItemCost: 5000, Quantity: 10}, {ItemName: "computers", PerItemCost: 500, Quantity: 100}, } - if err := restockInventory(daprClient, inventory); err != nil { + if err := restockInventory(dclient, inventory); err != nil { log.Fatalf("failed to restock: %v", err) } @@ -1827,31 +1827,29 @@ func main() { TotalCost: totalCost, } - id, err := client.ScheduleNewOrchestration(context.TODO(), workflowName, - api.WithInput(orderPayload), - ) + id, err := wfClient.ScheduleWorkflow(context.Background(), workflowName, workflow.WithInput(orderPayload)) if err != nil { log.Fatalf("failed to start workflow: %v", err) } waitCtx, cancel := context.WithTimeout(context.Background(), 30*time.Second) - defer cancel() - _, err = client.WaitForOrchestrationCompletion(waitCtx, id) + _, err = wfClient.WaitForWorkflowCompletion(waitCtx, id) + cancel() if err != nil { log.Fatalf("failed to wait for workflow: %v", err) } - respFetch, err := client.FetchOrchestrationMetadata(context.Background(), 
id, api.WithFetchPayloads(true)) + respFetch, err := wfClient.FetchWorkflowMetadata(context.Background(), id, workflow.WithFetchPayloads(true)) if err != nil { log.Fatalf("failed to get workflow: %v", err) } - fmt.Printf("workflow status: %v\n", respFetch.RuntimeStatus) + fmt.Printf("workflow status: %v\n", respFetch.String()) fmt.Println("Purchase of item is complete") } -func restockInventory(daprClient dapr.Client, inventory []InventoryItem) error { +func restockInventory(daprClient client.Client, inventory []InventoryItem) error { for _, item := range inventory { itemSerialized, err := json.Marshal(item) if err != nil { @@ -1879,18 +1877,18 @@ import ( "log" "time" - "github.com/dapr/durabletask-go/task" + "github.com/dapr/durabletask-go/workflow" "github.com/dapr/go-sdk/client" ) // OrderProcessingWorkflow is the main workflow for orchestrating activities in the order process. -func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { - orderID := ctx.ID +func OrderProcessingWorkflow(ctx *workflow.WorkflowContext) (any, error) { + orderID := ctx.ID() var orderPayload OrderPayload if err := ctx.GetInput(&orderPayload); err != nil { return nil, err } - err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{ + err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{ Message: fmt.Sprintf("Received order %s for %d %s - $%d", orderID, orderPayload.Quantity, orderPayload.ItemName, orderPayload.TotalCost), })).Await(nil) if err != nil { @@ -1898,8 +1896,8 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { } var verifyInventoryResult InventoryResult - if err := ctx.CallActivity(VerifyInventoryActivity, task.WithActivityInput(InventoryRequest{ - RequestID: string(orderID), + if err := ctx.CallActivity(VerifyInventoryActivity, workflow.WithActivityInput(InventoryRequest{ + RequestID: orderID, ItemName: orderPayload.ItemName, Quantity: orderPayload.Quantity, })).Await(&verifyInventoryResult); err != nil { @@ -1908,64 +1906,64 @@ func OrderProcessingWorkflow(ctx *task.OrchestrationContext) (any, error) { if !verifyInventoryResult.Success { notification := Notification{Message: fmt.Sprintf("Insufficient inventory for %s", orderPayload.ItemName)} - err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(notification)).Await(nil) + err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(notification)).Await(nil) return OrderResult{Processed: false}, err } if orderPayload.TotalCost > 5000 { var approvalRequired ApprovalRequired - if err := ctx.CallActivity(RequestApprovalActivity, task.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil { + if err := ctx.CallActivity(RequestApprovalActivity, workflow.WithActivityInput(orderPayload)).Await(&approvalRequired); err != nil { return OrderResult{Processed: false}, err } - if err := ctx.WaitForSingleEvent("manager_approval", time.Second*200).Await(nil); err != nil { + if err := ctx.WaitForExternalEvent("manager_approval", time.Second*200).Await(nil); err != nil { return OrderResult{Processed: false}, err } // TODO: Confirm timeout flow - this will be in the form of an error. 
if approvalRequired.Approval { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been approved!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a successful order: %v\n", err) } } else { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Payment for order %s has been rejected!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of an unsuccessful order :%v\n", err) } return OrderResult{Processed: false}, err } } - err = ctx.CallActivity(ProcessPaymentActivity, task.WithActivityInput(PaymentRequest{ - RequestID: string(orderID), + err = ctx.CallActivity(ProcessPaymentActivity, workflow.WithActivityInput(PaymentRequest{ + RequestID: orderID, ItemBeingPurchased: orderPayload.ItemName, Amount: orderPayload.TotalCost, Quantity: orderPayload.Quantity, })).Await(nil) if err != nil { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a failed order: %v", err) } return OrderResult{Processed: false}, err } - err = ctx.CallActivity(UpdateInventoryActivity, task.WithActivityInput(PaymentRequest{ - RequestID: string(orderID), + err = ctx.CallActivity(UpdateInventoryActivity, workflow.WithActivityInput(PaymentRequest{ + RequestID: orderID, ItemBeingPurchased: orderPayload.ItemName, Amount: orderPayload.TotalCost, Quantity: orderPayload.Quantity, })).Await(nil) if err != nil { - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s failed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a failed order: %v", err) } return OrderResult{Processed: false}, err } - if err := ctx.CallActivity(NotifyActivity, task.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil { + if err := ctx.CallActivity(NotifyActivity, workflow.WithActivityInput(Notification{Message: fmt.Sprintf("Order %s has completed!", orderID)})).Await(nil); err != nil { log.Printf("failed to notify of a successful order: %v", err) } return OrderResult{Processed: true}, err } // NotifyActivity outputs a notification message -func NotifyActivity(ctx task.ActivityContext) (any, error) { +func NotifyActivity(ctx workflow.ActivityContext) (any, error) { var input Notification if err := ctx.GetInput(&input); err != nil { return "", err @@ -1975,7 +1973,7 @@ func NotifyActivity(ctx task.ActivityContext) (any, error) { } // ProcessPaymentActivity is used to process a payment -func ProcessPaymentActivity(ctx task.ActivityContext) (any, error) { +func ProcessPaymentActivity(ctx workflow.ActivityContext) (any, error) { var input 
PaymentRequest if err := ctx.GetInput(&input); err != nil { return "", err @@ -1985,7 +1983,7 @@ func ProcessPaymentActivity(ctx task.ActivityContext) (any, error) { } // VerifyInventoryActivity is used to verify if an item is available in the inventory -func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) { +func VerifyInventoryActivity(ctx workflow.ActivityContext) (any, error) { var input InventoryRequest if err := ctx.GetInput(&input); err != nil { return nil, err @@ -2017,7 +2015,7 @@ func VerifyInventoryActivity(ctx task.ActivityContext) (any, error) { } // UpdateInventoryActivity modifies the inventory. -func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) { +func UpdateInventoryActivity(ctx workflow.ActivityContext) (any, error) { var input PaymentRequest if err := ctx.GetInput(&input); err != nil { return nil, err @@ -2051,7 +2049,7 @@ func UpdateInventoryActivity(ctx task.ActivityContext) (any, error) { } // RequestApprovalActivity requests approval for the order -func RequestApprovalActivity(ctx task.ActivityContext) (any, error) { +func RequestApprovalActivity(ctx workflow.ActivityContext) (any, error) { var input OrderPayload if err := ctx.GetInput(&input); err != nil { return nil, err diff --git a/daprdocs/content/en/operations/configuration/configuration-overview.md b/daprdocs/content/en/operations/configuration/configuration-overview.md index f501710405c..33cc11d8e31 100644 --- a/daprdocs/content/en/operations/configuration/configuration-overview.md +++ b/daprdocs/content/en/operations/configuration/configuration-overview.md @@ -62,13 +62,14 @@ A Dapr sidecar can apply a specific configuration by using a `dapr.io/config` an ### Application configuration settings -The following menu includes all of the configuration settings you can set on the sidecar. +The following menu includes all of the configuration settings you can set: - [Tracing](#tracing) - [Metrics](#metrics) - [Logging](#logging) - [Middleware](#middleware) - [Name resolution](#name-resolution) +- [Workflow](#workflow) - [Scope secret store access](#scope-secret-store-access) - [Access Control allow lists for building block APIs](#access-control-allow-lists-for-building-block-apis) - [Access Control allow lists for service invocation API](#access-control-allow-lists-for-service-invocation-api) @@ -255,6 +256,15 @@ For more information, see: - [The name resolution component documentation]({{% ref supported-name-resolution %}}) for more examples. - [The Configuration file documentation]({{% ref configuration-schema.md %}}) to learn more about how to configure name resolution per component. +#### Workflow + +The `workflow` section contains properties for configuring [Workflows]({{% ref "workflow-overview.md" %}}). + +| Property | Type | Description | +|------------------|--------|-----| +| `maxConcurrentWorkflowInvocations` | int32 | Maximum number of concurrent workflow executions per Dapr sidecar. Default is infinite. | +| `maxConcurrentActivityInvocations` | int32 | Maximum number of concurrent activity executions per Dapr sidecar. Default is infinite. | + #### Scope secret store access See the [Scoping secrets]({{% ref "secret-scope.md" %}}) guide for information and examples on how to scope secrets to an application. 
@@ -334,6 +344,9 @@ spec: deny: - bindings.smtp - secretstores.local.file + workflow: + maxConcurrentWorkflowInvocations: 100 + maxConcurrentActivityInvocations: 1000 accessControl: defaultAction: deny trustDomain: "public" diff --git a/daprdocs/content/en/operations/configuration/increase-request-size.md b/daprdocs/content/en/operations/configuration/increase-request-size.md index 04f6500866c..2246d93bfdd 100644 --- a/daprdocs/content/en/operations/configuration/increase-request-size.md +++ b/daprdocs/content/en/operations/configuration/increase-request-size.md @@ -1,27 +1,29 @@ --- type: docs -title: "How-To: Handle large http body requests" -linkTitle: "HTTP request body size" +title: "How-To: Handle larger body requests" +linkTitle: "Request body size" weight: 6000 description: "Configure http requests that are bigger than 4 MB" --- -By default, Dapr has a limit for the request body size, set to 4MB. You can change this by defining: -- The `dapr.io/http-max-request-size` annotation, or -- The `--dapr-http-max-request-size` flag. +{{% alert title="Note" color="primary" %}} +The existing flag/annotation`dapr-http-max-request-size` has been deprecated and updated to `max-body-size`. +{{% /alert %}} + +By default, Dapr has a limit for the request body size, set to 4MB. You can change this for both HTTP and gRPC requests by defining: +- The `dapr.io/max-body-size` annotation, or +- The `--max-body-size` flag. {{< tabpane text=true >}} {{% tab "Self-hosted" %}} -When running in self-hosted mode, use the `--dapr-http-max-request-size` flag to configure Dapr to use non-default request body size: +When running in self-hosted mode, use the `--max-body-size` flag to configure Dapr to use non-default request body size: ```bash -dapr run --dapr-http-max-request-size 16 node app.js +dapr run --max-body-size 16 node app.js ``` -This tells Dapr to set maximum request body size to `16` MB. - {{% /tab %}} @@ -50,7 +52,7 @@ spec: dapr.io/enabled: "true" dapr.io/app-id: "myapp" dapr.io/app-port: "8000" - dapr.io/http-max-request-size: "16" + dapr.io/max-body-size: "16" #... ``` @@ -58,6 +60,8 @@ spec: {{< /tabpane >}} +This tells Dapr to set the maximum request body size to `16` MB for both HTTP and gRPC requests. + ## Related links [Dapr Kubernetes pod annotations spec]({{% ref arguments-annotations-overview.md %}}) diff --git a/daprdocs/content/en/operations/support/alpha-beta-apis.md b/daprdocs/content/en/operations/support/alpha-beta-apis.md index f8dd6e3ce11..8b1d81e8218 100644 --- a/daprdocs/content/en/operations/support/alpha-beta-apis.md +++ b/daprdocs/content/en/operations/support/alpha-beta-apis.md @@ -16,8 +16,8 @@ description: "List of current alpha and beta APIs" | Bulk Subscribe | [Bulk subscribe proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/appcallback.proto#L57) | N/A | The bulk subscribe application callback receives multiple messages from a topic in a single call. | [Bulk Publish and Subscribe API]({{% ref "pubsub-bulk.md" %}}) | v1.10 | | Cryptography | [Crypto proto](https://github.com/dapr/dapr/blob/5aba3c9aa4ea9b3f388df125f9c66495b43c5c9e/dapr/proto/runtime/v1/dapr.proto#L118) | `v1.0-alpha1/crypto` | The cryptography API enables you to perform **high level** cryptography operations for encrypting and decrypting messages. 
| [Cryptography API]({{% ref "cryptography-overview.md" %}}) | v1.11 | | Jobs | [Jobs proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L212-219) | `v1.0-alpha1/jobs` | The jobs API enables you to schedule and orchestrate jobs. | [Jobs API]({{% ref "jobs-overview.md" %}}) | v1.14 | -| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L221-222) | `v1.0-alpha1/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 | - +| Streaming Subscription | [Streaming Subscription proto](https://github.com/dapr/dapr/blob/310c83140b2f0c3cb7d2bef19624df88af3e8e0a/dapr/proto/runtime/v1/dapr.proto#L454) | N/A | Subscription is defined in the application code. Streaming subscriptions are dynamic, meaning they allow for adding or removing subscriptions at runtime. | [Streaming Subscription API]({{% ref "subscription-methods/#streaming-subscriptions" %}}) | v1.14 | +| Conversation | [Conversation proto](https://github.com/dapr/dapr/blob/master/dapr/proto/runtime/v1/dapr.proto#L226) | `v1.0-alpha2/conversation` | Converse between different large language models using the conversation API. | [Conversation API]({{% ref "conversation-overview.md" %}}) | v1.15 | ## Beta APIs diff --git a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md index a0895537cc3..3c2214cb711 100644 --- a/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md +++ b/daprdocs/content/en/operations/support/breaking-changes-and-deprecations.md @@ -58,17 +58,18 @@ After announcing a future breaking change, the change will happen in 2 releases ## Announced deprecations -| Feature | Deprecation announcement | Removal | -|-----------------------|-----------------------|------------------------- | -| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 | -| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 | -| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 | -| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode.| 1.9.0 | 1.10.0 | -| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. 
[daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267)| 1.11.0 | 1.13.0 | -| Hazelcast PubSub Component | 1.9.0 | 1.11.0 | -| Twitter Binding Component | 1.10.0 | 1.11.0 | -| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 | -| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 | +| Feature | Deprecation announcement | Removal | +|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|--------------------------|----------------| +| GET /v1.0/shutdown API (Users should use [POST API]({{% ref kubernetes-job.md %}}) instead) | 1.2.0 | 1.4.0 | +| Java domain builder classes deprecated (Users should use [setters](https://github.com/dapr/java-sdk/issues/587) instead) | Java SDK 1.3.0 | Java SDK 1.5.0 | +| Service invocation will no longer provide a default content type header of `application/json` when no content-type is specified. You must explicitly [set a content-type header]({{% ref "service_invocation_api.md#request-contents" %}}) for service invocation if your invoked apps rely on this header. | 1.7.0 | 1.9.0 | +| gRPC service invocation using `invoke` method is deprecated. Use proxy mode service invocation instead. See [How-To: Invoke services using gRPC ]({{% ref howto-invoke-services-grpc.md %}}) to use the proxy mode. | 1.9.0 | 1.10.0 | +| The CLI flag `--app-ssl` (in both the Dapr CLI and daprd) has been deprecated in favor of using `--app-protocol` with values `https` or `grpcs`. [daprd:6158](https://github.com/dapr/dapr/issues/6158) [cli:1267](https://github.com/dapr/cli/issues/1267) | 1.11.0 | 1.13.0 | +| Hazelcast PubSub Component | 1.9.0 | 1.11.0 | +| Twitter Binding Component | 1.10.0 | 1.11.0 | +| NATS Streaming PubSub Component | 1.11.0 | 1.13.0 | +| Workflows API Alpha1 `/v1.0-alpha1/workflows` being deprecated in favor of Workflow Client | 1.15.0 | 1.17.0 | +| Migration of `http-max-request-size` flags/annotations to `max-body-size`. See [How-To: Handle larger body requests]({{% ref increase-request-size.md %}}) | 1.14.0 | 1.17.0 | ## Related links diff --git a/daprdocs/content/en/operations/support/support-release-policy.md b/daprdocs/content/en/operations/support/support-release-policy.md index 579008b70e0..110dbdf266e 100644 --- a/daprdocs/content/en/operations/support/support-release-policy.md +++ b/daprdocs/content/en/operations/support/support-release-policy.md @@ -19,7 +19,7 @@ Dapr releases use `MAJOR.MINOR.PATCH` versioning. For example, 1.0.0. A supported release means: -- A hoxfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis. +- A hotfix patch is released if the release has a critical issue such as a mainline broken scenario or a security issue. Each of these are reviewed on a case by case basis. - Issues are investigated for the supported releases. If a release is no longer supported, you need to upgrade to a newer release and determine if the issue is still relevant. From the 1.8.0 release onwards three (3) versions of Dapr are supported; the current and previous two (2) versions. Typically these are `MINOR`release updates. 
This means that there is a rolling window that moves forward for supported releases and it is your operational responsibility to remain up to date with these supported versions. If you have an older version of Dapr you may have to do intermediate upgrades to get to a supported version. @@ -45,26 +45,30 @@ The table below shows the versions of Dapr releases that have been tested togeth | Release date | Runtime | CLI | SDKs | Dashboard | Status | Release notes | |--------------------|:--------:|:--------|---------|---------|---------|------------| -| July 31st 2025 | 1.15.9
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) | -| July 18th 2025 | 1.15.8
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) | -| July 16th 2025 | 1.15.7
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) | -| June 20th 2025 | 1.15.6
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) | -| May 5th 2025 | 1.15.5
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) | -| April 4th 2025 | 1.15.4
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) | -| March 5rd 2025 | 1.15.3
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported (current) | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) | -| March 3rd 2025 | 1.15.2
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) | -| February 28th 2025 | 1.15.1
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported (current) | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) | +| September 16th 2025 | 1.16.0
| 1.16.0 | Java 1.15.0
Go 1.13.0
PHP 1.2.0
Python 1.16.0
.NET 1.16.0
JS 3.6.0
Rust 0.17.0 | 0.15.0 | Supported (current) | [v1.16.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.16.0) | +| September 17th 2025 | 1.15.12
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.12 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.12) | +| August 28th 2025 | 1.15.11
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.11 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.11) | +| August 21st 2025 | 1.15.10
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.10 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.10) | +| July 31st 2025 | 1.15.9
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.9 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.9) | +| July 18th 2025 | 1.15.8
| 1.15.0 | Java 1.14.2, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.8 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.8) | +| July 16th 2025 | 1.15.7
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.7 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.7) | +| June 20th 2025 | 1.15.6
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.6 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.6) | +| May 5th 2025 | 1.15.5
| 1.15.0 | Java 1.14.1, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.5 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.5) | +| April 4th 2025 | 1.15.4
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.4) | +| March 5th 2025 | 1.15.3
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.4
JS 3.5.2
Rust 0.16.1 | 0.15.0 | Supported | [v1.15.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.3) | +| March 3rd 2025 | 1.15.2
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.2) | +| February 28th 2025 | 1.15.1
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.1) | | February 27th 2025 | 1.15.0
| 1.15.0 | Java 1.14.0, 1.15.0
Go 1.12.0
PHP 1.2.0
Python 1.15.0
.NET 1.15.0
JS 3.5.0
Rust 0.16 | 0.15.0 | Supported | [v1.15.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.15.0) | | September 16th 2024 | 1.14.4
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.4) | | September 13th 2024 | 1.14.3
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | ⚠️ Recalled | [v1.14.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.3) | | September 6th 2024 | 1.14.2
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.2) | | August 14th 2024 | 1.14.1
| 1.14.1 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.1) | | August 14th 2024 | 1.14.0
| 1.14.0 | Java 1.12.0
Go 1.11.0
PHP 1.2.0
Python 1.14.0
.NET 1.14.0
JS 3.3.1 | 0.15.0 | Supported | [v1.14.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.14.0) | -| May 29th 2024 | 1.13.4
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) | -| May 21st 2024 | 1.13.3
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) | -| April 3rd 2024 | 1.13.2
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) | -| March 26th 2024 | 1.13.1
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) | -| March 6th 2024 | 1.13.0
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Supported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) | +| May 29th 2024 | 1.13.4
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.4) | +| May 21st 2024 | 1.13.3
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.3) | +| April 3rd 2024 | 1.13.2
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.2) | +| March 26th 2024 | 1.13.1
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.1 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.1) | +| March 6th 2024 | 1.13.0
| 1.13.0 | Java 1.11.0
Go 1.10.0
PHP 1.2.0
Python 1.13.0
.NET 1.13.0
JS 3.3.0 | 0.14.0 | Unsupported | [v1.13.0 release notes](https://github.com/dapr/dapr/releases/tag/v1.13.0) | | January 17th 2024 | 1.12.4
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.4 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.4) | | January 2nd 2024 | 1.12.3
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.3 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.3) | | November 18th 2023 | 1.12.2
| 1.12.0 | Java 1.10.0
Go 1.9.1
PHP 1.2.0
Python 1.12.0
.NET 1.12.0
JS 3.2.0 | 0.14.0 | Unsupported | [v1.12.2 release notes](https://github.com/dapr/dapr/releases/tag/v1.12.2) | diff --git a/daprdocs/content/en/reference/api/actors_api.md b/daprdocs/content/en/reference/api/actors_api.md index 4fead8ee19d..288c4dcafb4 100644 --- a/daprdocs/content/en/reference/api/actors_api.md +++ b/daprdocs/content/en/reference/api/actors_api.md @@ -3,7 +3,7 @@ type: docs title: "Actors API reference" linkTitle: "Actors API" description: "Detailed documentation on the actors API" -weight: 600 +weight: 200 --- Dapr provides native, cross-platform, and cross-language virtual actor capabilities. diff --git a/daprdocs/content/en/reference/api/bindings_api.md b/daprdocs/content/en/reference/api/bindings_api.md index 81e13eecca7..8c63feb0a01 100644 --- a/daprdocs/content/en/reference/api/bindings_api.md +++ b/daprdocs/content/en/reference/api/bindings_api.md @@ -3,7 +3,7 @@ type: docs title: "Bindings API reference" linkTitle: "Bindings API" description: "Detailed documentation on the bindings API" -weight: 500 +weight: 300 --- Dapr provides bi-directional binding capabilities for applications and a consistent approach to interacting with different cloud/on-premise services or systems. diff --git a/daprdocs/content/en/reference/api/configuration_api.md b/daprdocs/content/en/reference/api/configuration_api.md index ef28fc42a8f..e09a5d9b7bd 100644 --- a/daprdocs/content/en/reference/api/configuration_api.md +++ b/daprdocs/content/en/reference/api/configuration_api.md @@ -3,7 +3,7 @@ type: docs title: "Configuration API reference" linkTitle: "Configuration API" description: "Detailed documentation on the configuration API" -weight: 800 +weight: 400 --- ## Get Configuration diff --git a/daprdocs/content/en/reference/api/conversation_api.md b/daprdocs/content/en/reference/api/conversation_api.md index 1a4e006b348..eab64dd6ed5 100644 --- a/daprdocs/content/en/reference/api/conversation_api.md +++ b/daprdocs/content/en/reference/api/conversation_api.md @@ -3,21 +3,23 @@ type: docs title: "Conversation API reference" linkTitle: "Conversation API" description: "Detailed documentation on the conversation API" -weight: 1400 +weight: 500 --- {{% alert title="Alpha" color="primary" %}} The conversation API is currently in [alpha]({{% ref "certification-lifecycle.md#certification-levels" %}}). {{% /alert %}} -Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching and PII data obfuscation. +Dapr provides an API to interact with Large Language Models (LLMs) and enables critical performance and security functionality with features like prompt caching, PII data obfuscation, and tool calling capabilities. + +Tool calling follows OpenAI's function calling format, making it easy to integrate with existing AI development workflows and tools. ## Converse -This endpoint lets you converse with LLMs. +This endpoint lets you converse with LLMs using the Alpha2 version of the API, which provides enhanced tool calling support and alignment with OpenAI's interface. ``` -POST http://localhost:/v1.0-alpha1/conversation//converse +POST http://localhost:/v1.0-alpha2/conversation//converse ``` ### URL parameters @@ -30,35 +32,175 @@ POST http://localhost:/v1.0-alpha1/conversation//converse | Field | Description | | --------- | ----------- | +| `contextId` | The ID of an existing chat (like in ChatGPT). Optional | | `inputs` | Inputs for the conversation. Multiple inputs at one time are supported. 
Required | -| `cacheTTL` | A time-to-live value for a prompt cache to expire. Uses Golang duration format. Optional | -| `scrubPII` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Set this value if all PII (across contents) in the request needs to be scrubbed. Optional | -| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency and creativity. Optional | -| `metadata` | [Metadata](#metadata) passed to conversation components. Optional | +| `parameters` | Parameters for all custom fields. Optional | +| `metadata` | Metadata passed to conversation components. Optional | +| `scrubPii` | A boolean value to enable obfuscation of sensitive information returning from the LLM. Optional | +| `temperature` | A float value to control the temperature of the model. Used to optimize for consistency (0) or creativity (1). Optional | +| `tools` | Tools register the tools available to be used by the LLM during the conversation. Optional | +| `toolChoice` | Controls which (if any) tool is called by the model. Values: `auto`, `required`, or specific tool name. Defaults to `auto` if tools are present. Optional | #### Input body | Field | Description | | --------- | ----------- | -| `content` | The message content to send to the LLM. Required | -| `role` | The role for the LLM to assume. Possible values: 'user', 'tool', 'assistant' | -| `scrubPII` | A boolean value to enable obfuscation of sensitive information present in the content field. Set this value if PII for this specific content needs to be scrubbed exclusively. Optional | +| `messages` | Array of conversation messages. Required | +| `scrubPii` | A boolean value to enable obfuscation of sensitive information present in the content field. Optional | + +#### Message types + +The API supports different message types: + +| Type | Description | +| ---- | ----------- | +| `ofDeveloper` | Developer role messages with optional name and content | +| `ofSystem` | System role messages with optional name and content | +| `ofUser` | User role messages with optional name and content | +| `ofAssistant` | Assistant role messages with optional name, content, and tool calls | +| `ofTool` | Tool role messages with tool ID, name, and content | + + +#### Tool calling + +Tools can be defined using the `tools` field with function definitions: + +| Field | Description | +| --------- | ----------- | +| `function.name` | The name of the function to be called. Required | +| `function.description` | A description of what the function does. Optional | +| `function.parameters` | JSON Schema object describing the function parameters. Optional | + -### Request content example +#### Tool choice options + +The `toolChoice` is an optional parameter that controls how the model can use available tools: + +- **`auto`**: The model can pick between generating a message or calling one or more tools (default when tools are present) +- **`required`**: Requires one or more functions to be called +- **`{tool_name}`**: Forces the model to call a specific tool by name + + +#### Metadata +The `metadata` field serves as a dynamic configuration mechanism that allows you to pass additional configuration and authentication information to conversation components on a per-request basis. This metadata overrides any corresponding fields configured in the component's YAML configuration file, enabling dynamic configuration without modifying static component definitions. 
+ +**Common metadata fields:** + +| Field | Description | Example | +| ----- | ----------- | ------- | +| `api_key` | API key for authenticating with the LLM service | `"sk-1234567890abcdef"` | +| `model` | Specific model identifier | `"gpt-4-turbo"`, `"claude-3-sonnet"` | +| `version` | API version or service version | `"1.0"`, `"2023-12-01"` | +| `endpoint` | Custom endpoint URL for the service | `"https://api.custom-llm.com/v1"` | + +{{% alert title="Note" color="primary" %}} +The exact metadata fields supported depend on the specific conversation component implementation. Refer to the component's documentation for the complete list of supported metadata fields. +{{% /alert %}} + +In addition to passing metadata in the request body, you can also pass metadata as URL query parameters without modifying the request payload. Here is the format: + +- **Prefix**: All metadata parameters must be prefixed with `metadata.` +- **Format**: `?metadata.=` +- **Multiple parameters**: Separate with `&` (e.g., `?metadata.api_key=sk-123&metadata.model=gpt-4`) + +Example of model override: +```bash +POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse?metadata.model=sk-gpt-4-turbo +``` + +URL metadata parameters are merged with request body metadata, URL parameters take precedence if conflicts exist, and both override component configuration in the YAML file. + +### Request content examples + +#### Basic conversation ```json -REQUEST = { - "inputs": [ - { - "content": "What is Dapr?", - "role": "user", // Optional - "scrubPII": "true", // Optional. Will obfuscate any sensitive information found in the content field - }, - ], - "cacheTTL": "10m", // Optional - "scrubPII": "true", // Optional. Will obfuscate any sensitive information returning from the LLM - "temperature": 0.5 // Optional. Optimizes for consistency (0) or creativity (1) -} +curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \ + -H "Content-Type: application/json" \ + -d '{ + "inputs": [ + { + "messages": [ + { + "ofUser": { + "content": [ + { + "text": "What is Dapr?" + } + ] + } + } + ] + } + ], + "parameters": {}, + "metadata": {} + }' +``` + +#### Conversation with tool calling + +```json +curl -X POST http://localhost:3500/v1.0-alpha2/conversation/openai/converse \ + -H "Content-Type: application/json" \ + -d '{ + "inputs": [ + { + "messages": [ + { + "ofUser": { + "content": [ + { + "text": "What is the weather like in San Francisco in celsius?" + } + ] + } + } + ], + "scrubPii": false + } + ], + "parameters": { + "max_tokens": { + "@type": "type.googleapis.com/google.protobuf.Int64Value", + "value": "100" + }, + "model": { + "@type": "type.googleapis.com/google.protobuf.StringValue", + "value": "claude-3-5-sonnet-20240620" + } + }, + "metadata": { + "api_key": "test-key", + "version": "1.0" + }, + "scrubPii": false, + "temperature": 0.7, + "tools": [ + { + "function": { + "name": "get_weather", + "description": "Get the current weather for a location", + "parameters": { + "type": "object", + "properties": { + "location": { + "type": "string", + "description": "The city and state, e.g. 
San Francisco, CA" + }, + "unit": { + "type": "string", + "enum": ["celsius", "fahrenheit"], + "description": "The temperature unit to use" + } + }, + "required": ["location"] + } + } + } + ], + "toolChoice": "auto" + }' ``` ### HTTP response codes @@ -71,21 +213,61 @@ Code | Description ### Response content +#### Basic conversation response + ```json -RESPONSE = { - "outputs": { +{ + "outputs": [ { - "result": "Dapr is distribution application runtime ...", - "parameters": {}, - }, + "choices": [ + { + "finishReason": "stop", + "message": { + "content": "Distributed application runtime, open-source." + } + } + ] + } + ] +} +``` + +#### Tool calling response + +```json +{ + "outputs": [ { - "result": "Dapr can help developers ...", - "parameters": {}, + "choices": [ + { + "finishReason": "tool_calls", + "message": { + "toolCalls": [ + { + "id": "call_Uwa41pG0UqGA2zp0Fec0KwOq", + "function": { + "name": "get_weather", + "arguments": "{\"location\":\"San Francisco, CA\",\"unit\":\"celsius\"}" + } + } + ] + } + } + ] } - }, + ] } ``` + +## Legacy Alpha1 API + +The previous Alpha1 version of the API is still supported for backward compatibility but is deprecated. For new implementations, use the Alpha2 version described above. + +``` +POST http://localhost:/v1.0-alpha1/conversation//converse +``` + ## Next steps - [Conversation API overview]({{% ref conversation-overview.md %}}) diff --git a/daprdocs/content/en/reference/api/cryptography_api.md b/daprdocs/content/en/reference/api/cryptography_api.md index 163abe1d77a..985a247c9d9 100644 --- a/daprdocs/content/en/reference/api/cryptography_api.md +++ b/daprdocs/content/en/reference/api/cryptography_api.md @@ -3,7 +3,7 @@ type: docs title: "Cryptography API reference" linkTitle: "Cryptography API" description: "Detailed documentation on the cryptography API" -weight: 1300 +weight: 600 --- Dapr provides cross-platform and cross-language support for encryption and decryption support via the diff --git a/daprdocs/content/en/reference/api/distributed_lock_api.md b/daprdocs/content/en/reference/api/distributed_lock_api.md index 92914a03768..b4db657cee2 100644 --- a/daprdocs/content/en/reference/api/distributed_lock_api.md +++ b/daprdocs/content/en/reference/api/distributed_lock_api.md @@ -1,9 +1,9 @@ --- type: docs -title: "Distributed Lock API reference" -linkTitle: "Distributed Lock API" +title: "Distributed lock API reference" +linkTitle: "Distributed lock API" description: "Detailed documentation on the distributed lock API" -weight: 900 +weight: 700 --- ## Lock diff --git a/daprdocs/content/en/reference/api/health_api.md b/daprdocs/content/en/reference/api/health_api.md index 164e2dd6d0b..bb0e096074e 100644 --- a/daprdocs/content/en/reference/api/health_api.md +++ b/daprdocs/content/en/reference/api/health_api.md @@ -3,7 +3,7 @@ type: docs title: "Health API reference" linkTitle: "Health API" description: "Detailed documentation on the health API" -weight: 1000 +weight: 800 --- Dapr provides health checking probes that can be used as readiness or liveness of Dapr and for initialization readiness from SDKs. 
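For example, a basic liveness check against a locally running sidecar can be sketched with curl. This assumes the sidecar's HTTP port is 3500 (adjust to your `--dapr-http-port`); a healthy sidecar responds with `204 No Content`:

```bash
# Query the sidecar health endpoint; 3500 is an assumed port, not a fixed value.
curl -i http://localhost:3500/v1.0/healthz
```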
diff --git a/daprdocs/content/en/reference/api/jobs_api.md b/daprdocs/content/en/reference/api/jobs_api.md index aa4c29f940a..690fe834135 100644 --- a/daprdocs/content/en/reference/api/jobs_api.md +++ b/daprdocs/content/en/reference/api/jobs_api.md @@ -3,7 +3,7 @@ type: docs title: "Jobs API reference" linkTitle: "Jobs API" description: "Detailed documentation on the jobs API" -weight: 1300 +weight: 900 --- {{% alert title="Note" color="primary" %}} diff --git a/daprdocs/content/en/reference/api/metadata_api.md b/daprdocs/content/en/reference/api/metadata_api.md index dc5ed7fa953..ed77aca1b6c 100644 --- a/daprdocs/content/en/reference/api/metadata_api.md +++ b/daprdocs/content/en/reference/api/metadata_api.md @@ -3,7 +3,7 @@ type: docs title: "Metadata API reference" linkTitle: "Metadata API" description: "Detailed documentation on the Metadata API" -weight: 1100 +weight: 1000 --- Dapr has a metadata API that returns information about the sidecar allowing runtime discoverability. The metadata endpoint returns the following information. diff --git a/daprdocs/content/en/reference/api/placement_api.md b/daprdocs/content/en/reference/api/placement_api.md index de216c1f156..6b02dd2cbfb 100644 --- a/daprdocs/content/en/reference/api/placement_api.md +++ b/daprdocs/content/en/reference/api/placement_api.md @@ -3,7 +3,7 @@ type: docs title: "Placement API reference" linkTitle: "Placement API" description: "Detailed documentation on the Placement API" -weight: 1200 +weight: 1100 --- Dapr has an HTTP API `/placement/state` for Placement service that exposes placement table information. The API is exposed on the sidecar on the same port as the healthz. This is an unauthenticated endpoint, and is disabled by default. diff --git a/daprdocs/content/en/reference/api/pubsub_api.md b/daprdocs/content/en/reference/api/pubsub_api.md index d2cc67ab03e..32af3bb0d09 100644 --- a/daprdocs/content/en/reference/api/pubsub_api.md +++ b/daprdocs/content/en/reference/api/pubsub_api.md @@ -3,7 +3,7 @@ type: docs title: "Pub/sub API reference" linkTitle: "Pub/Sub API" description: "Detailed documentation on the pub/sub API" -weight: 200 +weight: 1200 --- ## Publish a message to a given topic diff --git a/daprdocs/content/en/reference/api/secrets_api.md b/daprdocs/content/en/reference/api/secrets_api.md index 752736f5fe4..6561823ec90 100644 --- a/daprdocs/content/en/reference/api/secrets_api.md +++ b/daprdocs/content/en/reference/api/secrets_api.md @@ -3,7 +3,7 @@ type: docs title: "Secrets API reference" linkTitle: "Secrets API" description: "Detailed documentation on the secrets API" -weight: 700 +weight: 1300 --- ## Get Secret diff --git a/daprdocs/content/en/reference/api/service_invocation_api.md b/daprdocs/content/en/reference/api/service_invocation_api.md index cc46d982488..811d0021ac8 100644 --- a/daprdocs/content/en/reference/api/service_invocation_api.md +++ b/daprdocs/content/en/reference/api/service_invocation_api.md @@ -3,7 +3,7 @@ type: docs title: "Service invocation API reference" linkTitle: "Service invocation API" description: "Detailed documentation on the service invocation API" -weight: 100 +weight: 1400 --- Dapr provides users with the ability to call other applications that are using Dapr with a unique named identifier (appId), or HTTP endpoints that are not using Dapr. 
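To make that concrete, a call through the sidecar to another Dapr application follows the `v1.0/invoke/<appId>/method/<method-name>` pattern; the sketch below assumes a target app ID of `checkout`, a method named `orders`, and the default Dapr HTTP port `3500`:

```bash
# Invoke the "orders" method on the application registered with Dapr as "checkout",
# routed through the local sidecar.
curl -X POST http://localhost:3500/v1.0/invoke/checkout/method/orders \
  -H "Content-Type: application/json" \
  -d '{"orderId": 100}'
```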
diff --git a/daprdocs/content/en/reference/api/state_api.md b/daprdocs/content/en/reference/api/state_api.md index 328ffec80fd..bf6e2c15272 100644 --- a/daprdocs/content/en/reference/api/state_api.md +++ b/daprdocs/content/en/reference/api/state_api.md @@ -3,7 +3,7 @@ type: docs title: "State management API reference" linkTitle: "State management API" description: "Detailed documentation on the state management API" -weight: 400 +weight: 1500 --- ## Component file diff --git a/daprdocs/content/en/reference/api/workflow_api.md b/daprdocs/content/en/reference/api/workflow_api.md index 85ee207162d..f84996444f6 100644 --- a/daprdocs/content/en/reference/api/workflow_api.md +++ b/daprdocs/content/en/reference/api/workflow_api.md @@ -3,7 +3,7 @@ type: docs title: "Workflow API reference" linkTitle: "Workflow API" description: "Detailed documentation on the workflow API" -weight: 300 +weight: 1600 --- Dapr provides users with the ability to interact with workflows through its built-in workflow engine, which is implemented using Dapr Actors. This workflow engine is accessed using the name `dapr` in API calls as the `workflowComponentName`. diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md index bca14fe0607..0f61294f835 100644 --- a/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-bindings/_index.md @@ -2,7 +2,7 @@ type: docs title: "Bindings component specs" linkTitle: "Bindings" -weight: 4000 +weight: 1000 description: The supported external bindings that interface with Dapr aliases: - "/operations/components/setup-bindings/supported-bindings/" diff --git a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md index 989f93ab366..227b9973785 100644 --- a/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md +++ b/daprdocs/content/en/reference/components-reference/supported-bindings/eventhubs.md @@ -70,24 +70,24 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Binding support | Details | Example | -|--------------------|:--------:|------------|-----|---------| -| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` | -| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace.
* Mutally exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` -| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name.
* Mutally exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` -| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"` -| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` -| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` -| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` -| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` -| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` -| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` | -| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` -| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` -| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` -| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` -| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"` -| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"` +| Field | Required | Binding support | Details | Example | +|--------------------|:--------:|------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `eventHub` | Y* | Input/Output | The name of the Event Hubs hub ("topic"). Required if using Microsoft Entra ID authentication or if the connection string doesn't contain an `EntityPath` value | `mytopic` | +| `connectionString` | Y* | Input/Output | Connection string for the Event Hub or the Event Hub namespace.
* Mutually exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` +| `eventHubNamespace` | Y* | Input/Output | The Event Hub Namespace name.
* Mutually exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` +| `enableEntityManagement` | N | Input/Output | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true"`, `"false"` +| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` +| `resourceGroupName` | N | Input/Output | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` +| `subscriptionID` | N | Input/Output | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` +| `partitionCount` | N | Input/Output | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` +| `messageRetentionInDays` | N | Input/Output | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` +| `consumerGroup` | Y | Input | The name of the [Event Hubs Consumer Group](https://docs.microsoft.com/azure/event-hubs/event-hubs-features#consumer-groups) to listen on | `"group1"` | +| `storageAccountName` | Y | Input | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` +| `storageAccountKey` | Y* | Input | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` +| `storageConnectionString` | Y* | Input | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` +| `storageContainerName` | Y | Input | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` +| `getAllMessageProperties` | N | Input | When set to `true`, retrieves all user/app/custom properties from the Event Hub message and forwards them in the returned event metadata. Default setting is `"false"`. | `"true"`, `"false"` +| `direction` | N | Input/Output | The direction of the binding. | `"input"`, `"output"`, `"input, output"` ### Microsoft Entra ID authentication diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md index b8e80f12216..64e4ebf7366 100644 --- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/_index.md @@ -2,7 +2,7 @@ type: docs title: "Configuration store component specs" linkTitle: "Configuration stores" -weight: 6000 +weight: 2000 description: The supported configuration stores that interface with Dapr aliases: - "/operations/components/setup-configuration-store/supported-configuration-stores/" diff --git a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md index 3db6a4bba94..b2e2c3919d3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md +++ b/daprdocs/content/en/reference/components-reference/supported-configuration-stores/azure-appconfig-configuration-store.md @@ -50,14 +50,14 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|----------------------------|:--------:|---------|---------| -| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutally exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000` -| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutally exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io` -| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` -| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. 
| `4s` -| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. Typically the value is greater than or equal to the value specified in RetryDelay. Defaults to `120` seconds; `"-1"` disables the limit | `120s` -| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h` +| Field | Required | Details | Example | +|----------------------------|:--------:|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| connectionString | Y* | Connection String for the Azure App Configuration instance. No Default. Can be `secretKeyRef` to use a secret reference. *Mutually exclusive with host field. *Not to be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `Endpoint=https://foo.azconfig.io;Id=osOX-l9-s0:sig;Secret=00000000000000000000000000000000000000000000` +| host | N* | Endpoint for the Azure App Configuration instance. No Default. *Mutually exclusive with connectionString field. *To be used when [Azure Authentication](https://docs.dapr.io/developing-applications/integrations/azure/azure-authentication/authenticating-azure/) is used | `https://dapr.azconfig.io` +| maxRetries | N | Maximum number of retries before giving up. Defaults to `3` | `5`, `10` +| retryDelay | N | RetryDelay specifies the initial amount of delay to use before retrying an operation. The delay increases exponentially with each retry up to the maximum specified by MaxRetryDelay. Defaults to `4` seconds; `"-1"` disables delay between retries. | `4s` +| maxRetryDelay | N | MaxRetryDelay specifies the maximum delay allowed before retrying an operation. Typically the value is greater than or equal to the value specified in RetryDelay. Defaults to `120` seconds; `"-1"` disables the limit | `120s` +| subscribePollInterval | N | subscribePollInterval specifies the poll interval in nanoseconds for polling the subscribed keys for any changes. This will be updated in the future to Go Time format. Default polling interval is set to `24` hours. | `24h` **Note**: either `host` or `connectionString` must be specified. 
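As a sketch of how the fields above fit together, a component that authenticates with Microsoft Entra ID via `host` and tunes the retry and polling behavior might look like the following (the component name and endpoint value are placeholders for illustration):

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: configstore
spec:
  type: configuration.azure.appconfig
  version: v1
  metadata:
    # Entra ID authentication: set host instead of connectionString
    - name: host
      value: "https://dapr.azconfig.io"
    # Optional retry and polling tuning, per the table above
    - name: maxRetries
      value: "5"
    - name: retryDelay
      value: "4s"
    - name: maxRetryDelay
      value: "120s"
    - name: subscribePollInterval
      value: "24h"
```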
diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md index 179162b3bb2..c6c862960b9 100644 --- a/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-conversation/_index.md @@ -2,7 +2,7 @@ type: docs title: "Conversation component specs" linkTitle: "Conversation" -weight: 9000 +weight: 3000 description: The supported conversation components that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md index 795f9877909..f1c29e2b5f3 100644 --- a/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md +++ b/daprdocs/content/en/reference/components-reference/supported-conversation/openai.md @@ -46,6 +46,48 @@ The above example uses secrets as plain strings. It is recommended to use a secr | `apiType` | N | Specifies the API provider type. Required when using a provider that does not follow the default OpenAI API endpoint conventions. | `azure` | | `apiVersion`| N | The API version to use. Required when the `apiType` is set to `azure`. | `2025-04-01-preview` | +## Azure OpenAI Configuration + +To configure the OpenAI component to connect to Azure OpenAI, you need to set the following metadata fields which are required for Azure's API format. + +### Required fields for Azure OpenAI + +When connecting to Azure OpenAI, the following fields are **required**: + +- `apiType`: Must be set to `azure` to enable Azure OpenAI compatibility +- `endpoint`: Your Azure OpenAI resource endpoint URL (e.g., `https://your-resource.openai.azure.com/`) +- `apiVersion`: The API version for your Azure OpenAI deployment (e.g., `2025-01-01-preview`) +- `key`: Your Azure OpenAI API key + +Get your configuration values from: https://ai.azure.com/ + +### Azure OpenAI component example + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: azure-openai +spec: + type: conversation.openai + metadata: + - name: key + value: "your-azure-openai-api-key" + - name: model + value: "gpt-4.1-nano" # Default: gpt-4.1-nano + - name: endpoint + value: "https://your-resource.openai.azure.com/" + - name: apiType + value: "azure" + - name: apiVersion + value: "2025-01-01-preview" +``` + + +{{% alert title="Note" color="primary" %}} +When using Azure OpenAI, both `endpoint` and `apiVersion` are mandatory fields. The component returns an error if either field is missing when `apiType` is set to `azure`. 
+{{% /alert %}} + ## Related links - [Conversation API overview]({{% ref conversation-overview.md %}}) diff --git a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md index c7789d5e4a6..160d3e3427c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-cryptography/_index.md @@ -2,7 +2,7 @@ type: docs title: "Cryptography component specs" linkTitle: "Cryptography" -weight: 8000 +weight: 4000 description: The supported cryptography components that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md index 134e75360dc..588e5acc4ff 100644 --- a/daprdocs/content/en/reference/components-reference/supported-locks/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-locks/_index.md @@ -2,7 +2,7 @@ type: docs title: "Lock component specs" linkTitle: "Locks" -weight: 7000 +weight: 5000 description: The supported locks that interface with Dapr no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md index ddb92d740ed..995651d14db 100644 --- a/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-middleware/_index.md @@ -2,7 +2,7 @@ type: docs title: "Middleware component specs" linkTitle: "Middleware" -weight: 10000 +weight: 6000 description: List of all the supported middleware components that can be injected in Dapr's processing pipeline. 
no_list: true aliases: diff --git a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md index c27f6f3fea8..ac0beb524dc 100644 --- a/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-name-resolution/_index.md @@ -2,7 +2,7 @@ type: docs title: "Name resolution provider component specs" linkTitle: "Name resolution" -weight: 9000 +weight: 7000 description: The supported name resolution providers to enable Dapr service invocation no_list: true --- diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md index 876e7bedc16..34785d7d65c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/_index.md @@ -2,7 +2,7 @@ type: docs title: "Pub/sub brokers component specs" linkTitle: "Pub/sub brokers" -weight: 1000 +weight: 8000 description: The supported pub/sub brokers that interface with Dapr aliases: - "/operations/components/setup-pubsub/supported-pubsub/" diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md index 21a50e96b55..7b21a4817d0 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-azure-eventhubs.md @@ -62,21 +62,21 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|--------------------|:--------:|---------|---------| -| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace.
* Mutally exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` -| `eventHubNamespace` | Y* | The Event Hub Namespace name.
* Mutally exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` +| Field | Required | Details | Example | +|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| `connectionString` | Y* | Connection string for the Event Hub or the Event Hub namespace.
* Mutually exclusive with `eventHubNamespace` field.
* Required when not using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key};EntityPath={EventHub}"` or `"Endpoint=sb://{EventHubNamespace}.servicebus.windows.net/;SharedAccessKeyName={PolicyName};SharedAccessKey={Key}"` +| `eventHubNamespace` | Y* | The Event Hub Namespace name.
* Mutually exclusive with `connectionString` field.
* Required when using [Microsoft Entra ID Authentication]({{% ref "authenticating-azure.md" %}}) | `"namespace"` | `consumerID` | N | Consumer ID (consumer tag) organizes one or more consumers into a group. Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}}) -| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"` -| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` -| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` -| `storageAccountKey` | Y* | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` -| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` -| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` -| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` -| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` -| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` -| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` +| `enableEntityManagement` | N | Boolean value to allow management of the EventHub namespace and storage account. Default: `false` | `"true", "false"` +| `enableInOrderMessageDelivery` | N | Input/Output | Boolean value to allow messages to be delivered in the order in which they were posted. This assumes `partitionKey` is set when publishing or posting to ensure ordering across partitions. Default: `false` | `"true"`, `"false"` +| `storageAccountName` | Y | Storage account name to use for the checkpoint store. |`"myeventhubstorage"` +| `storageAccountKey` | Y* | Storage account key for the checkpoint store account.
* When using Microsoft Entra ID, it's possible to omit this if the service principal has access to the storage account too. | `"112233445566778899"` +| `storageConnectionString` | Y* | Connection string for the checkpoint store, alternative to specifying `storageAccountKey` | `"DefaultEndpointsProtocol=https;AccountName=myeventhubstorage;AccountKey="` +| `storageContainerName` | Y | Storage container name for the storage account name. | `"myeventhubstoragecontainer"` +| `resourceGroupName` | N | Name of the resource group the Event Hub namespace is part of. Required when entity management is enabled | `"test-rg"` +| `subscriptionID` | N | Azure subscription ID value. Required when entity management is enabled | `"azure subscription id"` +| `partitionCount` | N | Number of partitions for the new Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"2"` +| `messageRetentionInDays` | N | Number of days to retain messages for in the newly created Event Hub namespace. Used only when entity management is enabled. Default: `"1"` | `"90"` ### Microsoft Entra ID authentication diff --git a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md index c3b4e69a8f5..cb65415ba7c 100644 --- a/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md +++ b/daprdocs/content/en/reference/components-reference/supported-pubsub/setup-rabbitmq.md @@ -76,35 +76,35 @@ The above example uses secrets as plain strings. It is recommended to use a secr ## Spec metadata fields -| Field | Required | Details | Example | -|--------------------|:--------:|---------|---------| -| connectionString | Y* | The RabbitMQ connection string. *Mutally exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` | -| protocol | N* | The RabbitMQ protocol. *Mutally exclusive with connectionString field | `amqp` | -| hostname | N* | The RabbitMQ hostname. *Mutally exclusive with connectionString field | `localhost` | -| username | N* | The RabbitMQ username. *Mutally exclusive with connectionString field | `username` | -| password | N* | The RabbitMQ password. *Mutally exclusive with connectionString field | `password` | +| Field | Required | Details | Example | +|--------------------|:--------:|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------|---------| +| connectionString | Y* | The RabbitMQ connection string. *Mutually exclusive with protocol, hostname, username, password field | `amqp://user:pass@localhost:5672` | +| protocol | N* | The RabbitMQ protocol. *Mutually exclusive with connectionString field | `amqp` | +| hostname | N* | The RabbitMQ hostname. *Mutually exclusive with connectionString field | `localhost` | +| username | N* | The RabbitMQ username. *Mutually exclusive with connectionString field | `username` | +| password | N* | The RabbitMQ password. *Mutually exclusive with connectionString field | `password` | | consumerID | N | Consumer ID (consumer tag) organizes one or more consumers into a group. 
Consumers with the same consumer ID work as one virtual consumer; for example, a message is processed only once by one of the consumers in the group. If the `consumerID` is not provided, the Dapr runtime set it to the Dapr application ID (`appID`) value. | Can be set to string value (such as `"channel1"` in the example above) or string format value (such as `"{podName}"`, etc.). [See all of template tags you can use in your component metadata.]({{% ref "component-schema.md#templated-metadata-values" %}}) -| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"` -| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"` -| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"` -| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"` -| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"` -| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"` -| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"` -| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"` -| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single` -| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. Defaults to `"false"` | `"true"`, `"false"` | -| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` | -| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1048576"` | -| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` | -| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` | -| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` | -| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. 
| `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` -| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` -| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"` -| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. | `"app1"`, `{uuid}`, `{podName}`, `{appID}` -| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"` -| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | "true", "false" +| durable | N | Whether or not to use [durable](https://www.rabbitmq.com/queues.html#durability) queues. Defaults to `"false"` | `"true"`, `"false"` +| deletedWhenUnused | N | Whether or not the queue should be configured to [auto-delete](https://www.rabbitmq.com/queues.html) Defaults to `"true"` | `"true"`, `"false"` +| autoAck | N | Whether or not the queue consumer should [auto-ack](https://www.rabbitmq.com/confirms.html) messages. Defaults to `"false"` | `"true"`, `"false"` +| deliveryMode | N | Persistence mode when publishing messages. Defaults to `"0"`. RabbitMQ treats `"2"` as persistent, all other numbers as non-persistent | `"0"`, `"2"` +| requeueInFailure | N | Whether or not to requeue when sending a [negative acknowledgement](https://www.rabbitmq.com/nack.html) in case of a failure. Defaults to `"false"` | `"true"`, `"false"` +| prefetchCount | N | Number of messages to [prefetch](https://www.rabbitmq.com/consumer-prefetch.html). Consider changing this to a non-zero value for production environments. Defaults to `"0"`, which means that all available messages will be pre-fetched. | `"2"` +| publisherConfirm | N | If enabled, client waits for [publisher confirms](https://www.rabbitmq.com/confirms.html#publisher-confirms) after publishing a message. Defaults to `"false"` | `"true"`, `"false"` +| reconnectWait | N | How long to wait (in seconds) before reconnecting if a connection failure occurs | `"0"` +| concurrencyMode | N | `parallel` is the default, and allows processing multiple messages in parallel (limited by the `app-max-concurrency` annotation, if configured). Set to `single` to disable parallel processing. In most situations there's no reason to change this. | `parallel`, `single` +| enableDeadLetter | N | Enable forwarding Messages that cannot be handled to a dead-letter topic. Defaults to `"false"` | `"true"`, `"false"` | +| maxLen | N | The maximum number of messages of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. | `"1000"` | +| maxLenBytes | N | Maximum length in bytes of a queue and its dead letter queue (if dead letter enabled). If both `maxLen` and `maxLenBytes` are set then both will apply; whichever limit is hit first will be enforced. Defaults to no limit. 
| `"1048576"` | +| exchangeKind | N | Exchange kind of the rabbitmq exchange. Defaults to `"fanout"`. | `"fanout"`,`"topic"` | +| saslExternal | N | With TLS, should the username be taken from an additional field (for example, CN). See [RabbitMQ Authentication Mechanisms](https://www.rabbitmq.com/access-control.html#mechanisms). Defaults to `"false"`. | `"true"`, `"false"` | +| ttlInSeconds | N | Set message TTL at the component level, which can be overwritten by message level TTL per request. | `"60"` | +| caCert | Required for using TLS | Certificate Authority (CA) certificate in PEM format for verifying server TLS certificates. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` +| clientCert | Required for using TLS | TLS client certificate in PEM format. Must be used with `clientKey`. | `"-----BEGIN CERTIFICATE-----\n\n-----END CERTIFICATE-----"` +| clientKey | Required for using TLS | TLS client key in PEM format. Must be used with `clientCert`. Can be `secretKeyRef` to use a secret reference. | `"-----BEGIN RSA PRIVATE KEY-----\n\n-----END RSA PRIVATE KEY-----"` +| clientName | N | This RabbitMQ [client-provided connection name](https://www.rabbitmq.com/connections.html#client-provided-names) is a custom identifier. If set, the identifier is mentioned in RabbitMQ server log entries and management UI. Can be set to {uuid}, {podName}, or {appID}, which is replaced by Dapr runtime to the real value. | `"app1"`, `{uuid}`, `{podName}`, `{appID}` +| heartBeat | N | Defines the heartbeat interval with the server, detecting the aliveness of the peer TCP connection with the RabbitMQ server. Defaults to `10s` . | `"10s"` +| `publishMessagePropertiesToMetadata` | N | Whether to publish AMQP message properties (headers, message ID, etc.) to the metadata. | "true", "false" ## Communication using TLS diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md index d44055ae9d5..03d17ca02cb 100644 --- a/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md +++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/_index.md @@ -2,7 +2,7 @@ type: docs title: "Secret store component specs" linkTitle: "Secret stores" -weight: 5000 +weight: 9000 description: The supported secret stores that interface with Dapr aliases: - "/operations/components/setup-secret-store/supported-secret-stores/" diff --git a/daprdocs/content/en/reference/components-reference/supported-secret-stores/akeyless.md b/daprdocs/content/en/reference/components-reference/supported-secret-stores/akeyless.md new file mode 100644 index 00000000000..3415649f690 --- /dev/null +++ b/daprdocs/content/en/reference/components-reference/supported-secret-stores/akeyless.md @@ -0,0 +1,224 @@ +--- +type: docs +title: "Akeyless" +linkTitle: "Akeyless" +description: Information about the Akeyless secret store component configuration. +--- + +## Create the Akeyless component + +To setup Akeyless secret store create a component of type `secretstores.akeyless`. See [this guide]({{% ref "setup-secret-store.md#apply-the-configuration" %}}) on how to create and apply a secretstore configuration. See this guide on [referencing secrets]({{% ref component-secrets.md %}}) to retrieve and use the secret with Dapr components. 
+ + +## Component Format + +```yaml +schemaVersion: v1 +type: secretstores +name: akeyless +version: v1 +status: beta +title: "Akeyless Secret Store" +urls: + - title: Reference + url: https://docs.dapr.io/reference/components-reference/supported-secret-stores/akeyless/ +authenticationProfiles: + - title: API Key + description: Authenticate using an API key. + metadata: + - name: accessId + required: true + description: The Akeyless Access ID. + example: "p-123456780wm" + type: string + - name: accessKey + required: true + description: The Akeyless API key. + example: "ABCD1233...=" + type: string + sensitive: true + - title: JWT + description: Authenticate using a JSON Web Token. + metadata: + - name: accessId + required: true + description: The Akeyless Access ID. + example: "p-123456780wm" + type: string + - name: jwt + required: true + description: The JSON Web Token. + example: "eyJ..." + type: string + sensitive: true + - title: AWS IAM + description: Authenticate using AWS IAM. + metadata: + - name: accessId + required: true + description: The Akeyless Access ID. + example: "p-123456780wm" + type: string + - title: Kubernetes + description: Authenticate using Kubernetes. + metadata: + - name: accessId + required: true + description: The Akeyless Access ID. + example: "p-123456780wm" + type: string + - name: k8sAuthConfigName + required: true + description: The name of the k8s auth config. + example: "k8s-auth-config" + type: string + - name: k8sGatewayUrl + required: true + description: The gateway URL that where the k8s auth config is located. + example: "http://gw.akeyless.svc.cluster.local:8000" + type: string + - name: k8sServiceAccountToken + required: true + description: The service account token. + example: "eyJ..." + type: string + sensitive: true +metadata: + - name: gatewayUrl + required: false + description: | + The URL to the Akeyless Gateway API. Default is https://api.akeyless.io. + default: "https://api.akeyless.io" + example: "https://your.akeyless.gw" + type: string +``` + +## Spec metadata fields + +| Field | Required | Details | Example | +|--------------------|:--------:|-------------------------------------------------------------------------|---------------------| +| `gatewayUrl` | N | The Akeyless Gateway API URL. Defaults to https://api.akeyless.io. | `http://gw-release.akeyless.svc.cluster.local:8000/api/v2` | +| `accessID` | Y | The Akeyless Access ID of the authentication method | `p-1234567890` | +| `accessKey` | N | Fill in when using an API Key (`access_key`) authentication method. | `ABCD1233...=` | +| `jwt` | N | Fill in a `base64`-encoded string of the JWT when using OAuth2.0/JWT (`jwt`) authentication method | `eyJ...` | +| `k8sAuthConfigName` | N | Fill in when using Kubernetes Authentication (`k8s`) authentication method | `my-k8s-auth-conf` | +| `k8sGatewayUrl` | N | Fill in when using Kubernetes Authentication (`k8s`) authentication method. If not filled in, will default to value set for `akeylessGWApiURL`. | `http://gw-release.akeyless.svc.cluster.local:8000/api/v2` | +| `k8sServiceAccountToken` | N | Fill in a `base64`-encoded string of the JWT when using Kubernetes Authentication (`k8s`) authentication method. 
If not filled in, will read from k8s token in container filesystem | `ej...` | + + +## Authentication Methods + +We currently support the following authentication methods: + +### [API Key](https://docs.akeyless.io/docs/api-key) + + + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: akeyless +spec: + type: secretstores.akeyless + version: v1 + metadata: + - name: gatewayUrl + value: "https://api.akeyless.io" + - name: accessId + value: "p-123..." + - name: accessKey + value: "ABCD1233...=" +``` + +### [AWS IAM](https://docs.akeyless.io/docs/aws-iam) + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: akeyless +spec: + type: secretstores.akeyless + version: v1 + metadata: + - name: gatewayUrl + value: "https://api.akeyless.io" + - name: accessId + value: "p-123..." +``` + +### [OAuth2.0/JWT](https://docs.akeyless.io/docs/oauth20jwt) + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: akeyless +spec: + type: secretstores.akeyless + version: v1 + metadata: + - name: gatewayUrl + value: "https://api.akeyless.io" + - name: accessId + value: "p-123..." + - name: jwt + value: "eyJ..." +``` + +### [Kubernetes](https://docs.akeyless.io/docs/kubernetes-auth) + +```yaml +apiVersion: dapr.io/v1alpha1 +kind: Component +metadata: + name: akeyless +spec: + type: secretstores.akeyless + version: v1 + metadata: + - name: gatewayUrl + value: "http://release-gw.akeyless.svc.cluster.local:8000/api/v2" + - name: accessID + value: "p-123..." + - name: k8sAuthConfigName + value: "my-k8s-auth-config" + - name: k8sGatewayUrl + value: "http://release-gw.akeyless.svc.cluster.local:8000/api/v2" + - name: k8sServiceAccountToken + value: "eyJ..." +``` + +{{% alert title="Warning" color="warning" %}} +The above examples use secrets as plain strings. It is recommended to use a local secret store such as [Kubernetes secret store]({{% ref kubernetes-secret-store.md %}}) or a [local file]({{% ref file-secret-store.md %}}) to bootstrap secure key storage. +{{% /alert %}} + + +## Retrieve secrets + +You can retrieve secrets from Akeyless using the Dapr secrets API: + +```bash +curl http://localhost:3500/v1.0/secrets/akeyless/my-secret +``` + +This returns the secret value stored in Akeyless with the name `my-secret`. + +## Setup Akeyless instance + +To get started with Akeyless: + +1. Sign up for an Akeyless account at [https://www.akeyless.io](https://www.akeyless.io) +2. Create an Access ID and configure your preferred authentication method. +3. Set up your secrets in the Akeyless. +4. Configure the Dapr component using one of the authentication methods above. + +For more detailed setup instructions, refer to the [Akeyless documentation](https://docs.akeyless.io/). 
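As a rough sketch of step 4 and of the "referencing secrets" guide linked above, another component can resolve its credentials from this store through a `secretKeyRef` plus an `auth.secretStore` entry; the Redis state store, secret name, and key below are placeholders for illustration:

```yaml
apiVersion: dapr.io/v1alpha1
kind: Component
metadata:
  name: statestore
spec:
  type: state.redis
  version: v1
  metadata:
    - name: redisHost
      value: "localhost:6379"
    - name: redisPassword
      # Resolved at load time from the Akeyless secret store defined above
      secretKeyRef:
        name: redis-password
        key: redis-password
auth:
  secretStore: akeyless
```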
+
+## Related links
+
+- [Secrets building block]({{% ref secrets %}})
+- [How-To: Retrieve a secret]({{% ref "howto-secrets.md" %}})
+- [How-To: Reference secrets in Dapr components]({{% ref component-secrets.md %}})
+- [Secrets API reference]({{% ref secrets_api.md %}})
\ No newline at end of file
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
index 2b2509f53fb..0855682eb22 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/_index.md
@@ -3,7 +3,7 @@ type: docs
 title: "State store component specs"
 linkTitle: "State stores"
 description: "The supported state stores that interface with Dapr"
-weight: 4000
+weight: 10000
 aliases:
   - "/operations/components/setup-state-store/supported-state-stores/"
 no_list: true
diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
index 2e67d46c137..dc225eee5d5 100644
--- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
+++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-azure-cosmosdb.md
@@ -225,6 +225,22 @@ This particular optimization only makes sense if you are saving large objects to
 
 {{% /alert %}}
 
+## Workflow Limitations
+
+{{% alert title="Note" color="primary" %}}
+
+As described below, CosmosDB has limitations that likely make it unsuitable for production environments.
+There is currently no path for migrating workflow data from CosmosDB to another state store, so exceeding these limits in production results in failed workflows with no workaround.
+
+{{% /alert %}}
+
+The more complex a workflow is (number of activities, child workflows, and so on), the more state store operations it performs per transaction.
+All input and output values are saved to the workflow history, and each is written as an operation within these transactions.
+CosmosDB has a [maximum document size of 2 MB and a maximum transaction size of 100 operations](https://learn.microsoft.com/azure/cosmos-db/concepts-limits#per-request-limits).
+Attempting to write to CosmosDB beyond these limits results in an error code of `413`.
+The workflow history must therefore stay within these limits, which makes CosmosDB unsuitable for workflows with large input/output values or for large, complex workflows.
+A general guide to the number of records that are saved during a workflow execution can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}).
+ ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) diff --git a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md index 5a45e374d90..082d4e4875a 100644 --- a/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md +++ b/daprdocs/content/en/reference/components-reference/supported-state-stores/setup-dynamodb.md @@ -158,6 +158,20 @@ $ aws dynamodb get-item \ } ``` +## Workflow Limitations + +{{% alert title="Note" color="primary" %}} + +As described below, DynamoDB has limitations that likely make it unsuitable for production environments. +There is currently no path for migrating Workflow data from DynamoDB to another state store, meaning exceeding these limits in production will result in failed workflows with no workaround. + +{{% /alert %}} + +The more complex a workflow is (number of activities, child workflows, etc.), the more state operations it performs per state store transaction. +The maximum number of operations that can be performed by DynamoDB in a [single transaction is 100](https://docs.aws.amazon.com/amazondynamodb/latest/developerguide/transaction-apis.html). +This means that DynamoDB can only handle workflows with a limited complexity, meaning it is not suitable for all workflow scenarios. +A general guide to the number of records that are saved during a workflow executon can be found [here]({{% ref "workflow-architecture.md#state-store-record-count" %}}). + ## Related links - [Basic schema for a Dapr component]({{% ref component-schema %}}) diff --git a/daprdocs/data/components/state_stores/azure.yaml b/daprdocs/data/components/state_stores/azure.yaml index 287477de780..b340a26db71 100644 --- a/daprdocs/data/components/state_stores/azure.yaml +++ b/daprdocs/data/components/state_stores/azure.yaml @@ -30,7 +30,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Azure Table Storage link: setup-azure-tablestorage state: Stable diff --git a/daprdocs/data/components/state_stores/generic.yaml b/daprdocs/data/components/state_stores/generic.yaml index 289d8ce4305..8d8ce44184c 100644 --- a/daprdocs/data/components/state_stores/generic.yaml +++ b/daprdocs/data/components/state_stores/generic.yaml @@ -52,7 +52,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Hashicorp Consul link: setup-consul state: Alpha @@ -140,7 +140,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: PostgreSQL v1 link: setup-postgresql-v1 state: Stable @@ -195,7 +195,7 @@ transactions: true etag: true ttl: true - workflow: false + workflow: true - component: Zookeeper link: setup-zookeeper state: Alpha diff --git a/daprdocs/data/components/state_stores/oracle.yaml b/daprdocs/data/components/state_stores/oracle.yaml index eae48304fde..fc5136684b6 100644 --- a/daprdocs/data/components/state_stores/oracle.yaml +++ b/daprdocs/data/components/state_stores/oracle.yaml @@ -9,7 +9,7 @@ etag: true ttl: true query: false - workflow: false + workflow: true - component: Coherence link: setup-coherence state: Alpha diff --git a/daprdocs/layouts/_partials/hooks/body-end.html b/daprdocs/layouts/_partials/hooks/body-end.html index ee0ebc1f145..eb321b47a22 100644 --- a/daprdocs/layouts/_partials/hooks/body-end.html +++ b/daprdocs/layouts/_partials/hooks/body-end.html @@ -7,7 +7,7 @@ container: 
'#docsearch', appId: 'O0QLQGNF38', apiKey: '54ae43aa28ce8f00c54c8d5f544d29b9', - indexName: 'daprdocs', + indexName: 'crawler_dapr', }); @@ -17,4 +17,4 @@ -{{ end }} \ No newline at end of file +{{ end }} diff --git a/daprdocs/layouts/_shortcodes/dapr-latest-version.html b/daprdocs/layouts/_shortcodes/dapr-latest-version.html index a085fd0e6f5..bdd712552d8 100644 --- a/daprdocs/layouts/_shortcodes/dapr-latest-version.html +++ b/daprdocs/layouts/_shortcodes/dapr-latest-version.html @@ -1 +1 @@ -{{- if .Get "short" }}1.15{{ else if .Get "long" }}1.15.5{{ else if .Get "cli" }}1.15.1{{ else }}1.15.1{{ end -}} +{{- if .Get "short" }}1.16{{ else if .Get "long" }}1.16.0{{ else if .Get "cli" }}1.16.0{{ else }}1.16.0{{ end -}} diff --git a/daprdocs/static/images/state-management-outbox-steps.png b/daprdocs/static/images/state-management-outbox-steps.png new file mode 100644 index 00000000000..a520b443b97 Binary files /dev/null and b/daprdocs/static/images/state-management-outbox-steps.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png new file mode 100644 index 00000000000..7b2a28561d8 Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-callactivity.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png new file mode 100644 index 00000000000..388fcc8cf27 Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-child-workflow.png differ diff --git a/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png new file mode 100644 index 00000000000..8a2c401f89a Binary files /dev/null and b/daprdocs/static/images/workflow-overview/workflow-multi-app-complex.png differ diff --git a/hugo.yaml b/hugo.yaml index 34953ae0732..62603c1ef25 100644 --- a/hugo.yaml +++ b/hugo.yaml @@ -117,7 +117,8 @@ params: # First one is picked as the Twitter card image if not set on page. # images: [images/project-illustration.png] - + + # Versioning # Menu title if your navbar has a versions selector to access old versions of your site. # This menu appears only if you have at least one [params.versions] set. version_menu: v1.17 (preview) @@ -132,6 +133,11 @@ params: # current doc set. version: v1.17 + # Flag used in the "version-banner" partial to decide whether to display a + # banner on every page indicating that this is an archived version of the docs. + # Set this flag to "true" if you want to display the banner. + archived_version: false + # A link to latest version of the docs. Used in the "version-banner" partial to # point people to the main doc site. url_latest_version: https://docs.dapr.io