diff --git a/src/langsmith/log-traces-to-project.mdx b/src/langsmith/log-traces-to-project.mdx
index 68ba0f937d..581f8610fb 100644
--- a/src/langsmith/log-traces-to-project.mdx
+++ b/src/langsmith/log-traces-to-project.mdx
@@ -135,3 +135,336 @@ await rt.patchRun();
```
+
+## Set the destination workspace dynamically
+
+If you need to dynamically route traces to different LangSmith workspaces based on runtime configuration (e.g., routing different users or tenants to separate workspaces), you can use the `tracing_context` context manager (Python) or `withTracing` function (TypeScript) with workspace-specific LangSmith clients.
+
+This approach is useful for multi-tenant applications where you want to isolate traces by customer, environment, or team at the workspace level.
+
+### Prerequisites
+
+- A LangSmith API key with access to multiple workspaces
+- The workspace IDs for each target workspace
+
+### Generic cross-workspace tracing
+
+Use this approach for general applications where you want to dynamically route traces to different workspaces based on runtime logic (e.g., customer ID, tenant, or environment).
+
+**Key components:**
+
+1. Initialize separate `Client` instances for each workspace with their respective `workspace_id`
+2. Use `tracing_context` (Python) or `withTracing` (TypeScript) to switch between clients at runtime
+3. Pass workspace configuration through your application's runtime config
+
+
+
+```python Python
+import os
+
+from langsmith import Client, traceable, tracing_context
+
+# API key with access to multiple workspaces
+api_key = os.getenv("LS_CROSS_WORKSPACE_KEY")
+
+# Initialize clients for different workspaces
+workspace_a_client = Client(
+ api_key=api_key,
+ api_url="https://api.smith.langchain.com",
+ workspace_id="" # e.g., "abc123..."
+)
+
+workspace_b_client = Client(
+ api_key=api_key,
+ api_url="https://api.smith.langchain.com",
+ workspace_id="" # e.g., "def456..."
+)
+
+# Example: Route based on customer ID
+def get_workspace_client(customer_id: str):
+ """Route to appropriate workspace based on customer."""
+ if customer_id.startswith("premium_"):
+ return workspace_a_client, "premium-customer-traces"
+ else:
+ return workspace_b_client, "standard-customer-traces"
+
+@traceable
+def process_request(data: dict, customer_id: str):
+ """Process a customer request with workspace-specific tracing."""
+ # Your business logic here
+ return {"status": "success", "data": data}
+
+# Use tracing_context to route to the appropriate workspace
+def handle_customer_request(customer_id: str, request_data: dict):
+ client, project_name = get_workspace_client(customer_id)
+
+ # Everything within this context will be traced to the selected workspace
+ with tracing_context(enabled=True, client=client, project_name=project_name):
+ result = process_request(request_data, customer_id)
+
+ return result
+
+# Example usage
+handle_customer_request("premium_user_123", {"query": "Hello"})
+handle_customer_request("standard_user_456", {"query": "Hi"})
+```
+
+```typescript TypeScript
+import { Client } from "langsmith";
+import { traceable } from "langsmith/traceable";
+import { withTracing } from "langsmith/singletons";
+
+// API key with access to multiple workspaces
+const apiKey = process.env.LS_CROSS_WORKSPACE_KEY;
+
+// Initialize clients for different workspaces
+const workspaceAClient = new Client({
+ apiKey: apiKey,
+ apiUrl: "https://api.smith.langchain.com",
+ workspaceId: "", // e.g., "abc123..."
+});
+
+const workspaceBClient = new Client({
+ apiKey: apiKey,
+ apiUrl: "https://api.smith.langchain.com",
+ workspaceId: "", // e.g., "def456..."
+});
+
+// Example: Route based on customer ID
+function getWorkspaceClient(customerId: string): {
+ client: Client;
+ projectName: string;
+} {
+ if (customerId.startsWith("premium_")) {
+ return {
+ client: workspaceAClient,
+ projectName: "premium-customer-traces",
+ };
+ } else {
+ return {
+ client: workspaceBClient,
+ projectName: "standard-customer-traces",
+ };
+ }
+}
+
+const processRequest = traceable(
+  async (data: Record<string, unknown>, customerId: string) => {
+ // Your business logic here
+ return { status: "success", data };
+ },
+ { name: "process_request" }
+);
+
+// Use withTracing to route to the appropriate workspace
+async function handleCustomerRequest(
+ customerId: string,
+  requestData: Record<string, unknown>
+) {
+ const { client, projectName } = getWorkspaceClient(customerId);
+
+ // Everything within this context will be traced to the selected workspace
+ return await withTracing(
+ async () => {
+ return await processRequest(requestData, customerId);
+ },
+ {
+ client,
+ project_name: projectName,
+ enabled: true,
+ }
+ );
+}
+
+// Example usage
+await handleCustomerRequest("premium_user_123", { query: "Hello" });
+await handleCustomerRequest("standard_user_456", { query: "Hi" });
+```
+
+
+
+### Override default workspace for LangSmith deployments
+
+When deploying agents to LangSmith, you can override the default workspace that traces are sent to by using a graph lifespan context manager. This is useful when you want to route traces from a deployed agent to different workspaces based on runtime configuration passed through the `config` parameter.
+
+
+
+```python Python
+import os
+import contextlib
+from typing_extensions import TypedDict
+from langgraph.graph import StateGraph
+from langchain_core.runnables import RunnableConfig
+from langsmith import Client, tracing_context
+
+# API key with access to multiple workspaces
+api_key = os.getenv("LS_CROSS_WORKSPACE_KEY")
+
+# Initialize clients for different workspaces
+workspace_a_client = Client(
+ api_key=api_key,
+ api_url="https://api.smith.langchain.com",
+ workspace_id=""
+)
+
+workspace_b_client = Client(
+ api_key=api_key,
+ api_url="https://api.smith.langchain.com",
+ workspace_id=""
+)
+
+# Define configuration schema for workspace routing
+class Configuration(TypedDict):
+ workspace_id: str
+
+# Define the graph state
+class State(TypedDict):
+ response: str
+
+def greeting(state: State, config: RunnableConfig) -> State:
+ """Generate a workspace-specific greeting."""
+ workspace_id = config.get("configurable", {}).get("workspace_id", "workspace_a")
+
+ if workspace_id == "workspace_a":
+ response = "Hello from Workspace A!"
+ elif workspace_id == "workspace_b":
+ response = "Hello from Workspace B!"
+ else:
+ response = "Hello from the default workspace!"
+
+ return {"response": response}
+
+# Build the base graph
+base_graph = (
+ StateGraph(state_schema=State, config_schema=Configuration)
+ .add_node("greeting", greeting)
+ .set_entry_point("greeting")
+ .set_finish_point("greeting")
+ .compile()
+)
+
+@contextlib.asynccontextmanager
+async def graph(config):
+ """Dynamically route traces to different workspaces based on configuration."""
+ # Extract workspace_id from the configuration
+ workspace_id = config.get("configurable", {}).get("workspace_id", "workspace_a")
+
+ # Route to the appropriate workspace
+ if workspace_id == "workspace_a":
+ client = workspace_a_client
+ project_name = "production-traces"
+ elif workspace_id == "workspace_b":
+ client = workspace_b_client
+ project_name = "development-traces"
+ else:
+ client = workspace_a_client
+ project_name = "default-traces"
+
+ # Apply the tracing context for the selected workspace
+ with tracing_context(enabled=True, client=client, project_name=project_name):
+ yield base_graph
+
+# Usage: enter the async context manager, then invoke the yielded graph, e.g.
+# async with graph({"configurable": {"workspace_id": "workspace_a"}}) as g:
+#     await g.ainvoke({"response": ""})
+```
+
+```typescript TypeScript
+import { Client } from "langsmith";
+import { StateGraph, Annotation } from "@langchain/langgraph";
+import { withTracing } from "langsmith/singletons";
+
+// API key with access to multiple workspaces
+const apiKey = process.env.LS_CROSS_WORKSPACE_KEY;
+
+// Initialize clients for different workspaces
+const workspaceAClient = new Client({
+ apiKey: apiKey,
+ apiUrl: "https://api.smith.langchain.com",
+ workspaceId: "", // e.g., "abc123..."
+});
+
+const workspaceBClient = new Client({
+ apiKey: apiKey,
+ apiUrl: "https://api.smith.langchain.com",
+ workspaceId: "", // e.g., "def456..."
+});
+
+// Define the graph state
+const StateAnnotation = Annotation.Root({
+  response: Annotation<string>,
+});
+
+async function greeting(state: typeof StateAnnotation.State, config: any) {
+ const workspaceId = config?.configurable?.workspace_id || "workspace_a";
+
+ let response: string;
+ if (workspaceId === "workspace_a") {
+ response = "Hello from Workspace A!";
+ } else if (workspaceId === "workspace_b") {
+ response = "Hello from Workspace B!";
+ } else {
+ response = "Hello from the default workspace!";
+ }
+
+ return { response };
+}
+
+// Build the base graph
+const baseGraph = new StateGraph(StateAnnotation)
+ .addNode("greeting", greeting)
+ .addEdge("__start__", "greeting")
+ .addEdge("greeting", "__end__")
+ .compile();
+
+// Graph lifespan wrapper for workspace routing. The returned handle runs
+// each invocation inside the workspace-specific tracing context — wrapping
+// only the graph *creation* in withTracing would not affect invocations
+// made after the function returns.
+async function graph(config: any) {
+  const workspaceId = config?.configurable?.workspace_id || "workspace_a";
+
+  let client: Client;
+  let projectName: string;
+
+  // Route to the appropriate workspace
+  if (workspaceId === "workspace_a") {
+    client = workspaceAClient;
+    projectName = "production-traces";
+  } else if (workspaceId === "workspace_b") {
+    client = workspaceBClient;
+    projectName = "development-traces";
+  } else {
+    client = workspaceAClient;
+    projectName = "default-traces";
+  }
+
+  // Apply the tracing context for the selected workspace on every invocation
+  return {
+    invoke: async (input: typeof StateAnnotation.State) =>
+      await withTracing(
+        async () => await baseGraph.invoke(input, config),
+        { client, project_name: projectName, enabled: true }
+      ),
+  };
+}
+
+// Usage: Invoke with different workspace configurations
+// const graphInstance = await graph({ configurable: { workspace_id: "workspace_a" } });
+// await graphInstance.invoke({ response: "" });
+```
+
+
+
+### Key points
+
+- **Generic cross-workspace tracing**: Use `tracing_context` (Python) or `withTracing` (TypeScript) to dynamically route traces to different workspaces based on your application logic
+- **LangSmith deployment override**: Use a graph lifespan context manager to override the default deployment workspace based on runtime configuration passed through the `config` parameter
+- The `tracing_context` context manager (Python) and `withTracing` function (TypeScript) override the default tracing configuration for everything within their scope
+- Each `Client` instance maintains its own connection to a specific workspace
+- You can customize both the workspace and project name for each route
+- This pattern works with any LangSmith-compatible tracing (LangChain, OpenAI, custom functions, etc.)
+
+
+When deploying with cross-workspace tracing, ensure your API key has the necessary permissions for all target workspaces. For LangSmith deployments, you must add an API key with cross-workspace access to your environment variables (e.g., `LS_CROSS_WORKSPACE_KEY`) to override the default service key generated by your deployment.
+