Skip to content

Commit 8d1aba9

Browse files
chore: update LGP docs to use new context API (#144)
Co-authored-by: Lauren Hirata Singh <[email protected]>
1 parent 41851f1 commit 8d1aba9

File tree

4 files changed

+36
-42
lines changed

4 files changed

+36
-42
lines changed

src/langgraph-platform/assistants.mdx

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -17,7 +17,8 @@ The LangGraph Cloud API provides several endpoints for creating and managing ass
1717

1818
## Configuration
1919

20-
Assistants build on the LangGraph open source concept of [configuration](/oss/graph-api#configuration).
20+
Assistants build on the LangGraph open source concept of [configuration](/oss/graph-api#runtime-context).
21+
2122
While configuration is available in the open source LangGraph library, assistants are only present in [LangGraph Platform](/langgraph-platform/index). This is due to the fact that assistants are tightly coupled to your deployed graph. Upon deployment, LangGraph Server will automatically create a default assistant for each graph using the graph's default configuration settings.
2223

2324
In practice, an assistant is just an _instance_ of a graph with a specific configuration. Therefore, multiple assistants can reference the same graph but can contain different configurations (e.g. prompts, models, tools). The LangGraph Server API provides several endpoints for creating and managing assistants. See the [API reference](https://langchain-ai.github.io/langgraph/cloud/reference/api/api_ref/) and [this how-to](/langgraph-platform/configuration-cloud) for more details on how to create assistants.

src/langgraph-platform/configuration-cloud.mdx

Lines changed: 25 additions & 32 deletions
Original file line numberDiff line numberDiff line change
@@ -4,21 +4,20 @@ sidebarTitle: Manage assistants
44
---
55
In this guide we will show how to create, configure, and manage an [assistant](/langgraph-platform/assistants).
66

7-
First, as a brief refresher on the concept of configurations, consider the following simple `call_model` node and configuration schema. Observe that this node tries to read and use the `model_name` as defined by the `config` object's `configurable`.
7+
First, as a brief refresher on the concept of context, consider the following simple `call_model` node and context schema.
8+
Observe that this node tries to read and use the `model_name` as defined by the `context` object's `model_name` field.
89

910
<Tabs>
1011
<Tab title="Python">
1112
```python
12-
13-
class ConfigSchema(TypedDict):
13+
class ContextSchema(TypedDict):
1414
model_name: str
1515

16-
builder = StateGraph(AgentState, config_schema=ConfigSchema)
16+
builder = StateGraph(AgentState, context_schema=ContextSchema)
1717

18-
def call_model(state, config):
18+
def call_model(state, runtime: Runtime[ContextSchema]):
1919
messages = state["messages"]
20-
model_name = config.get('configurable', {}).get("model_name", "anthropic")
21-
model = _get_model(model_name)
20+
model = _get_model(runtime.context.get("model_name", "anthropic"))
2221
response = model.invoke(messages)
2322
# We return a list, because this will get added to the existing list
2423
return {"messages": [response]}
@@ -28,17 +27,16 @@ First, as a brief refresher on the concept of configurations, consider the follo
2827
```js
2928
import { Annotation } from "@langchain/langgraph";
3029

31-
const ConfigSchema = Annotation.Root({
30+
const ContextSchema = Annotation.Root({
3231
model_name: Annotation<string>,
3332
system_prompt: Annotation<string>,
3433
});
3534

36-
const builder = new StateGraph(AgentState, ConfigSchema)
35+
const builder = new StateGraph(AgentState, ContextSchema)
3736

38-
function callModel(state: State, config: RunnableConfig) {
37+
function callModel(state: State, runtime: Runtime<ContextSchema>) {
3938
const messages = state.messages;
40-
const modelName = config.configurable?.model_name ?? "anthropic";
41-
const model = _getModel(modelName);
39+
const model = _getModel(runtime.context.model_name ?? "anthropic");
4240
const response = model.invoke(messages);
4341
// We return a list, because this will get added to the existing list
4442
return { messages: [response] };
@@ -55,7 +53,7 @@ For more information on configurations, [see here](/langgraph-platform/configura
5553
5654
To create an assistant, use the [LangGraph SDK](/langgraph-platform/sdk) `create` method. See the [Python](/langgraph-platform/python-sdk#langgraph_sdk.client.AssistantsClient.create) and [JS](/langgraph-platform/js-ts-sdk#create) SDK reference docs for more information.
5755
58-
This example uses the same configuration schema as above, and creates an assistant with `model_name` set to `openai`.
56+
This example uses the same context schema as above, and creates an assistant with `model_name` set to `openai`.
5957
6058
<Tabs>
6159
<Tab title="Python">
@@ -65,7 +63,7 @@ This example uses the same configuration schema as above, and creates an assista
6563
client = get_client(url=<DEPLOYMENT_URL>)
6664
openai_assistant = await client.assistants.create(
6765
# "agent" is the name of a graph we deployed
68-
"agent", config={"configurable": {"model_name": "openai"}}, name="Open AI Assistant"
66+
"agent", context={"model_name": "openai"}, name="Open AI Assistant"
6967
)
7068

7169
print(openai_assistant)
@@ -79,7 +77,7 @@ This example uses the same configuration schema as above, and creates an assista
7977
const openAIAssistant = await client.assistants.create({
8078
graphId: 'agent',
8179
name: "Open AI Assistant",
82-
config: { "configurable": { "model_name": "openai" } },
80+
context: { "model_name": "openai" },
8381
});
8482

8583
console.log(openAIAssistant);
@@ -90,7 +88,7 @@ This example uses the same configuration schema as above, and creates an assista
9088
curl --request POST \
9189
--url <DEPLOYMENT_URL>/assistants \
9290
--header 'Content-Type: application/json' \
93-
--data '{"graph_id":"agent", "config":{"configurable":{"model_name":"openai"}}, "name": "Open AI Assistant"}'
91+
--data '{"graph_id":"agent", "context":{"model_name":"openai"}, "name": "Open AI Assistant"}'
9492
```
9593
</Tab>
9694
</Tabs>
@@ -102,11 +100,9 @@ Output:
102100
"assistant_id": "62e209ca-9154-432a-b9e9-2d75c7a9219b",
103101
"graph_id": "agent",
104102
"name": "Open AI Assistant",
105-
"config": {
106-
"configurable": {
103+
"context": {
107104
"model_name": "openai"
108105
}
109-
},
110106
"metadata": {},
111107
"created_at": "2024-08-31T03:09:10.230718+00:00",
112108
"updated_at": "2024-08-31T03:09:10.230718+00:00",
@@ -239,7 +235,7 @@ To edit the assistant, use the `update` method. This will create a new version o
239235
240236
<Note>
241237
**Note**
242-
You must pass in the ENTIRE config (and metadata if you are using it). The update endpoint creates new versions completely from scratch and does not rely on previous versions.
238+
You must pass in the ENTIRE context (and metadata if you are using it). The update endpoint creates new versions completely from scratch and does not rely on previous versions.
243239
</Note>
244240
245241
For example, to update your assistant's system prompt:
@@ -249,11 +245,9 @@ For example, to update your assistant's system prompt:
249245
```python
250246
openai_assistant_v2 = await client.assistants.update(
251247
openai_assistant["assistant_id"],
252-
config={
253-
"configurable": {
254-
"model_name": "openai",
255-
"system_prompt": "You are an unhelpful assistant!",
256-
}
248+
context={
249+
"model_name": "openai",
250+
"system_prompt": "You are an unhelpful assistant!",
257251
},
258252
)
259253
```
@@ -263,13 +257,12 @@ For example, to update your assistant's system prompt:
263257
const openaiAssistantV2 = await client.assistants.update(
264258
openai_assistant["assistant_id"],
265259
{
266-
config: {
267-
configurable: {
268-
model_name: 'openai',
269-
system_prompt: 'You are an unhelpful assistant!',
270-
},
260+
context: {
261+
model_name: 'openai',
262+
system_prompt: 'You are an unhelpful assistant!',
263+
},
271264
},
272-
});
265+
);
273266
```
274267
</Tab>
275268
<Tab title="CURL">
@@ -278,7 +271,7 @@ For example, to update your assistant's system prompt:
278271
--url <DEPLOYMENT_URL>/assistants/<ASSISTANT_ID> \
279272
--header 'Content-Type: application/json' \
280273
--data '{
281-
"config": {"model_name": "openai", "system_prompt": "You are an unhelpful assistant!"}
274+
"context": {"model_name": "openai", "system_prompt": "You are an unhelpful assistant!"}
282275
}'
283276
```
284277
</Tab>

src/langgraph-platform/setup-app-requirements-txt.mdx

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ Dependencies can optionally be specified in one of the following files: `pyproje
4242
The dependencies below will be included in the image; you can also use them in your code, as long as they are within a compatible version range:
4343

4444
```
45-
langgraph>=0.3.27
45+
langgraph>=0.6.0
4646
langgraph-sdk>=0.1.66
4747
langgraph-checkpoint>=2.0.23
4848
langchain-core>=0.2.38
@@ -114,11 +114,11 @@ from langgraph.graph import StateGraph, END, START
114114
from my_agent.utils.nodes import call_model, should_continue, tool_node # import nodes
115115
from my_agent.utils.state import AgentState # import state
116116

117-
# Define the config
118-
class GraphConfig(TypedDict):
117+
# Define the runtime context
118+
class GraphContext(TypedDict):
119119
model_name: Literal["anthropic", "openai"]
120120

121-
workflow = StateGraph(AgentState, config_schema=GraphConfig)
121+
workflow = StateGraph(AgentState, context_schema=GraphContext)
122122
workflow.add_node("agent", call_model)
123123
workflow.add_node("action", tool_node)
124124
workflow.add_edge(START, "agent")

src/langgraph-platform/setup-pyproject.mdx

Lines changed: 5 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -42,7 +42,7 @@ Dependencies can optionally be specified in one of the following files: `pyproje
4242
The dependencies below will be included in the image; you can also use them in your code, as long as they are within a compatible version range:
4343

4444
```
45-
langgraph>=0.3.27
45+
langgraph>=0.6.0
4646
langgraph-sdk>=0.1.66
4747
langgraph-checkpoint>=2.0.23
4848
langchain-core>=0.2.38
@@ -77,7 +77,7 @@ license = {text = "MIT"}
7777
readme = "README.md"
7878
requires-python = ">=3.9"
7979
dependencies = [
80-
"langgraph>=0.2.0",
80+
"langgraph>=0.6.0",
8181
"langchain-fireworks>=0.1.3"
8282
]
8383

@@ -127,11 +127,11 @@ from langgraph.graph import StateGraph, END, START
127127
from my_agent.utils.nodes import call_model, should_continue, tool_node # import nodes
128128
from my_agent.utils.state import AgentState # import state
129129

130-
# Define the config
131-
class GraphConfig(TypedDict):
130+
# Define the runtime context
131+
class GraphContext(TypedDict):
132132
model_name: Literal["anthropic", "openai"]
133133

134-
workflow = StateGraph(AgentState, config_schema=GraphConfig)
134+
workflow = StateGraph(AgentState, context_schema=GraphContext)
135135
workflow.add_node("agent", call_model)
136136
workflow.add_node("action", tool_node)
137137
workflow.add_edge(START, "agent")

0 commit comments

Comments
 (0)