Skip to content

Commit e984d78

Browse files
authored
Merge pull request #441 from DefangLabs/linda-agentic-langchain
Add LangGraph (LangChain) Agent sample
2 parents a8d3d4b + ba51bda commit e984d78

File tree

16 files changed

+1966
-0
lines changed

16 files changed

+1966
-0
lines changed

.github/workflows/deploy-changed-samples.yml

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -101,6 +101,7 @@ jobs:
101101
TEST_SLACK_CHANNEL_ID: ${{ secrets.TEST_SLACK_CHANNEL_ID }}
102102
TEST_SLACK_TOKEN: ${{ secrets.TEST_SLACK_TOKEN }}
103103
TEST_SHARED_SECRETS: ${{ secrets.TEST_SHARED_SECRETS}}
104+
TEST_TAVILY_API_KEY: ${{ secrets.TEST_TAVILY_API_KEY }}
104105
TEST_ALLOWED_HOSTS: ${{ secrets.TEST_ALLOWED_HOSTS }}
105106
run: |
106107
SAMPLES=$(sed 's|^samples/||' changed_samples.txt | paste -s -d ',' -)
Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
FROM mcr.microsoft.com/devcontainers/typescript-node:22-bookworm
Lines changed: 11 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,11 @@
1+
{
2+
"build": {
3+
"dockerfile": "Dockerfile",
4+
"context": ".."
5+
},
6+
"features": {
7+
"ghcr.io/defanglabs/devcontainer-feature/defang-cli:1.0.4": {},
8+
"ghcr.io/devcontainers/features/docker-in-docker:2": {},
9+
"ghcr.io/devcontainers/features/aws-cli:1": {}
10+
}
11+
}
Lines changed: 25 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,25 @@
1+
name: Deploy
2+
3+
on:
4+
push:
5+
branches:
6+
- main
7+
8+
jobs:
9+
deploy:
10+
environment: playground
11+
runs-on: ubuntu-latest
12+
permissions:
13+
contents: read
14+
id-token: write
15+
16+
steps:
17+
- name: Checkout Repo
18+
uses: actions/checkout@v4
19+
20+
- name: Deploy
21+
uses: DefangLabs/[email protected]
22+
with:
23+
config-env-vars: TAVILY_API_KEY
24+
env:
25+
TAVILY_API_KEY: ${{ secrets.TAVILY_API_KEY }}
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
.env
2+
node_modules/
Lines changed: 57 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,57 @@
1+
# Agentic LangGraph
2+
3+
[![1-click-deploy](https://raw.githubusercontent.com/DefangLabs/defang-assets/main/Logos/Buttons/SVG/deploy-with-defang.svg)](https://portal.defang.dev/redirect?url=https%3A%2F%2Fgithub.com%2Fnew%3Ftemplate_name%3Dsample-agentic-langgraph-template%26template_owner%3DDefangSamples)
4+
5+
This sample demonstrates a LangGraph Agent application deployed with Defang. You can customize the agent's tools as needed. For example, it includes a Tavily Search tool for performing search queries, which requires a `TAVILY_API_KEY` (see [Configuration](#configuration) for setup details).
6+
7+
## Prerequisites
8+
9+
1. Download [Defang CLI](https://github.com/DefangLabs/defang)
10+
2. (Optional) If you are using [Defang BYOC](https://docs.defang.io/docs/concepts/defang-byoc), authenticate with your cloud provider account
11+
3. (Optional for local development) [Docker CLI](https://docs.docker.com/engine/install/)
12+
13+
## Development
14+
15+
To run the application locally, you can use the following command:
16+
17+
```bash
18+
docker compose -f compose.dev.yaml up --build
19+
```
20+
21+
## Configuration
22+
23+
For this sample, you will need to provide the following [configuration](https://docs.defang.io/docs/concepts/configuration):
24+
25+
> Note that if you are using the 1-click deploy option, you can set these values as secrets in your GitHub repository and the action will automatically deploy them for you.
26+
27+
### `TAVILY_API_KEY`
28+
A Tavily API key for accessing [Tavily Search](https://www.tavily.com/).
29+
```bash
30+
defang config set TAVILY_API_KEY
31+
```
32+
33+
## Deployment
34+
35+
> [!NOTE]
36+
> Download [Defang CLI](https://github.com/DefangLabs/defang)
37+
38+
### Defang Playground
39+
40+
Deploy your application to the Defang Playground by opening up your terminal and typing:
41+
```bash
42+
defang compose up
43+
```
44+
45+
### BYOC
46+
47+
If you want to deploy to your own cloud account, you can [use Defang BYOC](https://docs.defang.io/docs/tutorials/deploy-to-your-cloud).
48+
49+
---
50+
51+
Title: Agentic LangGraph
52+
53+
Short Description: A LangGraph Agent application that can use tools, deployed with Defang.
54+
55+
Tags: Agent, LangGraph, LangChain, AI, OpenAI, Tavily
56+
57+
Languages: TypeScript
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
node_modules
2+
npm-debug.log
Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
node_modules
2+
.env
Lines changed: 19 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,19 @@
1+
2+
FROM node:22-bookworm-slim
3+
4+
RUN apt-get update -qq \
5+
&& apt-get install -y curl \
6+
&& apt-get clean \
7+
&& rm -rf /var/lib/apt/lists/*
8+
9+
WORKDIR /app
10+
11+
COPY package*.json ./
12+
13+
RUN npm install --omit=dev
14+
15+
COPY . .
16+
17+
EXPOSE 3000
18+
19+
CMD ["npm", "start"]
Lines changed: 74 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,74 @@
1+
2+
import { TavilySearch } from "@langchain/tavily";
3+
import { ChatOpenAI } from "@langchain/openai";
4+
import { HumanMessage, AIMessage } from "@langchain/core/messages";
5+
import { ToolNode } from "@langchain/langgraph/prebuilt";
6+
import { StateGraph, MessagesAnnotation } from "@langchain/langgraph";
7+
8+
// Define the tools for the agent to use
9+
const tools = [new TavilySearch({ maxResults: 3 })];
10+
const toolNode = new ToolNode(tools);
11+
12+
const baseUrl = process.env.LLM_URL || "https://api.openai.com/v1/";
13+
console.log("Using LLM base URL:", baseUrl);
14+
const baseModel = process.env.LLM_MODEL || "gpt-4o-mini";
15+
console.log("Using LLM model:", baseModel);
16+
// Create a model and give it access to the tools
17+
const model = new ChatOpenAI({
18+
model: baseModel,
19+
temperature: 0.7,
20+
configuration: {
21+
baseURL: baseUrl,
22+
},
23+
}).bindTools(tools);
24+
25+
// Define the function that determines whether to continue or not
26+
function shouldContinue({ messages }: typeof MessagesAnnotation.State) {
27+
const lastMessage = messages[messages.length - 1] as AIMessage;
28+
29+
// If the LLM makes a tool call, then we route to the "tools" node
30+
if (lastMessage.tool_calls?.length) {
31+
return "tools";
32+
}
33+
// Otherwise, we stop (reply to the user) using the special "__end__" node
34+
return "__end__";
35+
}
36+
37+
// Define the function that calls the model
38+
async function callModel(state: typeof MessagesAnnotation.State) {
39+
const response = await model.invoke(state.messages);
40+
41+
// We return a list, because this will get added to the existing list
42+
return { messages: [response] };
43+
}
44+
45+
// Define a new graph
46+
const workflow = new StateGraph(MessagesAnnotation)
47+
.addNode("agent", callModel)
48+
.addEdge("__start__", "agent") // __start__ is a special name for the entrypoint
49+
.addNode("tools", toolNode)
50+
.addEdge("tools", "agent")
51+
.addConditionalEdges("agent", shouldContinue);
52+
53+
// Finally, we compile it into a LangChain Runnable.
54+
const app = workflow.compile();
55+
56+
// Helper function to get agent output for a given input and optional previous messages
57+
const getAgentOutput = async (input: string, previousMessages: (HumanMessage | AIMessage)[] = []) => {
58+
59+
const initialState = {
60+
messages: [...previousMessages, new HumanMessage(input)],
61+
};
62+
63+
const finalState = await app.invoke(initialState);
64+
return {
65+
content: finalState.messages[finalState.messages.length - 1].content,
66+
messages: finalState.messages,
67+
};
68+
};
69+
70+
// Helper function to get agent output as a readablestring
71+
export const getAgentOutputAsString = async (input: string, previousMessages: (HumanMessage | AIMessage)[] = []) => {
72+
return getAgentOutput(input, previousMessages).then(result => result.content);
73+
};
74+

0 commit comments

Comments
 (0)