Skip to content

Commit da6a80c

Browse files
committed
add node gen ai sample app
1 parent 19c187a commit da6a80c

File tree

5 files changed

+9594
-0
lines changed

5 files changed

+9594
-0
lines changed

.github/workflows/node-sample-app-s3-deploy.yml

Lines changed: 3 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -3,6 +3,9 @@
33

44
name: Sample App Deployment - Node S3
55
on:
6+
push:
7+
branches:
8+
- add-genai-sample-apps
69
workflow_dispatch: # be able to run the workflow on demand
710

811
permissions:
Lines changed: 28 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,28 @@
1+
// Traceloop / OpenTelemetry bootstrap for the sample app.
// This module must be required before any LangChain code runs elsewhere so
// the SDK can patch the modules handed to it via `instrumentModules`.
//
// NOTE: the previous version also required `@opentelemetry/api`,
// `@opentelemetry/instrumentation`, `@opentelemetry/instrumentation-http`,
// and `LangChainInstrumentation`, and derived a `tracerProvider` — none of
// those were ever used, so the dead imports have been removed.
const AgentsModule = require('langchain/agents');
const ChainsModule = require('langchain/chains');
const RunnableModule = require('@langchain/core/runnables');
const ToolsModule = require('@langchain/core/tools');
const VectorStoresModule = require('@langchain/core/vectorstores');

const traceloop = require('@traceloop/node-server-sdk');

// Initialize tracing. The LangChain modules are passed explicitly because
// the SDK cannot always resolve them automatically (e.g. under bundlers).
traceloop.initialize({
  appName: "myTestApp",
  instrumentModules: {
    langchain: {
      runnablesModule: RunnableModule,
      toolsModule: ToolsModule,
      chainsModule: ChainsModule,
      agentsModule: AgentsModule,
      vectorStoreModule: VectorStoresModule,
    },
  },
});
Lines changed: 64 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,64 @@
1+
// HTTP server wiring for the GenAI sample app: an Express app fronting an
// Anthropic Claude model hosted on AWS Bedrock, invoked through LangChain.
const express = require('express');
const { BedrockChat } = require("@langchain/community/chat_models/bedrock");
const { ChatPromptTemplate } = require("@langchain/core/prompts");
const traceloop = require("@traceloop/node-server-sdk");
const logger = require('pino')();

// Listen port is configurable for CI/deploy environments; defaults to 8000.
const PORT = Number.parseInt(process.env.SAMPLE_APP_PORT || '8000', 10);

const app = express();
app.use(express.json());

// Bedrock-hosted Claude 3 Sonnet. Credentials are read from the environment
// so no secrets live in source control.
const llm = new BedrockChat({
  model: "anthropic.claude-3-sonnet-20240229-v1:0",
  region: "us-east-1",
  credentials: {
    accessKeyId: process.env.BEDROCK_AWS_ACCESS_KEY_ID,
    secretAccessKey: process.env.BEDROCK_AWS_SECRET_ACCESS_KEY,
  },
  temperature: 0.7,
});

// Single-turn prompt: a fixed system instruction plus the user's `{input}`.
const prompt = ChatPromptTemplate.fromMessages([
  [
    "system",
    "You are a helpful assistant. Provide a helpful response to the following user input.",
  ],
  ["human", "{input}"],
]);

// Runnable pipeline: prompt formatting feeding straight into the model.
const chain = prompt.pipe(llm);
31+
// GET /health — liveness probe for deploy tooling; always reports healthy.
app.get('/health', (req, res) => res.json({ status: 'healthy' }));
34+
35+
// POST /ai-chat — run the user's message through the prompt -> Bedrock chain.
// Request body: { "message": string }.
// Responses: 200 { response }, 400 if message is missing, 500 on model error.
app.post('/ai-chat', async (req, res) => {
  const { message } = req.body;

  // Guard clause: reject missing/empty input before touching the model.
  if (!message) {
    return res.status(400).json({ error: 'Message is required' });
  }

  try {
    logger.info(`Question asked: ${message}`);

    // Wrap the model call in Traceloop workflow/task spans so the request
    // appears as a named trace in the observability backend.
    const response = await traceloop.withWorkflow({ name: "sample_chat" }, () => {
      return traceloop.withTask({ name: "parent_task" }, () => {
        // The prompt template only declares `{input}`; the previously passed
        // `input_language` / `output_language` keys were unused leftovers
        // from a translation example and have been removed.
        return chain.invoke({ input: message });
      });
    });

    res.json({ response: response.content });
  } catch (error) {
    // Log the detail server-side; return a generic message to the client.
    logger.error(`Error processing request: ${error.message}`);
    res.status(500).json({ error: 'Internal server error' });
  }
});
61+
62+
// Bind the HTTP server and log once the socket is listening.
app.listen(PORT, () => logger.info(`GenAI service listening on port ${PORT}`));

0 commit comments

Comments
 (0)