I have the following program, and I'd love to be able to showcase this (or something similar) with LangTrace!
const { ChatOpenAI, OpenAIEmbeddings } = require("@langchain/openai");
const { MemoryVectorStore } = require("langchain/vectorstores/memory");
const { ChatPromptTemplate } = require("@langchain/core/prompts");
const { StringOutputParser } = require("@langchain/core/output_parsers");
const { RunnablePassthrough, RunnableSequence } = require("@langchain/core/runnables");

async function main() {
  // These model names suggest a local OpenAI-compatible server (e.g. Ollama),
  // so the client has to be pointed at that endpoint (for example via OPENAI_BASE_URL).
  const llm = new ChatOpenAI({ model: 'qwen2.5:0.5b' });
  const embeddings = new OpenAIEmbeddings({ model: 'all-minilm:33m' });

  const monsters = [
    "Goblin: Weak but numerous, attacks in groups.",
    "Orc: Strong and aggressive, fights head-on.",
    "Skeleton: Undead warrior, immune to poison but fragile.",
    "Giant Spider: Webs players, poisonous bite.",
    "Dragon: Powerful and magical, breathes fire.",
    "Keegorg: Senior Solution Architect at Docker",
  ].map((pageContent) => ({ pageContent, metadata: {} }));

  const vectorStore = new MemoryVectorStore(embeddings);

  // Create embeddings for the monsters
  await vectorStore.addDocuments(monsters);

  // Retrieve only one monster
  const retriever = vectorStore.asRetriever(1);

  // Create prompt template
  const ANSWER_PROMPT = ChatPromptTemplate.fromTemplate(
    `You are a monster expert, and the context includes relevant monsters. Answer the user concisely only using the provided context. If you don't know the answer, just say that you don't know.
context: {context}
Question: "{question}"
Answer:`
  );

  // Documents in LangChain JS expose the camelCase `pageContent` property
  // (the snake_case `page_content` is the Python naming).
  function onlyContent(docs) {
    return docs.map((doc) => doc.pageContent).join('\n\n');
  }

  const chain = RunnableSequence.from([
    {
      context: retriever.pipe(onlyContent),
      question: new RunnablePassthrough(),
    },
    ANSWER_PROMPT,
    llm,
    new StringOutputParser(),
  ]);

  // Pass the user's question to the sequence
  const response = await chain.invoke("Who is Keegorg?");
  console.log(response);
}

main();
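
For what it's worth, here is roughly how I'd expect the instrumentation to slot in at the top of the script. This is only a sketch based on my reading of the Langtrace docs: it assumes the @langtrase/typescript-sdk package and its init() entry point, and that LANGTRACE_API_KEY is set in the environment; the exact option names may differ.

// Sketch only: assumes @langtrase/typescript-sdk and its init() entry point.
// init() is called before the LangChain/OpenAI modules are exercised so the
// tracing instrumentation can wrap their calls.
const Langtrace = require("@langtrase/typescript-sdk");

Langtrace.init({ api_key: process.env.LANGTRACE_API_KEY });

// ...the rest of the program above stays unchanged; each chain.invoke() call
// would then (hopefully) show up as a trace in the Langtrace UI.

If init() takes different options (batching, custom endpoint, etc.) that part is easy to adjust; the main point is initializing the tracer before the chain runs.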