@@ -1473,79 +1473,94 @@ This is updated frequently but right now this is the most exhaustive list of typ
14731473## Code Examples
14741474
14751475### 1. Basic RAG Pipeline with LCEL
1476- ``` python
1477- from langchain_community.vectorstores import FAISS
1478- from langchain_core.output_parsers import StrOutputParser
1479- from langchain_core.prompts import ChatPromptTemplate
1480- from langchain_core.runnables import RunnablePassthrough
1481- from langchain_openai import ChatOpenAI, OpenAIEmbeddings
1482-
1483- vectorstore = FAISS .from_texts([" harrison worked at kensho" ], embedding = OpenAIEmbeddings())
1484- retriever = vectorstore.as_retriever()
1485- template = """ Answer the question based only on the following context:
1486- {context}
1487-
1488- Question: {question}
1489- """
1490- prompt = ChatPromptTemplate.from_template(template)
1491- model = ChatOpenAI()
1492-
1493- retrieval_chain = (
1494- {" context" : retriever, " question" : RunnablePassthrough()}
1495- | prompt
1496- | model
1497- | StrOutputParser()
1498- )
1499-
1500- retrieval_chain.invoke(" where did harrison work?" )
1501- ```
1476+
1477+ ??? success "View Code Example"
1478+
1479+
1480+ **Difficulty:** 🟢 Easy | **Tags:** `Code Example` | **Asked by:** Code Pattern
1481+ ```python
1482+ from langchain_community.vectorstores import FAISS
1483+ from langchain_core.output_parsers import StrOutputParser
1484+ from langchain_core.prompts import ChatPromptTemplate
1485+ from langchain_core.runnables import RunnablePassthrough
1486+ from langchain_openai import ChatOpenAI, OpenAIEmbeddings
1487+
1488+ vectorstore = FAISS.from_texts(["harrison worked at kensho"], embedding=OpenAIEmbeddings())
1489+ retriever = vectorstore.as_retriever()
1490+ template = """Answer the question based only on the following context:
1491+ {context}
1492+
1493+ Question: {question}
1494+ """
1495+ prompt = ChatPromptTemplate.from_template(template)
1496+ model = ChatOpenAI()
1497+
1498+ retrieval_chain = (
1499+ {"context": retriever, "question": RunnablePassthrough()}
1500+ | prompt
1501+ | model
1502+ | StrOutputParser()
1503+ )
1504+
1505+ retrieval_chain.invoke("where did harrison work?")
1506+ ```
15021507
15031508### 2. Custom Agent with Tool Use
1504- ``` python
1505- from langchain.agents import tool
1506- from langchain_openai import ChatOpenAI
1507- from langchain.agents import AgentExecutor, create_tool_calling_agent
1508- from langchain_core.prompts import ChatPromptTemplate
15091509
1510- @tool
1511- def multiply (first_int : int , second_int : int ) -> int :
1512- """ Multiply two integers together."""
1513- return first_int * second_int
1510+ ??? success "View Code Example"
1511+
15141512
1515- tools = [multiply]
1516- llm = ChatOpenAI(model = " gpt-3.5-turbo-0125" )
1513+ **Difficulty:** 🟢 Easy | **Tags:** `Code Example` | **Asked by:** Code Pattern
1514+ ```python
1515+ from langchain.agents import tool
1516+ from langchain_openai import ChatOpenAI
1517+ from langchain.agents import AgentExecutor, create_tool_calling_agent
1518+ from langchain_core.prompts import ChatPromptTemplate
15171519
1518- prompt = ChatPromptTemplate.from_messages([
1519- (" system" , " You are a helpful assistant" ),
1520- (" user" , " {input} " ),
1521- (" placeholder" , " {agent_scratchpad} " ),
1522- ])
1520+ @tool
1521+ def multiply(first_int: int, second_int: int) -> int:
1522+ """Multiply two integers together."""
1523+ return first_int * second_int
15231524
1524- agent = create_tool_calling_agent(llm, tools, prompt)
1525- agent_executor = AgentExecutor( agent = agent, tools = tools, verbose = True )
1525+ tools = [multiply]
1526+ llm = ChatOpenAI(model="gpt-3.5-turbo-0125")
15261527
1527- agent_executor.invoke({" input" : " what is 5 times 8?" })
1528- ```
1528+ prompt = ChatPromptTemplate.from_messages([
1529+ ("system", "You are a helpful assistant"),
1530+ ("user", "{input}"),
1531+ ("placeholder", "{agent_scratchpad}"),
1532+ ])
1533+
1534+ agent = create_tool_calling_agent(llm, tools, prompt)
1535+ agent_executor = AgentExecutor(agent=agent, tools=tools, verbose=True)
1536+
1537+ agent_executor.invoke({"input": "what is 5 times 8?"})
1538+ ```
15291539
15301540### 3. Structured Output Extraction
1531- ``` python
1532- from typing import List
1533- from langchain_core.pydantic_v1 import BaseModel, Field
1534- from langchain_openai import ChatOpenAI
15351541
1536- class Person (BaseModel ):
1537- name: str = Field(description = " The name of the person" )
1538- age: int = Field(description = " The age of the person" )
1542+ ??? success "View Code Example"
15391543
1540- class People (BaseModel ):
1541- people: List[Person]
15421544
1543- llm = ChatOpenAI()
1544- structured_llm = llm.with_structured_output(People)
1545+ **Difficulty:** 🟢 Easy | **Tags:** `Code Example` | **Asked by:** Code Pattern
1546+ ```python
1547+ from typing import List
1548+ from langchain_core.pydantic_v1 import BaseModel, Field
1549+ from langchain_openai import ChatOpenAI
1550+
1551+ class Person(BaseModel):
1552+ name: str = Field(description="The name of the person")
1553+ age: int = Field(description="The age of the person")
15451554
1546- text = " Alice is 30 years old and Bob is 25."
1547- structured_llm.invoke(text)
1548- ```
1555+ class People(BaseModel):
1556+ people: List[Person]
1557+
1558+ llm = ChatOpenAI()
1559+ structured_llm = llm.with_structured_output(People)
1560+
1561+ text = "Alice is 30 years old and Bob is 25."
1562+ structured_llm.invoke(text)
1563+ ```
15491564
15501565---
15511566
0 commit comments