@@ -373,6 +373,283 @@ for m in messages["messages"]:
373373 </div >
374374</div >
375375
376+ ### Creating Agents
377+
378+ The [Agentic Flows](#agentic-flows-with-haystack-vs-langgraph) walkthrough above showed how to assemble an agent loop manually from pipeline primitives. Haystack also provides a high-level `Agent` class that wraps the full loop - LLM calls, tool invocation, and iteration - into a single component. LangChain offers an equivalent shortcut through `create_agent` in `langchain.agents` (the successor to LangGraph's `create_react_agent` prebuilt). Both produce a ReAct-style agent that handles tool calling and multi-step reasoning automatically.
379+
380+ <div className = " code-comparison" >
381+ <div className = " code-comparison__column" >
382+ <CodeBlock language = " python" title = " Haystack" >{ ` # pip install haystack-ai anthropic-haystack
383+
384+ from haystack.components.agents import Agent
385+ from haystack_integrations.components.generators.anthropic import AnthropicChatGenerator
386+ from haystack.dataclasses import ChatMessage
387+ from haystack.tools import tool
388+
389+ @tool
390+ def multiply(a: int, b: int) -> int:
391+ """Multiply \` a\` and \` b\` ."""
392+ return a * b
393+
394+ @tool
395+ def add(a: int, b: int) -> int:
396+ """Add \` a\` and \` b\` ."""
397+ return a + b
398+
399+ # Create an agent - the agentic loop is handled automatically
400+ agent = Agent(
401+ chat_generator=AnthropicChatGenerator(
402+ model="claude-sonnet-4-5-20250929",
403+ generation_kwargs={"temperature": 0},
404+ ),
405+ tools=[multiply, add],
406+ system_prompt="You are a helpful assistant that performs arithmetic.",
407+ )
408+
409+ result = agent.run(messages=[
410+ ChatMessage.from_user("What is 3 multiplied by 7, then add 5?")
411+ ])
412+ print(result["messages"][-1].text) ` } </CodeBlock >
413+ </div >
414+ <div className = " code-comparison__column" >
415+ <CodeBlock language = " python" title = " LangGraph + LangChain" >{ ` # pip install langchain-anthropic langgraph
416+
417+ from langchain_anthropic import ChatAnthropic
418+ from langchain_core.tools import tool
419+ from langchain.agents import create_agent
420+ from langchain_core.messages import HumanMessage, SystemMessage
421+
422+ @tool
423+ def multiply(a: int, b: int) -> int:
424+ """Multiply \` a\` and \` b\` ."""
425+ return a * b
426+
427+ @tool
428+ def add(a: int, b: int) -> int:
429+ """Add \` a\` and \` b\` ."""
430+ return a + b
431+
432+ # Create an agent - the agentic loop is handled automatically
433+ model = ChatAnthropic(
434+ model="claude-sonnet-4-5-20250929",
435+ temperature=0,
436+ )
437+ agent = create_agent(
438+ model,
439+ tools=[multiply, add],
440+ system_prompt=SystemMessage(
441+ content="You are a helpful assistant that performs arithmetic."
442+ ),
443+ )
444+
445+ result = agent.invoke({
446+ "messages": [HumanMessage(content="What is 3 multiplied by 7, then add 5?")]
447+ })
448+ print(result["messages"][-1].content) ` } </CodeBlock >
449+ </div >
450+ </div >
451+
452+ ### Connecting to Document Stores
453+
454+ Document stores are the foundation of retrieval-augmented generation (RAG). In Haystack, document stores integrate natively with pipeline components like Retrievers and Prompt Builders via explicit typed connections. LangChain centers retrieval on its vector store abstraction, with retrieval chains composed using LCEL (LangChain Expression Language).
455+
456+ Both frameworks offer in-memory stores for prototyping and a wide range of production backends (Elasticsearch, Qdrant, Weaviate, Pinecone, and more) via integrations.
457+
458+ **Step 1: Create a document store and add documents**
459+
460+ <div className = " code-comparison" >
461+ <div className = " code-comparison__column" >
462+ <CodeBlock language = " python" title = " Haystack" >{ ` # pip install haystack-ai sentence-transformers
463+
464+ from haystack import Document
465+ from haystack.document_stores.in_memory import InMemoryDocumentStore
466+ from haystack.components.embedders import SentenceTransformersDocumentEmbedder
467+
468+ # Embed and write documents to the document store
469+ document_store = InMemoryDocumentStore()
470+
471+ doc_embedder = SentenceTransformersDocumentEmbedder(
472+ model="sentence-transformers/all-MiniLM-L6-v2"
473+ )
474+ doc_embedder.warm_up()
475+
476+ docs = [
477+ Document(content="Paris is the capital of France."),
478+ Document(content="Berlin is the capital of Germany."),
479+ Document(content="Tokyo is the capital of Japan."),
480+ ]
481+ docs_with_embeddings = doc_embedder.run(docs)["documents"]
482+ document_store.write_documents(docs_with_embeddings) ` } </CodeBlock >
483+ </div >
484+ <div className = " code-comparison__column" >
485+ <CodeBlock language = " python" title = " LangChain" >{ ` # pip install langchain-community langchain-huggingface sentence-transformers
486+
487+ from langchain_huggingface import HuggingFaceEmbeddings
488+ from langchain_community.vectorstores import InMemoryVectorStore
489+ from langchain_core.documents import Document
490+
491+ # Embed and add documents to the vector store
492+ embeddings = HuggingFaceEmbeddings(
493+ model_name="sentence-transformers/all-MiniLM-L6-v2"
494+ )
495+ vectorstore = InMemoryVectorStore(embedding=embeddings)
496+ vectorstore.add_documents([
497+ Document(page_content="Paris is the capital of France."),
498+ Document(page_content="Berlin is the capital of Germany."),
499+ Document(page_content="Tokyo is the capital of Japan."),
500+ ]) ` } </CodeBlock >
501+ </div >
502+ </div >
503+
504+ **Step 2: Build a RAG pipeline**
505+
506+ <div className = " code-comparison" >
507+ <div className = " code-comparison__column" >
508+ <CodeBlock language = " python" title = " Haystack" >{ ` from haystack import Pipeline
509+ from haystack.components.embedders import SentenceTransformersTextEmbedder
510+ from haystack.components.retrievers.in_memory import InMemoryEmbeddingRetriever
511+ from haystack.components.builders import ChatPromptBuilder
512+ from haystack.dataclasses import ChatMessage
513+ from haystack_integrations.components.generators.anthropic import AnthropicChatGenerator
514+
515+ # ChatPromptBuilder expects a List[ChatMessage] as template
516+ template = [ChatMessage.from_user("""
517+ Given the following documents, answer the question.
518+ {% for doc in documents %}{{ doc.content }}{% endfor %}
519+ Question: {{ question }}
520+ """)]
521+
522+ rag_pipeline = Pipeline()
523+ rag_pipeline.add_component(
524+ "text_embedder",
525+ SentenceTransformersTextEmbedder(model="sentence-transformers/all-MiniLM-L6-v2")
526+ )
527+ rag_pipeline.add_component(
528+ "retriever", InMemoryEmbeddingRetriever(document_store=document_store)
529+ )
530+ rag_pipeline.add_component(
531+ "prompt_builder", ChatPromptBuilder(template=template)
532+ )
533+ rag_pipeline.add_component(
534+ "llm", AnthropicChatGenerator(model="claude-sonnet-4-5-20250929")
535+ )
536+
537+ rag_pipeline.connect("text_embedder.embedding", "retriever.query_embedding")
538+ rag_pipeline.connect("retriever.documents", "prompt_builder.documents")
539+ rag_pipeline.connect("prompt_builder.prompt", "llm.messages")
540+
541+ result = rag_pipeline.run({
542+ "text_embedder": {"text": "What is the capital of France?"},
543+ "prompt_builder": {"question": "What is the capital of France?"},
544+ })
545+ print(result["llm"]["replies"][0].text) ` } </CodeBlock >
546+ </div >
547+ <div className = " code-comparison__column" >
548+ <CodeBlock language = " python" title = " LangChain" >{ ` from langchain_anthropic import ChatAnthropic
549+ from langchain_core.prompts import ChatPromptTemplate
550+ from langchain_core.output_parsers import StrOutputParser
551+ from langchain_core.runnables import RunnablePassthrough
552+
553+ def format_docs(docs):
554+ return "\\ n".join(doc.page_content for doc in docs)
555+
556+ retriever = vectorstore.as_retriever()
557+ model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
558+
559+ template = """
560+ Given the following documents, answer the question.
561+ {context}
562+ Question: {question}
563+ """
564+ prompt = ChatPromptTemplate.from_template(template)
565+
566+ rag_chain = (
567+ {"context": retriever | format_docs, "question": RunnablePassthrough()}
568+ | prompt
569+ | model
570+ | StrOutputParser()
571+ )
572+
573+ result = rag_chain.invoke("What is the capital of France?")
574+ print(result) ` } </CodeBlock >
575+ </div >
576+ </div >
577+
578+ ### Using MCP Tools
579+
580+ Both frameworks support the [Model Context Protocol (MCP)](https://modelcontextprotocol.io), letting agents connect to external tools and services exposed by MCP servers. Haystack provides `MCPTool` and `MCPToolset` through the `mcp-haystack` integration package, which plug directly into the `Agent` component. LangChain's MCP support relies on the separate `langchain-mcp-adapters` package and requires an async workflow throughout.
581+
582+ <div className = " code-comparison" >
583+ <div className = " code-comparison__column" >
584+ <CodeBlock language = " python" title = " Haystack" >{ ` # pip install haystack-ai mcp-haystack anthropic-haystack
585+
586+ from haystack_integrations.tools.mcp import MCPToolset, StdioServerInfo
587+ from haystack.components.agents import Agent
588+ from haystack_integrations.components.generators.anthropic import AnthropicChatGenerator
589+ from haystack.dataclasses import ChatMessage
590+
591+ # Connect to an MCP server - tools are auto-discovered
592+ toolset = MCPToolset(
593+ server_info=StdioServerInfo(
594+ command="uvx",
595+ args=["mcp-server-fetch"],
596+ )
597+ )
598+
599+ agent = Agent(
600+ chat_generator=AnthropicChatGenerator(model="claude-sonnet-4-5-20250929"),
601+ tools=toolset,
602+ system_prompt="You are a helpful assistant that can fetch web content.",
603+ )
604+
605+ result = agent.run(messages=[
606+ ChatMessage.from_user("Fetch the content from https://haystack.deepset.ai")
607+ ])
608+ print(result["messages"][-1].text) ` } </CodeBlock >
609+ </div >
610+ <div className = " code-comparison__column" >
611+ <CodeBlock language = " python" title = " LangGraph + LangChain" >{ ` # pip install langchain-mcp-adapters langgraph langchain-anthropic
612+
613+ import asyncio
614+ from langchain_mcp_adapters.client import MultiServerMCPClient
615+ from langchain.agents import create_agent
616+ from langchain_anthropic import ChatAnthropic
617+ from langchain_core.messages import HumanMessage, SystemMessage
618+
619+ model = ChatAnthropic(model="claude-sonnet-4-5-20250929")
620+
621+ async def run():
622+ client = MultiServerMCPClient(
623+ {
624+ "fetch": {
625+ "command": "uvx",
626+ "args": ["mcp-server-fetch"],
627+ "transport": "stdio",
628+ }
629+ }
630+ )
631+ tools = await client.get_tools()
632+ agent = create_agent(
633+ model,
634+ tools,
635+ system_prompt=SystemMessage(
636+ content="You are a helpful assistant that can fetch web content."
637+ ),
638+ )
639+ result = await agent.ainvoke(
640+ {
641+ "messages": [
642+ HumanMessage(content="Fetch the content from https://haystack.deepset.ai")
643+ ]
644+ }
645+ )
646+ print(result["messages"][-1].content)
647+
648+
649+ asyncio.run(run()) ` } </CodeBlock >
650+ </div >
651+ </div >
652+
376653## Hear from Haystack Users
377654
378655See how teams across industries use Haystack to power their production AI systems, from RAG applications to agentic workflows.
0 commit comments