Skip to content

📦 NEW: Advanced Examples #4

New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Open
wants to merge 2 commits into
base: main
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
22 changes: 11 additions & 11 deletions README.md
Original file line number Diff line number Diff line change
Expand Up @@ -53,14 +53,14 @@ langbase_api_key = os.getenv("LANGBASE_API_KEY")
llm_api_key = os.getenv("LLM_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)
```

### 3. Generate text

```python
# Simple generation
response = lb.agent.run(
response = langbase.agent.run(
input=[{"role": "user", "content": "Tell me about AI"}],
model="openai:gpt-4.1-mini",
api_key=llm_api_key,
Expand Down Expand Up @@ -148,10 +148,10 @@ runner.process()

```python
# List all pipes
pipes = lb.pipes.list()
pipes = langbase.pipes.list()

# Run a pipe
response = lb.pipes.run(
response = langbase.pipes.run(
name="ai-agent",
messages=[{"role": "user", "content": "Hello!"}],
variables={"style": "friendly"}, # Optional variables
Expand All @@ -163,21 +163,21 @@ response = lb.pipes.run(

```python
# Create a memory
memory = lb.memories.create(
memory = langbase.memories.create(
name="product-docs",
description="Product documentation",
)

# Upload documents
lb.memories.documents.upload(
langbase.memories.documents.upload(
memory_name="product-docs",
document_name="guide.pdf",
document=open("guide.pdf", "rb"),
content_type="application/pdf",
)

# Retrieve relevant context
results = lb.memories.retrieve(
results = langbase.memories.retrieve(
query="How do I get started?",
memory=[{"name": "product-docs"}],
top_k=3,
Expand All @@ -188,7 +188,7 @@ results = lb.memories.retrieve(

```python
# Run an agent with tools
response = lb.agent.run(
response = langbase.agent.run(
model="openai:gpt-4",
messages=[{"role": "user", "content": "Search for AI news"}],
tools=[{"type": "function", "function": {...}}],
Expand All @@ -202,20 +202,20 @@ response = lb.agent.run(

```python
# Chunk text for processing
chunks = lb.chunker(
chunks = langbase.chunker(
content="Long text to split...",
chunk_max_length=1024,
chunk_overlap=256,
)

# Generate embeddings
embeddings = lb.embed(
embeddings = langbase.embed(
chunks=["Text 1", "Text 2"],
embedding_model="openai:text-embedding-3-small",
)

# Parse documents
content = lb.parser(
content = langbase.parser(
document=open("document.pdf", "rb"),
document_name="document.pdf",
content_type="application/pdf",
Expand Down
87 changes: 87 additions & 0 deletions examples/agent/agent.run.typed.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,87 @@
"""
Run Agent

This example demonstrates how to run an agent with a typed event stream.
"""

import os
import sys

from dotenv import load_dotenv

from langbase import Langbase, StreamEventType, get_typed_runner

load_dotenv()


def main():
    """Run an agent with a typed streaming response and print each event.

    Reads LANGBASE_API_KEY and LLM_API_KEY from the environment, starts a
    streaming agent run, and registers a handler for every stream event
    type (connect, content, tool call, completion, error, end).

    Exits the process with status 1 if either required key is missing.
    """
    # Check for required environment variables
    langbase_api_key = os.environ.get("LANGBASE_API_KEY")
    llm_api_key = os.environ.get("LLM_API_KEY")

    if not langbase_api_key:
        print("❌ Missing LANGBASE_API_KEY in environment variables.")
        print("Please set: export LANGBASE_API_KEY='your_langbase_api_key'")
        # sys.exit is the reliable way to exit a script; the bare exit()
        # builtin is injected by the site module and may be absent.
        sys.exit(1)

    if not llm_api_key:
        print("❌ Missing LLM_API_KEY in environment variables.")
        print("Please set: export LLM_API_KEY='your_llm_api_key'")
        sys.exit(1)

    # Initialize Langbase client
    langbase = Langbase(api_key=langbase_api_key)
    try:
        # Get streaming response
        response = langbase.agent.run(
            stream=True,
            model="openai:gpt-4.1-mini",
            api_key=llm_api_key,
            instructions="You are a helpful assistant that helps users summarize text.",
            input=[{"role": "user", "content": "Who is an AI Engineer?"}],
        )

        # Create typed stream processor
        runner = get_typed_runner(response)

        # Register event handlers — one per stream event type
        runner.on(
            StreamEventType.CONNECT,
            lambda event: print(f"✓ Connected! Thread ID: {event['threadId']}\n"),
        )

        runner.on(
            StreamEventType.CONTENT,
            # flush=True so partial tokens appear immediately while streaming
            lambda event: print(event["content"], end="", flush=True),
        )

        runner.on(
            StreamEventType.TOOL_CALL,
            lambda event: print(
                f"\n🔧 Tool call: {event['toolCall']['function']['name']}"
            ),
        )

        runner.on(
            StreamEventType.COMPLETION,
            lambda event: print(f"\n\n✓ Completed! Reason: {event['reason']}"),
        )

        runner.on(
            StreamEventType.ERROR,
            lambda event: print(f"\n❌ Error: {event['message']}"),
        )

        runner.on(
            StreamEventType.END,
            lambda event: print(f"⏱️ Total duration: {event['duration']:.2f}s"),
        )

        # Process the stream (blocks until the stream ends)
        runner.process()

    except Exception as e:
        # Top-level boundary for an example script: report and continue to exit
        print(f"Error: {e}")


if __name__ == "__main__":
    main()
14 changes: 7 additions & 7 deletions examples/agent/agent.run.workflow.py
Original file line number Diff line number Diff line change
Expand Up @@ -279,7 +279,7 @@ def __init__(self, langbase_client: Langbase, debug: bool = False):
langbase_client: Langbase client instance
debug: Whether to enable debug mode
"""
self.lb = langbase_client
self.langbase = langbase_client
self.workflow = Workflow(debug=debug)

async def generate_blog_post(
Expand All @@ -299,7 +299,7 @@ async def generate_blog_post(

# Step 1: Generate outline
async def create_outline():
response = self.lb.agent.run(
response = self.langbase.agent.run(
input=f"Create a {target_length} blog post outline about: {topic}",
model="openai:gpt-4o-mini",
api_key=os.environ.get("LLM_API_KEY"),
Expand All @@ -309,7 +309,7 @@ async def create_outline():
# Step 2: Generate introduction
async def write_introduction():
outline = self.workflow.context["outputs"]["outline"]
response = self.lb.agent.run(
response = self.langbase.agent.run(
input=f"Write an engaging introduction for this outline: {outline}. Tone: {tone}",
model="openai:gpt-4o-mini",
api_key=os.environ.get("LLM_API_KEY"),
Expand All @@ -320,7 +320,7 @@ async def write_introduction():
async def write_main_content():
outline = self.workflow.context["outputs"]["outline"]
intro = self.workflow.context["outputs"]["introduction"]
response = self.lb.agent.run(
response = self.langbase.agent.run(
input=f"Write the main content based on outline: {outline}\nIntroduction: {intro}\nTone: {tone}",
model="openai:gpt-4o-mini",
api_key=os.environ.get("LLM_API_KEY"),
Expand All @@ -331,7 +331,7 @@ async def write_main_content():
async def write_conclusion():
outline = self.workflow.context["outputs"]["outline"]
content = self.workflow.context["outputs"]["main_content"]
response = self.lb.agent.run(
response = self.langbase.agent.run(
input=f"Write a conclusion for this content: {content[:500]}...",
model="openai:gpt-4o-mini",
api_key=os.environ.get("LLM_API_KEY"),
Expand Down Expand Up @@ -392,8 +392,8 @@ async def advanced_workflow_example():
print("\n🚀 Advanced Workflow Example")
print("=" * 50)

lb = Langbase(api_key=os.environ.get("LANGBASE_API_KEY"))
blog_workflow = AIContentWorkflow(lb, debug=True)
langbase = Langbase(api_key=os.environ.get("LANGBASE_API_KEY"))
blog_workflow = AIContentWorkflow(langbase, debug=True)

result = await blog_workflow.generate_blog_post(
topic="The Future of Artificial Intelligence",
Expand Down
4 changes: 2 additions & 2 deletions examples/chunker/chunker.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,7 +16,7 @@
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)


def main():
Expand All @@ -31,7 +31,7 @@ def main():
with open(document_path, "r", encoding="utf-8") as file:
document_content = file.read()
# Chunk the content
chunks = lb.chunker(
chunks = langbase.chunker(
content=document_content, chunk_max_length=1024, chunk_overlap=256
)

Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.create.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Create the memory
try:
response = lb.memories.create(
response = langbase.memories.create(
name="product-knowledge",
description="Memory store for product documentation and information",
embedding_model="openai:text-embedding-3-large",
Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.docs.delete.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,15 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Memory name and document ID to delete
memory_name = "product-knowledge" # Replace with your memory name
document_name = "intro.txt" # Replace with the document name you want to delete

# Delete the document
try:
response = lb.memories.documents.delete(
response = langbase.memories.documents.delete(
memory_name=memory_name, document_name=document_name
)

Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.docs.list.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,14 +17,14 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Memory name to list documents from
memory_name = "product-knowledge" # Replace with your memory name

# List documents in the memory
try:
response = lb.memories.documents.list(memory_name=memory_name)
response = langbase.memories.documents.list(memory_name=memory_name)

print(f"Documents in memory '{memory_name}':")
print(json.dumps(response, indent=2))
Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.docs.retry-embed.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,15 +17,15 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Memory name to retry embedding for
memory_name = "product-knowledge" # Replace with your memory name
document_name = "name.txt" # Replace with document name

# Retry embedding for failed documents
try:
response = lb.memories.documents.embeddings.retry(
response = langbase.memories.documents.embeddings.retry(
memory_name=memory_name, document_name=document_name
)

Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.docs.upload-pdf.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Memory name to upload documents to
memory_name = "product-knowledge" # Replace with your memory name
Expand All @@ -31,7 +31,7 @@ def main():
document_content = file.read()

content = "Langbase is a powerful platform for building AI applications with composable AI."
response = lb.memories.documents.upload(
response = langbase.memories.documents.upload(
memory_name=memory_name,
document_name="document.pdf",
document=document_content, # Convert string to bytes
Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.docs.upload.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,15 +16,15 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Memory name to upload documents to
memory_name = "product-knowledge" # Replace with your memory name

# Upload documents to the memory
try:
content = "Langbase is a powerful platform for building AI applications with composable AI."
response = lb.memories.documents.upload(
response = langbase.memories.documents.upload(
memory_name=memory_name,
document_name="intro.txt",
document=content.encode("utf-8"), # Convert string to bytes
Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.list.py
Original file line number Diff line number Diff line change
Expand Up @@ -17,11 +17,11 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# List all memories
try:
response = lb.memories.list()
response = langbase.memories.list()

print(json.dumps(response, indent=2))

Expand Down
4 changes: 2 additions & 2 deletions examples/memory/memory.retrieve.py
Original file line number Diff line number Diff line change
Expand Up @@ -20,14 +20,14 @@ def main():
langbase_api_key = os.getenv("LANGBASE_API_KEY")

# Initialize the client
lb = Langbase(api_key=langbase_api_key)
langbase = Langbase(api_key=langbase_api_key)

# Retrieve memories using a query
memory_name = "product-knowledge" # Replace with your memory name
query = "What is Langbase?"

try:
response = lb.memories.retrieve(
response = langbase.memories.retrieve(
query=query,
memory=[{"name": memory_name}],
top_k=5, # Number of relevant memories to retrieve
Expand Down
Loading