Skip to content

Commit 7480168

Browse files
committed
📦 NEW: Pipe Examples
1 parent 904c861 commit 7480168

File tree

5 files changed

+261
-0
lines changed

5 files changed

+261
-0
lines changed

examples/pipes/pipe.run.chat.py

Lines changed: 43 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,43 @@
1+
"""
2+
Example demonstrating how to have a conversation using a pipe in non-streaming mode.
3+
"""
4+
5+
import json
6+
import os
7+
8+
from dotenv import load_dotenv
9+
10+
from langbase import Langbase
11+
12+
13+
def main():
    """Run a two-turn, non-streaming conversation with the `summary-agent` pipe."""
    load_dotenv()

    client = Langbase(api_key=os.getenv("LANGBASE_API_KEY"))

    # Turn 1: give the model a fact to remember.
    first_turn = [{"role": "user", "content": "My company is called Langbase"}]
    response1 = client.pipes.run(
        name="summary-agent",
        stream=False,
        messages=first_turn,
    )
    print(json.dumps(response1, indent=2))

    # Turn 2: pass the thread id returned by the first call so the pipe
    # continues the same conversation.
    # NOTE(review): assumes the non-streaming response carries a camelCase
    # "threadId" key (the streaming example in this commit reads "thread_id")
    # — confirm against the SDK's run() return shape.
    second_turn = [{"role": "user", "content": "Tell me the name of my company?"}]
    response2 = client.pipes.run(
        name="summary-agent",
        stream=False,
        thread_id=response1["threadId"],
        messages=second_turn,
    )
    print(json.dumps(response2, indent=2))
    # Because both calls share one thread, any LLM behind the pipe can answer
    # that the company is `Langbase` — this is how a conversation is continued
    # in the same thread.


if __name__ == "__main__":
    main()
Lines changed: 37 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,37 @@
1+
"""
2+
Example demonstrating how to run a pipe with a pipe API key.
3+
"""
4+
5+
import os
6+
7+
from dotenv import load_dotenv
8+
9+
from langbase import Langbase, get_runner
10+
11+
12+
def main():
    """Stream a single pipe run authenticated with a pipe-level API key."""
    load_dotenv()

    client = Langbase(api_key=os.getenv("LANGBASE_API_KEY"))

    prompt = "Who is an AI Engineer?"

    # Request a raw streaming response; the pipe-scoped key selects and
    # authorizes the pipe for this call.
    result = client.pipes.run(
        api_key=os.getenv("PIPE_API_KEY"),
        stream=True,
        raw_response=True,
        messages=[{"role": "user", "content": prompt}],
    )

    stream_runner = get_runner(result)
    print("Stream started.\n")

    # Echo each text chunk to stdout as soon as it arrives.
    for chunk in stream_runner.text_generator():
        print(chunk, end="", flush=True)

    print("\n\nStream ended!")  # Add a newline after first response


if __name__ == "__main__":
    main()
Lines changed: 53 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,53 @@
1+
"""
2+
Example demonstrating how to have a conversation using a pipe in streaming mode.
3+
"""
4+
5+
import os
6+
import sys
7+
8+
from dotenv import load_dotenv
9+
10+
from langbase import Langbase, get_runner
11+
12+
13+
def main():
    """Hold a two-turn streaming conversation with the `summary-agent` pipe."""

    def stream_to_stdout(run_result):
        # Drain the runner's text generator, echoing chunks as they arrive.
        for chunk in get_runner(run_result).text_generator():
            print(chunk, end="", flush=True)
        print("\n\nStream ended!")  # Add a newline after first response

    load_dotenv()

    client = Langbase(api_key=os.getenv("LANGBASE_API_KEY"))

    # Turn 1: give the model a fact to remember.
    response1 = client.pipes.run(
        name="summary-agent",
        stream=True,
        messages=[{"role": "user", "content": "My company is called Langbase"}],
    )
    stream_to_stdout(response1)

    # Turn 2: continue the same conversation by sending `thread_id` from the
    # second message onwards.
    # NOTE(review): assumes the streaming run result is subscriptable and
    # carries a snake_case "thread_id" key (the non-streaming example in this
    # commit reads "threadId") — confirm against the SDK's run() return shape.
    response2 = client.pipes.run(
        name="summary-agent",
        stream=True,
        thread_id=response1["thread_id"],
        messages=[{"role": "user", "content": "Tell me the name of my company?"}],
    )
    stream_to_stdout(response2)


if __name__ == "__main__":
    main()
Lines changed: 38 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,38 @@
1+
"""
2+
Example demonstrating how to run a pipe with an LLM API key in streaming mode.
3+
"""
4+
5+
import os
6+
import sys
7+
8+
from dotenv import load_dotenv
9+
10+
from langbase import Langbase, get_runner
11+
12+
13+
def main():
    """Stream a pipe run that supplies the caller's own LLM provider key."""
    load_dotenv()

    client = Langbase(api_key=os.getenv("LANGBASE_API_KEY"))

    prompt = "Who is an AI Engineer?"

    # Request a raw streaming response; `llm_key` lets the pipe call the
    # underlying model with your own provider API key.
    result = client.pipes.run(
        name="summary-agent",
        stream=True,
        raw_response=True,
        llm_key=os.getenv("LLM_KEY"),  # Your LLM API key
        messages=[{"role": "user", "content": prompt}],
    )

    # Convert the stream to a stream runner and echo each text chunk.
    stream_runner = get_runner(result)
    for chunk in stream_runner.text_generator():
        print(chunk, end="", flush=True)


if __name__ == "__main__":
    main()
Lines changed: 90 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,90 @@
1+
"""
2+
Example demonstrating how to use structured outputs with a pipe.
3+
"""
4+
5+
import json
6+
import os
7+
from typing import List
8+
9+
from dotenv import load_dotenv
10+
from pydantic import BaseModel, Field
11+
12+
from langbase import Langbase
13+
14+
15+
# Define the Strucutred Output JSON schema with Pydantic
16+
class Step(BaseModel):
    """One step of the tutor's worked solution."""

    # Why this step is taken, in the tutor's own words.
    explanation: str
    # The intermediate result this step produces.
    output: str
19+
20+
21+
class MathReasoning(BaseModel):
    """Structured model response: ordered solution steps plus the final answer."""

    # The worked solution, one Step per reasoning stage.
    steps: List[Step]
    # The original declared `Field(alias="final_answer")`, a no-op since the
    # alias was identical to the field name — validation and the generated
    # JSON schema are unchanged without it.
    final_answer: str
24+
25+
26+
def create_math_tutor_pipe(langbase: Langbase):
    """Create (or upsert) the `math-tutor` pipe with a JSON-schema response format."""
    schema = MathReasoning.model_json_schema()

    system_message = {
        "role": "system",
        "content": "You are a helpful math tutor. Guide the user through the solution step by step.",
    }
    # Constrain the model's output to the MathReasoning schema.
    structured_format = {
        "type": "json_schema",
        "json_schema": {
            "name": "math_reasoning",
            "schema": schema,
        },
    }

    pipe = langbase.pipes.create(
        name="math-tutor",
        model="openai:gpt-4o",
        upsert=True,  # re-running updates the existing pipe instead of failing
        messages=[system_message],
        json=True,
        response_format=structured_format,
    )

    print("✅ Math Tutor pipe created:", json.dumps(pipe, indent=2))
50+
51+
52+
def run_math_tutor_pipe(langbase: Langbase, question: str):
    """Ask the `math-tutor` pipe a question and pretty-print the validated solution."""
    result = langbase.pipes.run(
        name="math-tutor",
        stream=False,
        messages=[{"role": "user", "content": question}],
    )

    # Parse the raw completion and validate it against the Pydantic schema.
    solution = MathReasoning.model_validate_json(result["completion"])

    print("✅ Structured Output Response:")
    print("=" * 50)

    for idx, step in enumerate(solution.steps, start=1):
        print(f"Step {idx}:")
        print(f" Explanation: {step.explanation}")
        print(f" Output: {step.output}")
        print()

    print(f"Final Answer: {solution.final_answer}")
    print("=" * 50)
73+
74+
75+
def main():
    """Entry point: validate configuration, then create and run the math tutor pipe."""
    load_dotenv()

    # Read the key once and fail fast if it is missing.
    api_key = os.getenv("LANGBASE_API_KEY")
    if not api_key:
        print("❌ Missing LANGBASE_API_KEY in environment variables.")
        # SystemExit is preferred over the site-injected `exit()` helper,
        # which is intended for interactive sessions.
        raise SystemExit(1)

    langbase = Langbase(api_key=api_key)

    # Safe to run every time: create_math_tutor_pipe passes upsert=True, so
    # this updates the pipe if it already exists. (The original comment said
    # the call should be uncommented on first use, but it was never
    # commented out.)
    create_math_tutor_pipe(langbase)
    run_math_tutor_pipe(langbase, "How can I solve 8x + 22 = -23?")


if __name__ == "__main__":
    main()

0 commit comments

Comments
 (0)