-
Notifications
You must be signed in to change notification settings - Fork 221
Expand file tree
/
Copy pathchat_generator_example.py
More file actions
56 lines (39 loc) · 1.59 KB
/
chat_generator_example.py
File metadata and controls
56 lines (39 loc) · 1.59 KB
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
import dspy
from haystack import Pipeline
from haystack.dataclasses import ChatMessage
from haystack_integrations.components.generators.dspy import DSPyChatGenerator
# Declarative dspy signature: one input field (question) maps to one output
# field (answer).
# NOTE(review): in dspy, the class docstring typically doubles as the task
# instruction sent to the model — edit its wording deliberately, not cosmetically.
class QASignature(dspy.Signature):
    """Answer questions accurately and concisely."""
    # `desc` strings guide the model on how to interpret/produce each field.
    question = dspy.InputField(desc="The user's question")
    answer = dspy.OutputField(desc="A clear, concise answer")
def basic_qa_example():
    """Simple question-answering with Chain-of-Thought reasoning.

    Builds a one-component Haystack pipeline around a DSPyChatGenerator
    configured with the class-based QASignature, runs a single user
    question through it, and prints the question/answer pair.
    """
    qa_generator = DSPyChatGenerator(
        model="openai/gpt-5-mini",
        signature=QASignature,
        module_type="ChainOfThought",
        output_field="answer",
    )

    # Wire the generator into a pipeline under the component name "llm".
    qa_pipeline = Pipeline()
    qa_pipeline.add_component("llm", qa_generator)

    user_messages = [ChatMessage.from_user("What causes rainbows to appear?")]
    run_output = qa_pipeline.run({"llm": {"messages": user_messages}})

    print(f"Question: {user_messages[0].text}")
    print(f"Answer: {run_output['llm']['replies'][0].text}\n")
def string_signature_example():
    """Using a simple string signature instead of a class.

    Same pipeline shape as basic_qa_example, but the dspy signature is the
    shorthand string "question -> answer" and the plain Predict module is
    used (no chain-of-thought step).
    """
    string_sig_generator = DSPyChatGenerator(
        model="openai/gpt-5-mini",
        signature="question -> answer",
        module_type="Predict",
        output_field="answer",
    )

    # Single-component pipeline; the generator is addressed as "llm".
    demo_pipeline = Pipeline()
    demo_pipeline.add_component("llm", string_sig_generator)

    user_messages = [ChatMessage.from_user("What is the capital of Japan?")]
    run_output = demo_pipeline.run({"llm": {"messages": user_messages}})

    print(f"Question: {user_messages[0].text}")
    print(f"Answer: {run_output['llm']['replies'][0].text}\n")
if __name__ == "__main__":
    # Run both demos in order: class-based signature first, then the
    # string-shorthand variant.
    for demo in (basic_qa_example, string_signature_example):
        demo()