Response synthesizer #15289
-
To ensure that your custom prompt templates are correctly updated in the query engine, call update_prompts on it after it is constructed. Here is the corrected code snippet:

from llama_index.core import PromptTemplate, get_response_synthesizer
from llama_index.core.query_engine import RetrieverQueryEngine
# Define custom prompt templates
# (the QA template must use the {context_str} and {query_str} variables that LlamaIndex fills in)
text_qa_template_str = (
    "Say hi at the start. Given the context provided, answer the following question with as much detail and relevant information as possible. "
    "Ensure to include explanations and any necessary background information to make the response informative and comprehensive.\n"
    "Context: {context_str}\n"
    "Question: {query_str}\n"
    "Answer:\n"
)
text_qa_template = PromptTemplate(text_qa_template_str)

# The refine template must use {query_str}, {existing_answer}, and {context_msg}
refine_template_str = (
    "Say hi at the start. Given the new context, refine the original answer to better answer the question. "
    "If the context isn't useful, output the original answer again. Ensure the refined answer is detailed and includes relevant explanations.\n"
    "Question: {query_str}\n"
    "Original Answer: {existing_answer}\n"
    "New Context: {context_msg}\n"
    "Refined Answer:\n"
)
refine_template = PromptTemplate(refine_template_str)
# Initialize the response synthesizer
response_synthesizer = get_response_synthesizer(
    response_mode="tree_summarize",
    streaming=True,
    structured_answer_filtering=True,
)

# Initialize the RetrieverQueryEngine
# (default_retriever is assumed to be a retriever you have already built)
query_engine = RetrieverQueryEngine(
    retriever=default_retriever,
    response_synthesizer=response_synthesizer,
)
# Update the query engine with the custom prompt template
query_engine.update_prompts(
    {"response_synthesizer:text_qa_template": text_qa_template}
)

# Verify the updated prompts
prompts_dict = query_engine.get_prompts()
for k, p in prompts_dict.items():
    print(f"Prompt Key: {k}\nText:\n{p.get_template()}\n")

This code should correctly update the text_qa_template used by the response synthesizer.
-
@dosu
'''
from llama_index.core import PromptTemplate, get_response_synthesizer
from llama_index.core.query_engine import RetrieverQueryEngine

text_qa_template_str = (
    "Say hi at the start. Given the context provided, answer the following question with as much detail and relevant information as possible. "
    "Ensure to include explanations and any necessary background information to make the response informative and comprehensive.\n"
    "Context: {context_str}\n"
    "Question: {query_str}\n"
    "Answer:\n"
)
text_qa_template = PromptTemplate(text_qa_template_str)
refine_template_str = (
    "Say hi at the start. Given the new context, refine the original answer to better answer the question. "
    "If the context isn't useful, output the original answer again. Ensure the refined answer is detailed and includes relevant explanations.\n"
    "Question: {query_str}\n"
    "Original Answer: {existing_answer}\n"
    "New Context: {context_msg}\n"
    "Refined Answer:\n"
)
refine_template = PromptTemplate(refine_template_str)
response_synthesizer = get_response_synthesizer(
    response_mode="tree_summarize",
    streaming=True,
    structured_answer_filtering=True,
)
query_engine = RetrieverQueryEngine(
    retriever=default_retriever,
    response_synthesizer=response_synthesizer,
)
prompts_dict = query_engine.get_prompts()
for k, p in prompts_dict.items():
    print(f"Prompt Key: {k}\nText:\n{p.get_template()}\n")
# Printed output (only one prompt key is listed):
Prompt Key: response_synthesizer:summary_template
Text:
Context information from multiple sources is below.
{context_str}
Given the information from multiple sources and not prior knowledge, answer the query.
Query: {query_str}
Answer:
query_engine.update_prompts(
    {"response_synthesizer:text_qa_template": text_qa_template}
)
prompts_dict = query_engine.get_prompts()
for k, p in prompts_dict.items():
    print(f"Prompt Key: {k}\nText:\n{p.get_template()}\n")
# Printed output after update_prompts (still the default summary template):
Prompt Key: response_synthesizer:summary_template
Text:
Context information from multiple sources is below.
{context_str}
Given the information from multiple sources and not prior knowledge, answer the query.
Query: {query_str}
Answer:
'''
why is it not updating?
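A possible explanation, judging from the get_prompts() output above: with response_mode="tree_summarize" the synthesizer only exposes the key response_synthesizer:summary_template, and update_prompts quietly ignores prompt names that the synthesizer does not own, so the text_qa_template entry never matches anything. A minimal sketch of updating the key that is actually listed, reusing the text_qa_template defined above:

# Target the prompt key that tree_summarize actually exposes.
query_engine.update_prompts(
    {"response_synthesizer:summary_template": text_qa_template}
)

# Re-check: the printed summary_template should now contain the custom instructions.
for k, p in query_engine.get_prompts().items():
    print(f"Prompt Key: {k}\nText:\n{p.get_template()}\n")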