
Commit c24ffa7

removed extra print and fixed suggest relationship

Signed-off-by: Amit Sharma <[email protected]>
1 parent 6e664ad

2 files changed (+36, -25)

pywhyllm/suggesters/tuebingen_model_suggester.py (36 additions, 24 deletions)
@@ -234,15 +234,17 @@ def _build_relationship_program(
         query = {}

         if use_domain is not None:
-            query["system"] = f"""You are a helpful assistant on causal reasoning and {use_domain}. Your goal is to answer
+            sys_prompt = f"""You are a helpful assistant on causal reasoning and {use_domain}. Your goal is to answer
             questions about cause and effect in a factual and concise way."""
+            query["system"] = cleandoc(sys_prompt)
         else:
-            query["system"] = f"""You are a helpful assistant on causal reasoning. Your goal is to answer questions
+            sys_prompt = f"""You are a helpful assistant on causal reasoning. Your goal is to answer questions
             about cause and effect in a factual and concise way."""
+            query["system"] = cleandoc(sys_prompt)

         if use_strategy is not None:
             if use_strategy == Strategy.ToT_Single:
-                query["user"] = f"""There is a council of three different experts answering this question.
+                user_prompt = f"""There is a council of three different experts answering this question.
                 Each of the three expert will write down 1 step of their thinking, and share it with the council.
                 Then all experts will go on to the next step, etc.
                 All experts in the council are arguing to arrive to a true and factual answer. Their goal is to arrive
@@ -251,36 +253,43 @@ def _build_relationship_program(
                 If any expert realises their argument is wrong at any point, then they will adjust their argument to
                 be factual and logical.
                 The question is """
+                query["user"] = cleandoc(user_prompt)

                 if use_description:
-                    query["user"] = f"""can changing {variable_a}, where {description_a}, change {variable_b}, where
+                    user_prompt = f"""can changing {variable_a}, where {description_a}, change {variable_b}, where
                     {description_b}? Answer Yes or No."""
+                    query["user"] += cleandoc(user_prompt)
                 else:
-                    query["user"] = f"""can changing {variable_a} change {variable_b}? Answer Yes or No."""
+                    user_prompt = f"""can changing {variable_a} change {variable_b}? Answer Yes or No."""
+                    query["user"] += cleandoc(user_prompt)

                 if ask_reference:
-                    query["user"] += f"""At each step, each expert include a reference to a research paper that supports
-                    their argument. They will provide a one sentence summary of the paper and how it supports their argument.
+                    user_prompt = f"""At each step, each expert include a reference to a research paper that supports
+                    their argument. They will provide a one sentence summary of the paper and how it supports their argument.
                     Then they will answer whether a change in {variable_a} changes {variable_b}. Answer Yes or No.
                     When consensus is reached, thinking carefully and factually, explain the council's answer. Provide
                     the answer within the tags, <answer>Yes/No</answer>, and the most influential reference within
                     the tags <reference>Author, Title, Year of publication</reference>.
                     \n\n\n----------------\n\n\n<answer>Yes</answer>\n<reference>Author, Title, Year of
                     publication</reference>\n\n\n----------------\n\n\n<answer>No</answer>"""
+                    query["user"] += cleandoc(user_prompt)
                 else:
-                    query["user"] += """When consensus is reached, thinking carefully and factually, explain the council's answer.
+                    user_prompt = """When consensus is reached, thinking carefully and factually, explain the council's answer.
                     Provide the answer within the tags, <answer>Yes/No</answer>.
                     \n\n\n----------------\n\n\n<answer>Yes</answer>\n\n\n----------------\n\n\n<answer>No</answer>"""
+                    query["user"] += cleandoc(user_prompt)

             elif use_strategy == Strategy.CoT:
                 if use_description:
-                    query["user"] = f"""Can changing {variable_a}, where {description_a}, change {variable_b}, where
-                    {description_b}? """
+                    user_prompt = f"""Can changing {variable_a}, where {description_a}, change {variable_b}, where
+                    {description_b}?"""
+                    query["user"] = cleandoc(user_prompt)
                 else:
-                    query["user"] = f"""Can changing {variable_a} change {variable_b}?"""
+                    user_prompt = f"""Can changing {variable_a} change {variable_b}?"""
+                    query["user"] = cleandoc(user_prompt)

                 if ask_reference:
-                    query["user"] += f"""What are three research papers that discuss each of these variables? What do they
+                    user_prompt = f"""What are three research papers that discuss each of these variables? What do they
                     say about the relationship they may or may not have? You are to provide the paper title and a one
                     sentence summary each paper's argument. Then use those arguments as reference to answer whether a change
                     in {variable_a} changes {variable_b}. Answer Yes or No.
@@ -289,33 +298,36 @@ def _build_relationship_program(
                     <reference>Author, Title, Year of publication</reference>.
                     \n\n\n----------------\n\n\n<answer>Yes</answer>\n<reference>Author, Title,
                     Year of publication</reference>\n\n\n----------------\n\n\n<answer>No</answer> {{~/user}}"""
+                    query["user"] += cleandoc(user_prompt)
                 else:
-                    query["user"] += f"""Answer Yes or No. Within one sentence, you are to think step-by-step to make sure
+                    user_prompt = f"""Answer Yes or No. Within one sentence, you are to think step-by-step to make sure
                     that you have the right answer. Then provide your final answer within the tags, <answer>Yes/No</answer.
                     \n\n\n----------------\n\n\n<answer>Yes</answer>\n\n\n----------------\n\n\n<answer>No</answer>"""
+                    query["user"] += cleandoc(user_prompt)

             elif use_strategy == Strategy.Straight:
                 if use_description:
-                    query[
-                        "user"] = f"""Can changing {variable_a}, where {description_a}, change {variable_b}, where {description_b}? """
+                    user_prompt = f"""Can changing {variable_a}, where {description_a}, change {variable_b}, where {description_b}? """
+                    query["user"] = cleandoc(user_prompt)
                 else:
-                    query["user"] = f"""Can changing {variable_a} change {variable_b}?"""
+                    user_prompt = f"""Can changing {variable_a} change {variable_b}?"""
+                    query["user"] = cleandoc(user_prompt)

                 if ask_reference:
-                    query["user"] += f"""What are three research papers that discuss each of these variables? What do they
+                    user_prompt = f"""What are three research papers that discuss each of these variables? What do they
                     say about the relationship they may or may not have? You are to provide the paper title and a one
                     sentence summary each paper's argument. Then use those arguments as reference to answer whether a
                     change in {variable_a} changes {variable_b}. Answer Yes or No.
-                    Within one sentence, you are to think step-by-step to make sure that you have the right answer.
-                    Provide your final answer within the tags, <answer>Yes/No</answer>, and your references within
-                    the tags <reference>Author, Title, Year of publication</reference.
-                    \n\n\n----------------\n\n\n<answer>Yes</answer>\n<reference>Author, Title, Year of publication</reference>\n\n\n----------------\n\n\n<answer>No</answer>"""
+                    Within one sentence, you are to think step-by-step to make sure that you have the right answer.
+                    Provide your final answer within the tags, <answer>Yes/No</answer>, and your references within
+                    the tags <reference>Author, Title, Year of publication</reference.
+                    \n\n\n----------------\n\n\n<answer>Yes</answer>\n<reference>Author, Title, Year of publication</reference>\n\n\n----------------\n\n\n<answer>No</answer>"""
+                    query["user"] += cleandoc(user_prompt)

                 else:
-                    query["user"] += f"""Answer Yes or No. Within one sentence, you are to think step-by-step to make sure
+                    user_prompt = f"""Answer Yes or No. Within one sentence, you are to think step-by-step to make sure
                     that you have the right answer. Then provide your final answer within the tags, <answer>Yes/No</answer>.
                     \n\n\n----------------\n\n\nExample of output structure: <answer>Yes</answer>\n\n\n----------------\n\n\nExample of output structure: <answer>No</answer>"""
-        query["user"] = cleandoc(query["user"])
-        query["system"] = cleandoc(query["system"])
+                    query["user"] += cleandoc(user_prompt)

         return query
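
A note on why this refactor fixes the prompt text: inspect.cleandoc strips only the leading whitespace common to every line of a string (the first line aside). The old code concatenated raw triple-quoted fragments and ran cleandoc once on the final string, so fragments written at different nesting depths kept stray indentation, and strings built with += were never normalized piece by piece. Cleaning each fragment before it is assigned or appended, as this commit does, dedents every piece independently. A minimal standalone sketch (not part of the commit; variable names are illustrative only):

from inspect import cleandoc

deep = """first line
            a line indented by twelve spaces"""
shallow = """another first line
    a line indented by four spaces"""

# Old approach: clean the concatenation once. The indentation common to all
# lines is zero (the second fragment's first line is flush left), so the
# stray twelve- and four-space indents survive in the prompt text.
print(cleandoc(deep + "\n" + shallow))

# New approach: clean each fragment, then concatenate. Each fragment loses
# its own indentation independently, yielding flush-left prompt text.
print(cleandoc(deep) + "\n" + cleandoc(shallow))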

pywhyllm/tests/model_suggester/test_tuebingen_model_suggester.py (0 additions, 1 deletion)
@@ -48,7 +48,6 @@ def test__build_description_program(self):
         assert result == test__build_description_program_with_context_no_reference_expected_result
         #Test with context, with reference
         result = modeler._build_description_program(variable, True, True)
-        print(result)
         assert result == test__build_description_program_with_context_with_reference_expected_result

     def test_suggest_relationship(self):
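
For context on how the refactored builder's output is consumed: it returns a dict with "system" and "user" prompt strings that is eventually handed to an LLM. A hypothetical sketch of that hand-off (the OpenAI client and model name are assumptions for illustration, not pywhyllm code):

from openai import OpenAI  # assumed client; any role-based chat API would do

def ask(query: dict) -> str:
    # The builder's dict maps directly onto chat roles.
    client = OpenAI()
    response = client.chat.completions.create(
        model="gpt-4o-mini",  # placeholder model name
        messages=[
            {"role": "system", "content": query["system"]},
            {"role": "user", "content": query["user"]},
        ],
    )
    return response.choices[0].message.content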
