
Commit 01472b0

Author: yanxinl4
Commit message: add exist check
1 parent: 25b56a8

File tree: 1 file changed, +13 -13 lines


eval/scripts/gencode_json.py

Lines changed: 13 additions & 13 deletions

@@ -8,10 +8,10 @@
 )
 from scicode.gen.models import extract_python_script, get_model_function

-
 DEFAULT_PROMPT_TEMPLATE = Path("eval", "data", "background_comment_template.txt").read_text()
 BACKGOUND_PROMPT_TEMPLATE = Path("eval", "data", "multistep_template.txt").read_text()

+
 class Gencode:
     def __init__(self, model: str, output_dir: Path,
                  prompt_dir: Path, with_background: bool, temperature: float):
@@ -57,6 +57,10 @@ def generate_response_with_steps(
             save (bool, optional): Save propmt and model response. Defaults to True.
         """
         prob_id = prob_data["problem_id"]
+        output_file_path = (
+            self.output_dir / Path(self.model).parts[-1] / self._get_background_dir()
+            / f"{prob_id}.{num_steps}.py"
+        )
         if num_steps == 1:
             self.previous_llm_code = [None] * tot_steps
         else:
@@ -69,8 +73,7 @@ def generate_response_with_steps(
                         prev_file_path = Path("eval", "data", f"{prob_id}.{prev_step+1}.txt")
                     else:
                         prev_file_path = (
-                            self.output_dir
-                            / model
+                            self.output_dir / Path(self.model).parts[-1] / self._get_background_dir()
                             / f"{prob_id}.{prev_step + 1}.py"
                         )
                     if prev_file_path.is_file():
@@ -80,6 +83,9 @@
                         self.previous_llm_code[prev_step] = function_code
                     else:
                         raise Exception(f'Generating {prob_id} step {num_steps} ahead of step {prev_step + 1}.')
+
+        if output_file_path.exists():
+            return
         prompt, previous_code = self.generate_prompt_with_steps(prob_data, num_steps, prompt_template)
         if save:
             self.save_prompt_with_steps(prob_data, prompt, num_steps)
@@ -89,16 +95,10 @@ def generate_response_with_steps(
         model_kwargs["max_tokens"] = 4096
         model_kwargs["temperature"] = self.temperature
         # write the response to a file if it doesn't exist
-        output_file_path = (
-            self.output_dir
-            / model
-            / f"{prob_id}.{num_steps}.py"
-        )
-        if not output_file_path.exists():
-            model_fct = get_model_function(model, **model_kwargs)
-            response_from_llm = model_fct(prompt)
-            self.previous_llm_code[num_steps - 1] = extract_python_script(response_from_llm)
-            self.save_response_with_steps(prob_data, response_from_llm, previous_code, num_steps)
+        model_fct = get_model_function(model, **model_kwargs)
+        response_from_llm = model_fct(prompt)
+        self.previous_llm_code[num_steps - 1] = extract_python_script(response_from_llm)
+        self.save_response_with_steps(prob_data, response_from_llm, previous_code, num_steps)

     @staticmethod
     def process_problem_code(prob_data: dict, num_steps: int) -> str:
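Both rewritten path expressions replace the bare model string with Path(self.model).parts[-1] and add the _get_background_dir() subdirectory. Taking parts[-1] means a provider-qualified model name such as "openai/gpt-4o" contributes only its final component to the output path instead of silently creating a nested per-provider directory. A quick illustration of the pathlib behavior (the model names and directories below are invented for the example):

from pathlib import Path

# Path.parts splits a path string on its separators; the last element
# is the final path component.
print(Path("openai/gpt-4o").parts[-1])  # gpt-4o
print(Path("gpt-4o").parts[-1])         # gpt-4o (single-component names pass through)

# Combined as in the diff, generated code for a given problem/step
# lands in a directory named after the bare model name:
output_dir = Path("eval_results/generated_code")
model = "openai/gpt-4o"
print(output_dir / Path(model).parts[-1] / "13.2.py")
# eval_results/generated_code/gpt-4o/13.2.py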

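The net effect, matching the commit message "add exist check", is that the output-file existence check now runs before the prompt is assembled and the model is called, so re-running generation skips any problem step whose .py file is already on disk instead of paying for a fresh API call. Note the ordering in the diff: earlier steps are still reloaded from disk before the early return, so a later step can always find this step's code even when its own generation was skipped. A minimal self-contained sketch of the early-return pattern (generate_step and call_model are hypothetical stand-ins, not the script's actual API):

from pathlib import Path

def generate_step(output_dir: Path, model: str, prob_id: str, num_steps: int) -> None:
    # Build the output path up front, mirroring the commit: the model
    # name is reduced to its last path component.
    output_file_path = output_dir / Path(model).parts[-1] / f"{prob_id}.{num_steps}.py"
    if output_file_path.exists():
        return  # already generated on a previous run; skip the model call

    prompt = f"# prompt for problem {prob_id}, step {num_steps}"  # placeholder
    response = call_model(model, prompt)                          # hypothetical helper
    output_file_path.parent.mkdir(parents=True, exist_ok=True)
    output_file_path.write_text(response)

def call_model(model: str, prompt: str) -> str:
    # Stand-in for get_model_function(model, **model_kwargs)(prompt)
    # in the real script; returns a dummy completion.
    return f"# response from {model}\n{prompt}\n"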
0 commit comments