1414
class Gencode:
    """Generate LLM code solutions step by step and persist prompts/responses.

    NOTE(review): reconstructed from a mangled unified-diff paste — diff
    line-number columns and old/new lines were fused into the text. This is
    the post-patch (`+`) version of the code.
    """

    def __init__(self, model: str, output_dir: Path,
                 prompt_dir: Path, with_background: bool, temperature: float):
        # Model identifier, possibly provider-prefixed (e.g. "openai/gpt-4o");
        # elsewhere only Path(model).parts[-1] is used for directory names.
        self.model = model
        self.output_dir = output_dir
        self.prompt_dir = prompt_dir
        # When True, prompts include each sub-step's background text and files
        # are saved under "with_background" instead of "without_background".
        self.with_background = with_background
        self.temperature = temperature
        # Per-step cache of code extracted from earlier LLM responses,
        # indexed by step number - 1.
        self.previous_llm_code = []
24- def save_prompt_with_steps (self , prob_data : dict , prompt : str , num_steps : int , tot_steps : int ) -> None :
25- output_dir = Path (self .prompt_dir , self .model )
25+ def _get_background_dir (self ):
26+ return "with_background" if self .with_background else "without_background"
27+
28+ def save_prompt_with_steps (self , prob_data : dict , prompt : str , num_steps : int ) -> None :
29+ output_dir = Path (self .prompt_dir , Path (self .model ).parts [- 1 ], self ._get_background_dir ())
2630 output_dir .mkdir (parents = True , exist_ok = True )
2731 output_file_path = output_dir / f"{ prob_data ['problem_id' ]} .{ num_steps } .txt"
2832 output_file_path .write_text (prompt , encoding = "utf-8" )
2933
30- def save_response_with_steps (self , prob_data : dict , response : str , previous_code : str ,
31- num_steps : int , model = "gpt-4o" , ) -> None :
34+ def save_response_with_steps (self , prob_data : dict , response : str ,
35+ previous_code : str , num_steps : int ) -> None :
3236 output_dir = (
33- self .output_dir / model
37+ self .output_dir / Path ( self . model ). parts [ - 1 ] / self . _get_background_dir ()
3438 )
3539 output_dir .mkdir (parents = True , exist_ok = True )
3640 prob_id = prob_data ["problem_id" ]
@@ -78,7 +82,7 @@ def generate_response_with_steps(
7882 raise Exception (f'Generating { prob_id } step { num_steps } ahead of step { prev_step + 1 } .' )
7983 prompt , previous_code = self .generate_prompt_with_steps (prob_data , num_steps , prompt_template )
8084 if save :
81- self .save_prompt_with_steps (prob_data , prompt , num_steps , tot_steps )
85+ self .save_prompt_with_steps (prob_data , prompt , num_steps )
8286
8387 model_kwargs = {}
8488 if "claude" in model :
@@ -94,7 +98,7 @@ def generate_response_with_steps(
9498 model_fct = get_model_function (model , ** model_kwargs )
9599 response_from_llm = model_fct (prompt )
96100 self .previous_llm_code [num_steps - 1 ] = extract_python_script (response_from_llm )
97- self .save_response_with_steps (prob_data , response_from_llm , previous_code , num_steps , model )
101+ self .save_response_with_steps (prob_data , response_from_llm , previous_code , num_steps )
98102
99103 @staticmethod
100104 def process_problem_code (prob_data : dict , num_steps : int ) -> str :
@@ -109,11 +113,16 @@ def process_problem_steps(self, problem_data: dict, num_steps: int):
109113 next_step = []
110114 previous_code = []
111115 for i in range (num_steps - 1 ):
116+ output_lines .append (problem_data ["sub_steps" ][i ]["step_description_prompt" ] + '\n ' +
117+ problem_data ["sub_steps" ][i ]["step_background" ] if self .with_background
118+ else problem_data ["sub_steps" ][i ]["step_description_prompt" ])
112119 output_lines .append (self .previous_llm_code [i ])
113120 previous_code .append (self .previous_llm_code [i ])
114121 output_lines .append ("------" )
115122
116- next_step .append (problem_data ["sub_steps" ][num_steps - 1 ]["step_description_prompt" ])
123+ next_step .append (problem_data ["sub_steps" ][num_steps - 1 ]["step_description_prompt" ] + '\n ' +
124+ problem_data ["sub_steps" ][num_steps - 1 ]["step_background" ] if self .with_background
125+ else problem_data ["sub_steps" ][num_steps - 1 ]["step_description_prompt" ])
117126 next_step .append (self .process_problem_code (problem_data , num_steps ))
118127 output_str = "\n \n " .join (output_lines [:- 1 ]) # Remove the last "------"
119128 next_step_str = "\n \n " .join (next_step )
@@ -160,6 +169,11 @@ def get_cli() -> argparse.ArgumentParser:
160169 default = Path ("eval_results" , "prompt" ),
161170 help = "Prompt directory" ,
162171 )
172+ parser .add_argument (
173+ "--with-background" ,
174+ action = "store_true" ,
175+ help = "Include problem background if enabled" ,
176+ )
163177 parser .add_argument (
164178 "--temperature" ,
165179 type = float ,
@@ -173,11 +187,12 @@ def main(model: str,
173187 output_dir : Path ,
174188 input_path : Path ,
175189 prompt_dir : Path ,
190+ with_background : bool ,
176191 temperature : float
177192) -> None :
178193 gcode = Gencode (
179194 model = model , output_dir = output_dir ,
180- prompt_dir = prompt_dir , temperature = temperature
195+ prompt_dir = prompt_dir , with_background = with_background , temperature = temperature
181196 )
182197 data = read_from_jsonl (input_path )
183198 for problem in data :
0 commit comments