forked from microsoft/HeurAgenix
-
Notifications
You must be signed in to change notification settings - Fork 0
Expand file tree
/
Copy pathgenerate_problem_state.py
More file actions
31 lines (23 loc) · 1.42 KB
/
generate_problem_state.py
File metadata and controls
31 lines (23 loc) · 1.42 KB
import argparse
import os
from src.pipeline.problem_state_generator import ProblemStateGenerator
from src.util.llm_client.get_llm_client import get_llm_client
def parse_arguments(argv=None):
    """Parse command-line arguments for problem-state generation.

    Args:
        argv: Optional list of argument strings. Defaults to ``sys.argv[1:]``
            when ``None`` (standard argparse behavior), so existing callers
            are unaffected.

    Returns:
        argparse.Namespace with ``problem``, ``smoke_test`` and
        ``llm_config_file`` attributes.
    """
    problems_dir = os.path.join("src", "problems")
    # Only real problem packages are valid choices: skip the shared "base"
    # template, plain files, and artifacts such as __pycache__ that
    # os.listdir would otherwise surface as bogus --problem options.
    problem_pool = [
        problem
        for problem in os.listdir(problems_dir)
        if problem != "base"
        and not problem.startswith("__")
        and os.path.isdir(os.path.join(problems_dir, problem))
    ]
    parser = argparse.ArgumentParser(description="Generate problem state")
    parser.add_argument("-p", "--problem", choices=problem_pool, required=True, help="Specifies the type of combinatorial optimization problem.")
    parser.add_argument("-m", "--smoke_test", action='store_true', help="Optional flag to conduct a preliminary smoke test.")
    parser.add_argument("-l", "--llm_config_file", type=str, default=os.path.join("data", "llm_config", "azure_gpt_4o.json"), help="Path to the language model configuration file. Default is azure_gpt_4o.json.")
    return parser.parse_args(argv)
def main():
    """Entry point: build the LLM client and run problem-state generation
    for the problem selected on the command line."""
    args = parse_arguments()
    # The base problem's prompt directory is shared across all problems;
    # output is namespaced per problem.
    prompt_dir = os.path.join("src", "problems", "base", "prompt")
    output_dir = os.path.join("output", args.problem, "generate_problem_state")
    llm_client = get_llm_client(args.llm_config_file, prompt_dir, output_dir)
    generator = ProblemStateGenerator(llm_client=llm_client, problem=args.problem)
    generator.generate_problem_state(smoke_test=args.smoke_test)


if __name__ == "__main__":
    main()