Skip to content
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
Original file line number Diff line number Diff line change
Expand Up @@ -131,7 +131,7 @@ def scorer(instance_dir):
return None

expected_answer = None
with open(expected_answer_file, "rt") as fh:
with open(expected_answer_file, "rt", encoding='utf-8') as fh:
expected_answer = fh.read().strip()

# Read the console
Expand All @@ -140,7 +140,7 @@ def scorer(instance_dir):
return None

console_log = ""
with open(console_log_file, "rt") as fh:
with open(console_log_file, "rt", encoding='utf-8') as fh:
console_log = fh.read()

final_answer = None
Expand Down
6 changes: 3 additions & 3 deletions python/packages/agbench/benchmarks/GAIA/Scripts/init_tasks.py
Original file line number Diff line number Diff line change
Expand Up @@ -42,7 +42,7 @@ def create_jsonl(name, tasks, files_dir, template):
if not os.path.isdir(TASKS_DIR):
os.mkdir(TASKS_DIR)

with open(os.path.join(TASKS_DIR, name + ".jsonl"), "wt") as fh:
with open(os.path.join(TASKS_DIR, name + ".jsonl"), "wt", encoding='utf-8') as fh:
for task in tasks:
print(f"Converting: [{name}] {task['task_id']}")

Expand Down Expand Up @@ -85,13 +85,13 @@ def main():

# Load the GAIA data
gaia_validation_tasks = [[], [], []]
with open(os.path.join(gaia_validation_files, "metadata.jsonl")) as fh:
with open(os.path.join(gaia_validation_files, "metadata.jsonl"), encoding='utf-8') as fh:
for line in fh:
data = json.loads(line)
gaia_validation_tasks[data["Level"] - 1].append(data)

gaia_test_tasks = [[], [], []]
with open(os.path.join(gaia_test_files, "metadata.jsonl")) as fh:
with open(os.path.join(gaia_test_files, "metadata.jsonl"), encoding='utf-8') as fh:
for line in fh:
data = json.loads(line)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@
async def main() -> None:

# Load model configuration and create the model client.
with open("config.yaml", "r") as f:
with open("config.yaml", "r", encoding='utf-8') as f:
config = yaml.safe_load(f)

orchestrator_client = ChatCompletionClient.load_component(config["orchestrator_client"])
Expand All @@ -30,7 +30,7 @@ async def main() -> None:

# Read the prompt
prompt = ""
with open("prompt.txt", "rt") as fh:
with open("prompt.txt", "rt", encoding='utf-8') as fh:
prompt = fh.read().strip()
filename = "__FILE_NAME__".strip()

Expand Down