
Commit d4f54e5

Author: chibu
Commit message: clean ups
Parent: 866dde1

3 files changed (+2 / -9 lines)

src/automation/configs.py (0 additions, 1 deletion)

@@ -1,4 +1,3 @@
 DEFAULT_DOCKER_IMAGE = "quay.io/nmmlops/mlops/k8s-research-cuda12_8:latest"
 DEFAULT_OUTPUT_URI = "gs://neuralmagic-clearml"
 DEFAULT_RESEARCH_BRANCH = "main"
-#DEFAULT_GUIDELLM_SCENARIO = "chat"

src/automation/tasks/base_task.py (1 addition, 4 deletions)

@@ -7,9 +7,6 @@
 
 class BaseTask():
 
-    #base_packages = ["git+https://github.com/neuralmagic/research.git"]
-    #base_packages = ["git+https://github.com/neuralmagic/research.git@update_guidellm"]
-
     def __init__(
         self,
         project_name: str,
@@ -35,7 +32,7 @@ def __init__(
         self.packages = packages
         self.task_type = task_type
         self.task = None
-        self.branch= branch
+        self.branch = branch
         self.script_path = None
         self.callable_artifacts = None
src/automation/tasks/scripts/guidellm_script.py (1 addition, 4 deletions)

@@ -4,7 +4,6 @@
 from automation.utils import resolve_model_id, cast_args, kill_process_tree
 from automation.vllm import start_vllm_server
 from pyhocon import ConfigFactory
-#from automation.configs import DEFAULT_GUIDELLM_SCENARIO
 
 def main(configurations=None):
     task = Task.current_task()
@@ -72,8 +71,6 @@ def clean_hocon_value(v):
 
     guidellm_args["model"] = model_id
 
-    DEFAULT_GUIDELLM_SCENARIO = guidellm_args["scenario"]
-
     import json
     import asyncio
     from pathlib import Path
@@ -92,7 +89,7 @@ def clean_hocon_value(v):
         # to be used when get_builtin_scenarios() bug is fixed
         # current_scenario = GenerativeTextScenario.from_builtin(get_builtin_scenarios()[0], dict(guidellm_args))
     else:
-        filepath = Path(os.path.join(".", "src", "automation", "standards", "benchmarking", f"{DEFAULT_GUIDELLM_SCENARIO}.json"))
+        filepath = Path(os.path.join(".", "src", "automation", "standards", "benchmarking", f"{user_scenario}.json"))
         current_scenario = GenerativeTextScenario.from_file(filepath, dict(guidellm_args))
 
     # Ensure output_path is set and consistent
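Note on the guidellm_script.py change: the fallback scenario file is now named from the user-supplied scenario value rather than the removed DEFAULT_GUIDELLM_SCENARIO constant. Below is a minimal sketch of that path lookup, assuming user_scenario holds the scenario name resolved earlier in the script (that part is not shown in this diff); resolve_scenario_path is a hypothetical helper used only for illustration.

    import os
    from pathlib import Path

    def resolve_scenario_path(user_scenario: str) -> Path:
        # Built-in benchmarking scenarios ship as JSON files in the repo,
        # e.g. "chat" -> ./src/automation/standards/benchmarking/chat.json
        return Path(os.path.join(".", "src", "automation", "standards", "benchmarking", f"{user_scenario}.json"))

    # The resulting path is then passed to GenerativeTextScenario.from_file
    # together with dict(guidellm_args), as in the hunk above.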
