Skip to content

Commit 55d5018

Browse files
committed
remove resources per trial
Signed-off-by: Jack Luar <[email protected]>
1 parent e59097a commit 55d5018

File tree

2 files changed

+2
-2
lines changed

2 files changed

+2
-2
lines changed

tools/AutoTuner/src/autotuner/distributed.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1092,7 +1092,7 @@ def sweep():
1092 1092
local_dir=LOCAL_DIR,
1093 1093
resume=args.resume,
1094 1094
stop={"training_iteration": args.iterations},
1095      -
resources_per_trial={"cpu": args.resources_per_trial},
     1095 +
resources_per_trial={"cpu": os.cpu_count()/args.jobs},
1096 1096
log_to_file=["trail-out.log", "trail-err.log"],
1097 1097
trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
1098 1098
trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",

tools/AutoTuner/test/resume_check.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ def setUp(self):
57 57
f" {c}"
58 58
for c in options
59 59
]
60    -
self.failCommands = [] # TODO
   60 +

61 61

62 62
def test_tune_resume(self):
63 63
# Goal is to first run the first config (without resume) and then run the second config (with resume)

0 commit comments

Comments
 (0)