Skip to content

Commit 4f213ee

Browse files
committed
remove resources per trial
Signed-off-by: Jack Luar <[email protected]>
1 parent 8be5752 commit 4f213ee

File tree

2 files changed: +2 additions, −2 deletions

tools/AutoTuner/src/autotuner/distributed.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -1081,7 +1081,7 @@ def sweep():
         local_dir=LOCAL_DIR,
         resume=args.resume,
         stop={"training_iteration": args.iterations},
-        resources_per_trial={"cpu": args.resources_per_trial},
+        resources_per_trial={"cpu": os.cpu_count()/args.jobs},
         log_to_file=["trail-out.log", "trail-err.log"],
         trial_name_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
         trial_dirname_creator=lambda x: f"variant-{x.trainable_name}-{x.trial_id}-ray",
(indentation of context lines reconstructed from the call-site context; exact columns not recoverable from the scrape)

tools/AutoTuner/test/resume_check.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -57,7 +57,7 @@ def setUp(self):
             f" {c}"
             for c in options
         ]
-        self.failCommands = []  # TODO
+

     def test_tune_resume(self):
         # Goal is to first run the first config (without resume) and then run the second config (with resume)
(indentation of context lines reconstructed from the method context; exact columns not recoverable from the scrape)

0 commit comments

Comments (0)