Commit e741052

Update pyfmt component on FBS:master (facebook#4572)
Summary:
X-link: meta-pytorch/tritonbench#661
Pull Request resolved: facebook#4572
X-link: facebookexternal/aepsych_prerelease#42
X-link: facebook/dotslash#89
X-link: meta-pytorch/torchx#1165
X-link: facebookresearch/aepsych#824
X-link: meta-pytorch/botorch#3088

Differential Revision: D87671961
1 parent: 462b77e · commit: e741052

137 files changed: +535 −511 lines changed

(Large commits have some content hidden by default; only a subset of the changed files is shown below.)
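Most of the Python churn in this commit is mechanical: the upgraded formatter now lays out multiple context managers using the parenthesized `with` syntax (valid since Python 3.10) instead of chaining them on one logical line. A minimal, self-contained sketch of the before/after, using illustrative targets (an environment patch and `os.getcwd`, not code from this commit):

    import os
    from unittest import mock

    # Before: context managers chained after `with`, split only at call parens.
    with mock.patch.dict(os.environ, {"MODE": "test"}), mock.patch(
        "os.getcwd", return_value="/tmp"
    ):
        assert os.environ["MODE"] == "test" and os.getcwd() == "/tmp"

    # After: managers wrapped in parentheses, one per line, with a trailing
    # comma, each keeping its optional `as` binding. Runtime behavior is
    # identical; only the layout changes.
    with (
        mock.patch.dict(os.environ, {"MODE": "test"}),
        mock.patch("os.getcwd", return_value="/tmp") as mock_cwd,
    ):
        assert os.getcwd() == "/tmp"
        assert mock_cwd.called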

.pre-commit-config.yaml

Lines changed: 4 additions & 4 deletions
@@ -11,13 +11,13 @@ repos:
           - PyYAML
 
   - repo: https://github.com/omnilib/ufmt
-    rev: v2.8.0
+    rev: v2.9.0
     hooks:
       - id: ufmt
         additional_dependencies:
-          - black==24.4.2
-          - usort==1.0.8.post1
-          - ruff-api==0.1.0
+          - black==25.11.0
+          - usort==1.1.0
+          - ruff-api==0.2.0
           - stdlibs==2024.1.28
         args: [format]
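With the hook versions bumped, regenerating this formatting across a checkout is mechanical, e.g. `pre-commit run --all-files` in a standard pre-commit setup; the Python diffs below are the kind of output such a run produces.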

ax/adapter/tests/test_base_adapter.py

Lines changed: 17 additions & 10 deletions
@@ -618,10 +618,13 @@ def test_set_status_quo_with_multiple_observations(self) -> None:
         # Fetch constraint metric an additional time. This will lead to two
         # separate observations for the status quo arm.
         exp.fetch_data(metrics=[exp.metrics["branin_map_constraint"]])
-        with self.assertNoLogs(logger=logger, level="WARN"), mock.patch(
-            "ax.adapter.base._combine_multiple_status_quo_observations",
-            wraps=_combine_multiple_status_quo_observations,
-        ) as mock_combine:
+        with (
+            self.assertNoLogs(logger=logger, level="WARN"),
+            mock.patch(
+                "ax.adapter.base._combine_multiple_status_quo_observations",
+                wraps=_combine_multiple_status_quo_observations,
+            ) as mock_combine,
+        ):
             adapter = Adapter(
                 experiment=exp,
                 generator=Generator(),
@@ -659,9 +662,12 @@ def test_set_status_quo_with_multiple_observations(self) -> None:
         )
 
         # Case 2: Experiment has an optimization config with no map metrics
-        with mock.patch(
-            "ax.adapter.base.has_map_metrics", return_value=False
-        ) as mock_extract, self.assertLogs(logger=logger, level="WARN") as mock_logs:
+        with (
+            mock.patch(
+                "ax.adapter.base.has_map_metrics", return_value=False
+            ) as mock_extract,
+            self.assertLogs(logger=logger, level="WARN") as mock_logs,
+        ):
             adapter = Adapter(
                 experiment=exp,
                 generator=Generator(),
@@ -1131,9 +1137,10 @@ def mock_predict(
         self.assertTrue(np.allclose(f["m1"], np.ones(3) * 2.0))
 
         # Test for error if an observation is dropped.
-        with mock.patch.object(
-            adapter, "_predict", side_effect=mock_predict
-        ), self.assertRaisesRegex(ModelError, "Predictions resulted in fewer"):
+        with (
+            mock.patch.object(adapter, "_predict", side_effect=mock_predict),
+            self.assertRaisesRegex(ModelError, "Predictions resulted in fewer"),
+        ):
             adapter.predict(
                 observation_features=[
                     ObservationFeatures(parameters={"x": 3.0, "y": 4.0}),

ax/adapter/tests/test_cross_validation.py

Lines changed: 13 additions & 10 deletions
@@ -138,16 +138,19 @@ def test_cross_validate_base(self) -> None:
             np.array_equal(sorted(all_test), np.array([2.0, 2.0, 3.0, 4.0]))
         )
         # Test LOO in transformed space
-        with mock.patch.object(
-            self.adapter,
-            "_transform_inputs_for_cv",
-            wraps=self.adapter._transform_inputs_for_cv,
-        ) as mock_transform_cv, mock.patch.object(
-            self.adapter,
-            "_cross_validate",
-            side_effect=lambda **kwargs: [self.observation_data]
-            * len(kwargs["cv_test_points"]),
-        ) as mock_cv:
+        with (
+            mock.patch.object(
+                self.adapter,
+                "_transform_inputs_for_cv",
+                wraps=self.adapter._transform_inputs_for_cv,
+            ) as mock_transform_cv,
+            mock.patch.object(
+                self.adapter,
+                "_cross_validate",
+                side_effect=lambda **kwargs: [self.observation_data]
+                * len(kwargs["cv_test_points"]),
+            ) as mock_cv,
+        ):
             result = cross_validate(model=self.adapter, folds=-1, untransform=False)
             result_predicted_obs_data = [cv_result.predicted for cv_result in result]
             self.assertEqual(result_predicted_obs_data, [self.observation_data] * 4)

ax/adapter/tests/test_hierarchical_search_space.py

Lines changed: 0 additions & 2 deletions
@@ -12,7 +12,6 @@
 
 from ax.adapter.cross_validation import cross_validate
 from ax.adapter.registry import Generators
-
 from ax.core.experiment import Experiment
 from ax.core.objective import Objective
 from ax.core.observation import ObservationFeatures
@@ -23,7 +22,6 @@
     ParameterType,
     RangeParameter,
 )
-
 from ax.core.search_space import SearchSpace
 from ax.core.trial import Trial
 from ax.metrics.noisy_function import GenericNoisyFunctionMetric
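The import-block diffs in this file (and in test_bilog_y.py at the bottom) only delete blank lines between imports; that is plausibly the usort side of the toolchain bump rather than black, since usort, not black, owns import-block layout in ufmt.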

ax/adapter/tests/test_torch_adapter.py

Lines changed: 29 additions & 19 deletions
@@ -205,9 +205,12 @@ def test_TorchAdapter(self, device: torch.device | None = None) -> None:
         pending_observations = {
             "y2": [ObservationFeatures(parameters={"x1": 1.0, "x2": 2.0, "x3": 3.0})]
         }
-        with ExitStack() as es, mock.patch.object(
-            generator, "gen", return_value=gen_return_value
-        ) as mock_gen:
+        with (
+            ExitStack() as es,
+            mock.patch.object(
+                generator, "gen", return_value=gen_return_value
+            ) as mock_gen,
+        ):
             es.enter_context(
                 mock.patch.object(
                     generator, "best_point", return_value=best_point_return_value
@@ -318,9 +321,10 @@ def test_evaluate_acquisition_function(self) -> None:
         obsf = ObservationFeatures(parameters={"x1": 1.0, "x2": 2.0})
 
         # Check for value error when optimization config is not set.
-        with mock.patch.object(
-            adapter, "_optimization_config", None
-        ), self.assertRaisesRegex(ValueError, "optimization_config"):
+        with (
+            mock.patch.object(adapter, "_optimization_config", None),
+            self.assertRaisesRegex(ValueError, "optimization_config"),
+        ):
             adapter.evaluate_acquisition_function(observation_features=[obsf])
 
         mock_acq_val = 5.0
@@ -413,11 +417,14 @@ def test_best_point(self) -> None:
         gen_return_value = TorchGenResults(
             points=torch.tensor([[1.0]]), weights=torch.tensor([1.0])
         )
-        with mock.patch(
-            f"{TorchGenerator.__module__}.TorchGenerator.best_point",
-            return_value=torch.tensor([best_point_value]),
-            autospec=True,
-        ), mock.patch.object(adapter, "predict", return_value=predict_return_value):
+        with (
+            mock.patch(
+                f"{TorchGenerator.__module__}.TorchGenerator.best_point",
+                return_value=torch.tensor([best_point_value]),
+                autospec=True,
+            ),
+            mock.patch.object(adapter, "predict", return_value=predict_return_value),
+        ):
             with mock.patch.object(
                 adapter.generator, "gen", return_value=gen_return_value
             ):
@@ -814,14 +821,17 @@ def test_gen_metadata_untransform(self) -> None:
             weights=torch.tensor([1.0]),
             gen_metadata={Keys.EXPECTED_ACQF_VAL: [1.0], **additional_metadata},
         )
-        with mock.patch.object(
-            adapter,
-            "_untransform_objective_thresholds",
-            wraps=adapter._untransform_objective_thresholds,
-        ) as mock_untransform, mock.patch.object(
-            generator,
-            "gen",
-            return_value=gen_return_value,
+        with (
+            mock.patch.object(
+                adapter,
+                "_untransform_objective_thresholds",
+                wraps=adapter._untransform_objective_thresholds,
+            ) as mock_untransform,
+            mock.patch.object(
+                generator,
+                "gen",
+                return_value=gen_return_value,
+            ),
         ):
             adapter.gen(n=1)
             if additional_metadata.get("objective_thresholds", None) is None:

ax/adapter/tests/test_torch_moo_adapter.py

Lines changed: 12 additions & 9 deletions
@@ -642,15 +642,18 @@ def test_infer_objective_thresholds(self, _, cuda: bool = False) -> None:
             torch_device=torch.device("cuda" if cuda else "cpu"),
         )
         self.assertIn("Cast", adapter.transforms)
-        with patch.object(
-            adapter,
-            "_untransform_objective_thresholds",
-            wraps=adapter._untransform_objective_thresholds,
-        ) as mock_untransform, patch.object(
-            adapter.transforms["Cast"],
-            "untransform_observation_features",
-            wraps=adapter.transforms["Cast"].untransform_observation_features,
-        ) as wrapped_cast:
+        with (
+            patch.object(
+                adapter,
+                "_untransform_objective_thresholds",
+                wraps=adapter._untransform_objective_thresholds,
+            ) as mock_untransform,
+            patch.object(
+                adapter.transforms["Cast"],
+                "untransform_observation_features",
+                wraps=adapter.transforms["Cast"].untransform_observation_features,
+            ) as wrapped_cast,
+        ):
             obj_thresholds = adapter.infer_objective_thresholds(
                 search_space=exp.search_space,
                 optimization_config=exp.optimization_config,

ax/adapter/torch.py

Lines changed: 4 additions & 3 deletions
@@ -732,9 +732,10 @@ def _get_fit_args(
         if update_outcomes_and_parameters:
             self.outcomes = ordered_outcomes
         else:
-            assert (
-                ordered_outcomes == self.outcomes
-            ), f"Unexpected ordering of outcomes: {ordered_outcomes} != {self.outcomes}"
+            assert ordered_outcomes == self.outcomes, (
+                "Unexpected ordering of outcomes: "
+                f"{ordered_outcomes} != {self.outcomes}"
+            )
         return datasets, candidate_metadata, search_space_digest
 
     def _fit(
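The formatter upgrade also restyles long assert statements, here and in task_encode.py below: the condition moves back onto the `assert` line and the message, rather than the condition, gets parenthesized, so a long message can wrap via implicit string concatenation. A runnable sketch with made-up values:

    expected = ["m1", "m2"]
    actual = ["m1", "m2"]

    # Before: parenthesized condition with the message trailing the paren.
    assert (
        actual == expected
    ), f"Unexpected ordering of outcomes: {actual} != {expected}"

    # After: inline condition; parenthesized message that can wrap cleanly.
    assert actual == expected, (
        "Unexpected ordering of outcomes: "
        f"{actual} != {expected}"
    )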

ax/adapter/transforms/stratified_standardize_y.py

Lines changed: 1 addition & 2 deletions
@@ -124,8 +124,7 @@ def __init__(
         experiment_data = none_throws(experiment_data)
         if len(experiment_data.observation_data.index.names) > 2:
             raise NotImplementedError(
-                "StratifiedStandardizeY does not support experiment data with "
-                "map keys."
+                "StratifiedStandardizeY does not support experiment data with map keys."
             )
         strata = (
             experiment_data.arm_data[self.p_name]

ax/adapter/transforms/task_encode.py

Lines changed: 3 additions & 3 deletions
@@ -40,9 +40,9 @@ def __init__(
         adapter: Optional["adapter_module.base.Adapter"] = None,
         config: TConfig | None = None,
     ) -> None:
-        assert (
-            search_space is not None
-        ), "TaskChoiceToIntTaskChoice requires search space"
+        assert search_space is not None, (
+            "TaskChoiceToIntTaskChoice requires search space"
+        )
         super().__init__(
             search_space=search_space,
             experiment_data=experiment_data,

ax/adapter/transforms/tests/test_bilog_y.py

Lines changed: 0 additions & 1 deletion
@@ -13,7 +13,6 @@
 from itertools import product
 
 import numpy as np
-
 from ax.adapter.base import Adapter, DataLoaderConfig
 from ax.adapter.data_utils import extract_experiment_data
 from ax.adapter.transforms.bilog_y import bilog_transform, BilogY, inv_bilog_transform
