
Commit b24b3f1

esantorella authored and facebook-github-bot committed
Remove :meta private: from documentation so Sphinx doesn't hide subclasses (#2437)
Summary:

## Motivation

* Addresses #2419, classes missing from Sphinx-rendered API documentation.
* Makes ABCs that were previously manually hidden (by me) show up in the documentation. I now think it was a mistake to hide these, as users frequently need to use them.

## Changes in this PR

* Remove `:meta private:` from docstrings. `:meta private:` was present in many ABCs; it prevents them from appearing in the Sphinx-rendered API documentation. It also seems to prevent their subclasses from appearing if the subclasses don't "override" the docstring (see the sketch below).
* Fixed up a docstring in `batch_cross_validation`.
* Removed ABC `HomotopySchedule`, which only had one subclass.

We might want to consider adding a lint to check that `:meta private:` is not added again (a possible check is also sketched below), but I don't expect it to come up too often.

Pull Request resolved: #2437

Test Plan: Built the website locally and confirmed that the documentation looks okay and that formerly missing classes such as `qLogNoisyExpectedHypervolumeImprovement` and `AffineInputTransform` now show up.

Reviewed By: saitcakmak

Differential Revision: D60020672

Pulled By: esantorella

fbshipit-source-id: 3f546dba4404fc9303a2b43f71c9dab5c30d3822
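For context, here is a minimal, illustrative sketch (hypothetical class names, not code from this PR) of the behavior described above: `:meta private:` is a Sphinx docstring field that tells autodoc to skip a member, and because a subclass without its own docstring inherits the base class docstring, the inherited field appears to hide the subclass as well.

```python
from abc import ABC, abstractmethod


class BaseAcquisition(ABC):  # hypothetical name, for illustration only
    """Abstract base class for acquisition functions.

    :meta private:
    """

    @abstractmethod
    def forward(self, X):
        ...


class MyAcquisition(BaseAcquisition):
    # No docstring here: Sphinx autodoc falls back to the inherited base-class
    # docstring, so the inherited ":meta private:" field hides this subclass
    # from the rendered API reference too. Removing the field from the base
    # docstring (as this PR does) lets both classes appear.
    def forward(self, X):
        return sum(X)
```

The lint mentioned above is not part of this PR; a rough sketch of such a check, assuming it is run from the repository root against the `botorch` package, might look like:

```python
# Hypothetical guard (not in this PR): fail if ":meta private:" reappears.
import sys
from pathlib import Path
from typing import List


def find_meta_private(root: str = "botorch") -> List[str]:
    """Return "path:line" locations where ':meta private:' occurs."""
    hits = []
    for path in Path(root).rglob("*.py"):
        for lineno, line in enumerate(path.read_text().splitlines(), start=1):
            if ":meta private:" in line:
                hits.append(f"{path}:{lineno}")
    return hits


if __name__ == "__main__":
    hits = find_meta_private()
    if hits:
        print("Found ':meta private:' in:\n  " + "\n  ".join(hits))
        sys.exit(1)
```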
1 parent b229bf8 commit b24b3f1

27 files changed: +22 -132 lines changed

botorch/acquisition/acquisition.py

Lines changed: 0 additions & 8 deletions
@@ -28,8 +28,6 @@ class AcquisitionFunction(Module, ABC):
     Please note that if your acquisition requires a backwards call,
     you will need to wrap the backwards call inside of an enable_grad
     context to be able to optimize the acquisition. See #1164.
-
-    :meta private:
     """
 
     _log: bool = False  # whether the acquisition utilities are in log-space
@@ -79,8 +77,6 @@ def forward(self, X: Tensor) -> Tensor:
 class OneShotAcquisitionFunction(AcquisitionFunction, ABC):
     r"""
     Abstract base class for acquisition functions using one-shot optimization
-
-    :meta private:
     """
 
     @abstractmethod
@@ -115,8 +111,6 @@ class MCSamplerMixin(ABC):
 
     Attributes:
         _default_sample_shape: The `sample_shape` for the default sampler.
-
-    :meta private:
     """
 
     _default_sample_shape = torch.Size([512])
@@ -169,8 +163,6 @@ class MultiModelAcquisitionFunction(AcquisitionFunction, ABC):
     This is currently only a placeholder to help with some development
     in Ax. We plan to add some acquisition functions utilizing multiple
     models in the future.
-
-    :meta private:
     """
 
     def __init__(self, model_dict: ModelDict) -> None:

botorch/acquisition/analytic.py

Lines changed: 1 addition & 5 deletions
@@ -49,11 +49,7 @@
 
 
 class AnalyticAcquisitionFunction(AcquisitionFunction, ABC):
-    r"""
-    Base class for analytic acquisition functions.
-
-    :meta private:
-    """
+    """Base class for analytic acquisition functions."""
 
     def __init__(
         self,

botorch/acquisition/cached_cholesky.py

Lines changed: 0 additions & 2 deletions
@@ -66,8 +66,6 @@ class CachedCholeskyMCSamplerMixin(MCSamplerMixin):
     Specifically, this is for acquisition functions that require sampling from
     the posterior P(f(X_baseline, X) | D). The Cholesky of the posterior
     covariance over f(X_baseline) is cached.
-
-    :meta private:
     """
 
     def __init__(

botorch/acquisition/cost_aware.py

Lines changed: 1 addition & 6 deletions
@@ -31,11 +31,7 @@
 
 
 class CostAwareUtility(Module, ABC):
-    r"""
-    Abstract base class for cost-aware utilities.
-
-    :meta private:
-    """
+    """Abstract base class for cost-aware utilities."""
 
     @abstractmethod
     def forward(
@@ -55,7 +51,6 @@ def forward(
         Returns:
             A `num_fantasies x batch_shape`-dim Tensor of cost-transformed utilities.
         """
-        pass  # pragma: no cover
 
 
 class GenericCostAwareUtility(CostAwareUtility):

botorch/acquisition/logei.py

Lines changed: 0 additions & 2 deletions
@@ -71,8 +71,6 @@
 class LogImprovementMCAcquisitionFunction(SampleReducingMCAcquisitionFunction):
     r"""
     Abstract base class for Monte-Carlo-based batch LogEI acquisition functions.
-
-    :meta private:
     """
 
     _log: bool = True

botorch/acquisition/max_value_entropy_search.py

Lines changed: 0 additions & 2 deletions
@@ -65,8 +65,6 @@ class MaxValueBase(AcquisitionFunction, ABC):
 
     Subclasses need to implement `_sample_max_values` and `_compute_information_gain`
     methods.
-
-    :meta private:
     """
 
     def __init__(

botorch/acquisition/monte_carlo.py

Lines changed: 0 additions & 2 deletions
@@ -58,8 +58,6 @@
 class MCAcquisitionFunction(AcquisitionFunction, MCSamplerMixin, ABC):
     r"""
     Abstract base class for Monte-Carlo based batch acquisition functions.
-
-    :meta private:
     """
 
     def __init__(

botorch/acquisition/multi_objective/multi_output_risk_measures.py

Lines changed: 0 additions & 2 deletions
@@ -55,8 +55,6 @@ class MultiOutputRiskMeasureMCObjective(
     If the q-batch includes samples corresponding to multiple inputs, it is assumed
     that first `n_w` samples correspond to first input, second `n_w` samples
     correspond to second input, etc.
-
-    :meta private:
     """
 
     def __init__(

botorch/acquisition/multi_objective/objective.py

Lines changed: 1 addition & 1 deletion
@@ -150,7 +150,7 @@ def __init__(
         constraint_idcs: List[int],
         objective: Optional[MCMultiOutputObjective] = None,
     ) -> None:
-        r"""Construct a feasibility weighted objective.
+        r"""Construct a feasibility-weighted objective.
 
         This applies feasibility weighting before calculating the objective value.
         Defaults to identity if no constraints or objective is present.

botorch/acquisition/objective.py

Lines changed: 1 addition & 7 deletions
@@ -32,11 +32,7 @@
 
 
 class PosteriorTransform(Module, ABC):
-    r"""
-    Abstract base class for objectives that transform the posterior.
-
-    :meta private:
-    """
+    """Abstract base class for objectives that transform the posterior."""
 
     @abstractmethod
     def evaluate(self, Y: Tensor) -> Tensor:
@@ -240,8 +236,6 @@ class MCAcquisitionObjective(Module, ABC):
         _verify_output_shape: If True and `X` is given, check that the q-batch
             shape of the objectives agrees with that of X.
         _is_mo: A boolean denoting whether the objectives are multi-output.
-
-    :meta private:
     """
 
     _verify_output_shape: bool = True
