Skip to content

Commit d7816cc

Browse files
committed
formatting
1 parent cddac85 commit d7816cc

File tree

8 files changed

+28
-28
lines changed

8 files changed

+28
-28
lines changed

pina/data/data_module.py

Lines changed: 7 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -20,18 +20,18 @@ class DummyDataloader:
2020
def __init__(self, dataset):
2121
"""
2222
Prepare a dataloader object that returns the entire dataset in a single
23-
batch. Depending on the number of GPUs, the dataset is managed
23+
batch. Depending on the number of GPUs, the dataset is managed
2424
as follows:
2525
26-
- **Distributed Environment** (multiple GPUs): Divides dataset across
27-
processes using the rank and world size. Fetches only portion of
26+
- **Distributed Environment** (multiple GPUs): Divides dataset across
27+
processes using the rank and world size. Fetches only portion of
2828
data corresponding to the current process.
29-
- **Non-Distributed Environment** (single GPU): Fetches the entire
29+
- **Non-Distributed Environment** (single GPU): Fetches the entire
3030
dataset.
3131
3232
:param PinaDataset dataset: The dataset object to be processed.
3333
34-
.. note::
34+
.. note::
3535
This dataloader is used when the batch size is ``None``.
3636
"""
3737

@@ -78,8 +78,8 @@ def __init__(
7878
Initialize the object, setting the collate function based on whether
7979
automatic batching is enabled or not.
8080
81-
:param dict max_conditions_lengths: ``dict`` containing the maximum
82-
number of data points to consider in a single batch for
81+
:param dict max_conditions_lengths: ``dict`` containing the maximum
82+
number of data points to consider in a single batch for
8383
each condition.
8484
:param bool automatic_batching: Whether to enable automatic batching.
8585
:param PinaDataset dataset: The dataset where the data is stored.

pina/data/dataset.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -276,7 +276,7 @@ def create_batch(self, data):
276276
:param data: List of items to collate in a single batch.
277277
:type data: list[Data] | list[Graph]
278278
:return: Batch object.
279-
:rtype: :class:`~torch_geometric.data.Batch`
279+
:rtype: :class:`~torch_geometric.data.Batch`
280280
| :class:`~pina.graph.LabelBatch`
281281
"""
282282

pina/label_tensor.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -389,7 +389,7 @@ def stack(tensors):
389389

390390
def requires_grad_(self, mode=True):
391391
"""
392-
Override the :meth:`~torch.Tensor.requires_grad_` method to handle
392+
Override the :meth:`~torch.Tensor.requires_grad_` method to handle
393393
the labels in the new tensor.
394394
For more details, see :meth:`~torch.Tensor.requires_grad_`.
395395

pina/model/fourier_neural_operator.py

Lines changed: 3 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -325,14 +325,14 @@ def forward(self, x):
325325
``projection_net`` maps the hidden representation to the output
326326
function.
327327
328-
:param x: The input tensor for performing the computation. Depending
328+
:param x: The input tensor for performing the computation. Depending
329329
on the ``dimensions`` in the initialization, it expects a tensor
330330
with the following shapes:
331-
331+
332332
* 1D tensors: ``[batch, X, channels]``
333333
* 2D tensors: ``[batch, X, Y, channels]``
334334
* 3D tensors: ``[batch, X, Y, Z, channels]``
335-
335+
336336
:type x: torch.Tensor | LabelTensor
337337
:return: The output tensor.
338338
:rtype: torch.Tensor

pina/optim/torch_optimizer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -18,7 +18,7 @@ def __init__(self, optimizer_class, **kwargs):
1818
:param torch.optim.Optimizer optimizer_class: A
1919
:class:`torch.optim.Optimizer` class.
2020
:param dict kwargs: Additional parameters passed to ``optimizer_class``,
21-
see more
21+
see more
2222
`here <https://pytorch.org/docs/stable/optim.html#algorithms>`_.
2323
"""
2424
check_consistency(optimizer_class, torch.optim.Optimizer, subclass=True)

pina/optim/torch_scheduler.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ def __init__(self, scheduler_class, **kwargs):
2424
:param torch.optim.LRScheduler scheduler_class: A
2525
:class:`torch.optim.LRScheduler` class.
2626
:param dict kwargs: Additional parameters passed to ``scheduler_class``,
27-
see more
27+
see more
2828
`here <https://pytorch.org/docs/stable/optim.html#algorithms>`_.
2929
"""
3030
check_consistency(scheduler_class, LRScheduler, subclass=True)

pina/problem/abstract_problem.py

Lines changed: 10 additions & 10 deletions
Original file line numberDiff line numberDiff line change
@@ -178,14 +178,14 @@ def discretise_domain(
178178
chebyshev sampling, ``chebyshev``; grid sampling ``grid``.
179179
:param domains: The domains from which to sample. Default is ``all``.
180180
:type domains: str | list[str]
181-
:param dict sample_rules: A dictionary defining custom sampling rules
182-
for input variables. If provided, it must contain a dictionary
183-
specifying the sampling rule for each variable, overriding the
184-
``n`` and ``mode`` arguments. Each key must correspond to the
185-
input variables from
186-
:meth:~pina.problem.AbstractProblem.input_variables, and its value
187-
should be another dictionary with
188-
two keys: ``n`` (number of points to sample) and ``mode``
181+
:param dict sample_rules: A dictionary defining custom sampling rules
182+
for input variables. If provided, it must contain a dictionary
183+
specifying the sampling rule for each variable, overriding the
184+
``n`` and ``mode`` arguments. Each key must correspond to the
185+
input variables from
186+
:meth:`~pina.problem.AbstractProblem.input_variables`, and its value
187+
should be another dictionary with
188+
two keys: ``n`` (number of points to sample) and ``mode``
189189
(sampling method). Defaults to None.
190190
:raises RuntimeError: If both ``n`` and ``sample_rules`` are specified.
191191
:raises RuntimeError: If neither ``n`` nor ``sample_rules`` are set.
@@ -214,8 +214,8 @@ def discretise_domain(
214214
implemented for :class:`~pina.domain.cartesian.CartesianDomain`.
215215
216216
.. warning::
217-
If custom discretisation is applied by setting ``sample_rules`` not
218-
to ``None``, then the discretised domain must be of class
217+
If custom discretisation is applied by setting ``sample_rules`` not
218+
to ``None``, then the discretised domain must be of class
219219
:class:`~pina.domain.cartesian.CartesianDomain`
220220
"""
221221

pina/trainer.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -13,8 +13,8 @@ class Trainer(lightning.pytorch.Trainer):
1313
PINA custom Trainer class to extend the standard Lightning functionality.
1414
1515
This class enables specific features or behaviors required by the PINA
16-
framework. It modifies the standard
17-
:class:`lightning.pytorch.Trainer <lightning.pytorch.trainer.trainer.Trainer>`
16+
framework. It modifies the standard
17+
:class:`lightning.pytorch.Trainer <lightning.pytorch.trainer.trainer.Trainer>`
1818
class to better support the training process in PINA.
1919
"""
2020

@@ -209,7 +209,7 @@ def train(self, **kwargs):
209209
Manage the training process of the solver.
210210
211211
:param dict kwargs: Additional keyword arguments. See `pytorch-lightning
212-
Trainer API <https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api>`_
212+
Trainer API <https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api>`_
213213
for details.
214214
"""
215215
return super().fit(self.solver, datamodule=self.data_module, **kwargs)
@@ -219,7 +219,7 @@ def test(self, **kwargs):
219219
Manage the test process of the solver.
220220
221221
:param dict kwargs: Additional keyword arguments. See `pytorch-lightning
222-
Trainer API <https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api>`_
222+
Trainer API <https://lightning.ai/docs/pytorch/stable/common/trainer.html#trainer-class-api>`_
223223
for details.
224224
"""
225225
return super().test(self.solver, datamodule=self.data_module, **kwargs)

0 commit comments

Comments
 (0)