
Commit 03f57f9

Borda authored and lexierule committed
docformatter: config with black (#18064)
* docformatter: config with black
* additional_dependencies: [tomli]
* 119
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* fix
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci

---------

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>

(cherry picked from commit efa7b2f)
1 parent cbf53ca commit 03f57f9

File tree

345 files changed: +1203, -424 lines

Note: large commits have some content hidden by default, so only a subset of the 345 changed files is shown below.
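Nearly every hunk below is the same mechanical change: with the new black-compatible docformatter configuration, multi-line docstrings get a blank line before their closing quotes. A hypothetical before/after sketch (the `scale` function is illustrative, not from the commit):

    # before reformatting
    def scale(x: int, factor: int = 2) -> int:
        """Scale the input.

        Args:
            x: the value to scale.
            factor: the multiplier to apply.
        """
        return x * factor

    # after docformatter with black-compatible settings: a blank line is
    # inserted before the closing quotes of the multi-line docstring
    def scale(x: int, factor: int = 2) -> int:
        """Scale the input.

        Args:
            x: the value to scale.
            factor: the multiplier to apply.

        """
        return x * factor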

.actions/assistant.py

Lines changed: 6 additions & 0 deletions
@@ -81,6 +81,7 @@ def adjust(self, unfreeze: str) -> str:
         'arrow>=1.2.0'
         >>> _RequirementWithComment("arrow").adjust("major")
         'arrow'
+
         """
         out = str(self)
         if self.strict:
@@ -110,6 +111,7 @@ def _parse_requirements(strs: Union[str, Iterable[str]]) -> Iterator[_Requiremen
     >>> txt = '\\n'.join(txt)
     >>> [r.adjust('none') for r in _parse_requirements(txt)]
     ['this', 'example', 'foo # strict', 'thing']
+
     """
     lines = yield_lines(strs)
     pip_argument = None
@@ -144,6 +146,7 @@ def load_requirements(path_dir: str, file_name: str = "base.txt", unfreeze: str
     >>> path_req = os.path.join(_PROJECT_ROOT, "requirements")
     >>> load_requirements(path_req, "docs.txt", unfreeze="major") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
     ['sphinx<...]
+
     """
     assert unfreeze in {"none", "major", "all"}
     path = Path(path_dir) / file_name
@@ -157,6 +160,7 @@ def load_readme_description(path_dir: str, homepage: str, version: str) -> str:

     >>> load_readme_description(_PROJECT_ROOT, "", "") # doctest: +ELLIPSIS +NORMALIZE_WHITESPACE
     '...PyTorch Lightning is just organized PyTorch...'
+
     """
     path_readme = os.path.join(path_dir, "README.md")
     with open(path_readme, encoding="utf-8") as fo:
@@ -236,6 +240,7 @@ def _load_aggregate_requirements(req_dir: str = "requirements", freeze_requireme
     """Load all base requirements from all particular packages and prune duplicates.

     >>> _load_aggregate_requirements(os.path.join(_PROJECT_ROOT, "requirements"))
+
     """
     requires = [
         load_requirements(d, unfreeze="none" if freeze_requirements else "major")
@@ -292,6 +297,7 @@ def _replace_imports(lines: List[str], mapping: List[Tuple[str, str]], lightning
     'http://pytorch_lightning.ai', \
     'from lightning_fabric import __version__', \
     '@lightning.ai']
+
     """
     out = lines[:]
     for source_import, target_import in mapping:
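The docstrings touched in assistant.py carry doctests, so a quick way to confirm the reformatting did not break them is to run the module's doctests. A minimal sketch, assuming .actions/assistant.py is importable as `assistant` (that name and path setup are assumptions, not part of the commit):

    import doctest

    import assistant  # assumption: .actions/assistant.py is on sys.path

    # run the embedded examples with the same flags the docstrings request
    results = doctest.testmod(assistant, optionflags=doctest.ELLIPSIS | doctest.NORMALIZE_WHITESPACE)
    print(f"{results.attempted} examples run, {results.failed} failed")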

.pre-commit-config.yaml

Lines changed: 2 additions & 1 deletion
@@ -60,7 +60,8 @@ repos:
     rev: v1.7.3
     hooks:
       - id: docformatter
-        args: [--in-place, --wrap-summaries=115, --wrap-descriptions=120]
+        additional_dependencies: [tomli]
+        args: ["--in-place"]

   - repo: https://github.com/asottile/yesqa
     rev: v1.5.0
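The new `additional_dependencies: [tomli]` line is what lets docformatter read its settings from pyproject.toml: Python below 3.11 has no standard-library TOML parser, so the hook environment needs the tomli backport. A minimal sketch of that loading pattern (the `[tool.docformatter]` table name follows docformatter's documented convention; the exact keys in this repository are not shown in this excerpt):

    import sys

    if sys.version_info >= (3, 11):
        import tomllib  # standard-library TOML parser since Python 3.11
    else:
        import tomli as tomllib  # backport installed via additional_dependencies

    with open("pyproject.toml", "rb") as f:  # TOML files must be opened in binary mode
        settings = tomllib.load(f).get("tool", {}).get("docformatter", {})
    print(settings)

The wrap widths previously passed on the command line (--wrap-summaries=115, --wrap-descriptions=120) presumably moved into that table; the bare "119" in the commit message suggests the new wrap length, though the pyproject.toml diff itself is hidden in this excerpt.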

docs/source-app/examples/file_server/app.py

Lines changed: 2 additions & 0 deletions
@@ -23,6 +23,7 @@ def __init__(
             drive: The drive can share data inside your application.
             base_dir: The local directory where the data will be stored.
             chunk_size: The quantity of bytes to download/upload at once.
+
         """
         super().__init__(
             cloud_build_config=L.BuildConfig(["flask, flask-cors"]),
@@ -238,4 +239,5 @@ def test_file_server_in_cloud():

     # 2. By calling logs = get_logs_fn(),
     # you get all the logs currently on the admin page.
+
     """

docs/source-app/examples/github_repo_runner/app.py

Lines changed: 1 addition & 0 deletions
@@ -36,6 +36,7 @@ def __init__(
             script_args: The arguments to be provided to the script.
             requirements: The python requirements tp run the script.
             cloud_compute: The object to select the cloud instance.
+
         """
         super().__init__(
             script_path=script_path,

docs/source-app/examples/model_server_app/locust_component.py

Lines changed: 1 addition & 0 deletions
@@ -10,6 +10,7 @@ def __init__(self, num_users: int = 100):

         Arguments:
             num_users: Number of users emulated by Locust
+
         """
         # Note: Using the default port 8089 of Locust.
         super().__init__(

docs/source-app/examples/model_server_app/model_server.py

Lines changed: 2 additions & 0 deletions
@@ -18,6 +18,7 @@ class MLServer(LightningWork):
             Example: "mlserver_sklearn.SKLearnModel".
             Learn more here: $ML_SERVER_URL/tree/master/runtimes
         workers: Number of server worker.
+
     """

     def __init__(
@@ -51,6 +52,7 @@ def run(self, model_path: Path):

         Arguments:
             model_path: The path to the trained model.
+
         """
         # 1: Use the host and port at runtime so it works in the cloud.
         # $ML_SERVER_URL/blob/master/mlserver/settings.py#L50

examples/app/hpo/utils.py

Lines changed: 1 addition & 0 deletions
@@ -15,6 +15,7 @@ def download_data(url: str, path: str = "data/", verbose: bool = False) -> None:

     Usage:
         download_file('http://web4host.net/5MB.zip')
+
     """
     if url == "NEED_TO_BE_CREATED":
         raise NotImplementedError

examples/app/layout/app.py

Lines changed: 1 addition & 0 deletions
@@ -5,6 +5,7 @@
     lightning run app examples/layout/demo.py

 This starts one server for each flow that returns a UI. Access the UI at the link printed in the terminal.
+
 """

 import os

examples/fabric/build_your_own_trainer/trainer.py

Lines changed: 11 additions & 2 deletions
@@ -137,6 +137,7 @@ def fit(
                 If not specified, no validation will run.
             ckpt_path: Path to previous checkpoints to resume training from.
                 If specified, will always look for the latest checkpoint within the given directory.
+
         """
         self.fabric.launch()

@@ -207,6 +208,7 @@ def train_loop(
                 If greater then the number of batches in the ``train_loader``, this has no effect.
             scheduler_cfg: The learning rate scheduler configuration.
                 Have a look at :meth:`lightning.pytorch.LightninModule.configure_optimizers` for supported values.
+
         """
         self.fabric.call("on_train_epoch_start")
         iterable = self.progbar_wrapper(
@@ -269,6 +271,7 @@ def val_loop(
             val_loader: The dataloader yielding the validation batches.
             limit_batches: Limits the batches during this validation epoch.
                 If greater then the number of batches in the ``val_loader``, this has no effect.
+
         """
         # no validation if val_loader wasn't passed
         if val_loader is None:
@@ -313,13 +316,14 @@ def val_loop(
         torch.set_grad_enabled(True)

     def training_step(self, model: L.LightningModule, batch: Any, batch_idx: int) -> torch.Tensor:
-        """A single training step, running forward and backward. The optimizer step is called separately, as this
-        is given as a closure to the optimizer step.
+        """A single training step, running forward and backward. The optimizer step is called separately, as this is
+        given as a closure to the optimizer step.

         Args:
             model: the lightning module to train
             batch: the batch to run the forward on
             batch_idx: index of the current batch w.r.t the current epoch
+
         """
         outputs: Union[torch.Tensor, Mapping[str, Any]] = model.training_step(batch, batch_idx=batch_idx)

@@ -349,6 +353,7 @@ def step_scheduler(
                 Have a look at :meth:`lightning.pytorch.LightninModule.configure_optimizers` for supported values.
             level: whether we are trying to step on epoch- or step-level
             current_value: Holds the current_epoch if ``level==epoch``, else holds the ``global_step``
+
         """

         # no scheduler
@@ -397,6 +402,7 @@ def progbar_wrapper(self, iterable: Iterable, total: int, **kwargs: Any):
         Args:
             iterable: the iterable to wrap with tqdm
             total: the total length of the iterable, necessary in case the number of batches was limited.
+
         """
         if self.fabric.is_global_zero:
             return tqdm(iterable, total=total, **kwargs)
@@ -408,6 +414,7 @@ def load(self, state: Optional[Mapping], path: str) -> None:
         Args:
             state: a mapping contaning model, optimizer and lr scheduler
             path: the path to load the checkpoint from
+
         """
         if state is None:
             state = {}
@@ -460,6 +467,7 @@ def _parse_optimizers_schedulers(
         Args:
             configure_optim_output: The output of ``configure_optimizers``.
                 For supported values, please refer to :meth:`lightning.pytorch.LightningModule.configure_optimizers`.
+
         """
         _lr_sched_defaults = {"interval": "epoch", "frequency": 1, "monitor": "val_loss"}

@@ -513,6 +521,7 @@ def _format_iterable(
             prog_bar: a progressbar (on global rank zero) or an iterable (every other rank).
             candidates: the values to add as postfix strings to the progressbar.
             prefix: the prefix to add to each of these values.
+
         """
         if isinstance(prog_bar, tqdm) and candidates is not None:
             postfix_str = ""
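The reworded `training_step` docstring describes the closure pattern: the forward/backward pass is wrapped in a callable that is handed to `optimizer.step`, so optimizers that need to re-evaluate the loss (LBFGS, for example) can invoke it themselves. A minimal standalone sketch of that pattern, not the trainer's actual code:

    import torch

    model = torch.nn.Linear(4, 1)
    optimizer = torch.optim.SGD(model.parameters(), lr=0.1)
    batch, target = torch.randn(8, 4), torch.randn(8, 1)

    def closure() -> torch.Tensor:
        # the "single training step": forward and backward, but no optimizer step
        optimizer.zero_grad()
        loss = torch.nn.functional.mse_loss(model(batch), target)
        loss.backward()
        return loss

    # the optimizer step is called separately and receives the step as a closure
    optimizer.step(closure)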

examples/fabric/image_classifier/train_fabric.py

Lines changed: 1 addition & 0 deletions
@@ -25,6 +25,7 @@
 Accelerate your training loop by setting the ``--accelerator``, ``--strategy``, ``--devices`` options directly from
 the command line. See ``lightning run model --help`` or learn more from the documentation:
 https://lightning.ai/docs/fabric.
+
 """

 import argparse
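The module docstring above points at Fabric's command-line flags; the same choices can be made directly in code. A minimal sketch, assuming the documented `lightning.fabric.Fabric` constructor (argument values are illustrative):

    from lightning.fabric import Fabric

    # mirrors the --accelerator/--strategy/--devices command-line options
    fabric = Fabric(accelerator="auto", strategy="auto", devices=1)
    fabric.launch()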
