Skip to content

Commit d902516

Browse files
authored
fix: typos in documentation files (#20582)
1 parent ea59e40 commit d902516

File tree

3 files changed

+4
-4
lines changed

3 files changed

+4
-4
lines changed

examples/fabric/reinforcement_learning/train_fabric_decoupled.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -274,7 +274,7 @@ def trainer(
274274
if group_rank == 0:
275275
metrics = {}
276276

277-
# Lerning rate annealing
277+
# Learning rate annealing
278278
if args.anneal_lr:
279279
linear_annealing(optimizer, update, num_updates, args.learning_rate)
280280
if group_rank == 0:

tests/tests_fabric/loggers/test_csv.py

Lines changed: 2 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def test_automatic_step_tracking(tmp_path):
148148

149149

150150
@mock.patch(
151-
# Mock the existance check, so we can simulate appending to the metrics file
151+
# Mock the existence check, so we can simulate appending to the metrics file
152152
"lightning.fabric.loggers.csv_logs._ExperimentWriter._check_log_dir_exists"
153153
)
154154
def test_append_metrics_file(_, tmp_path):
@@ -189,7 +189,7 @@ def test_append_columns(tmp_path):
189189

190190

191191
@mock.patch(
192-
# Mock the existance check, so we can simulate appending to the metrics file
192+
# Mock the existence check, so we can simulate appending to the metrics file
193193
"lightning.fabric.loggers.csv_logs._ExperimentWriter._check_log_dir_exists"
194194
)
195195
def test_rewrite_with_new_header(_, tmp_path):

tests/tests_pytorch/accelerators/test_xla.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -148,7 +148,7 @@ def on_train_start(self):
148148

149149
def on_train_end(self):
150150
# this might fail if run in an environment with too many ranks, as the total
151-
# length of the dataloader will be distrbuted among them and then each rank might not do 3 steps
151+
# length of the dataloader will be distributed among them and then each rank might not do 3 steps
152152
assert self.called["training_step"] == 3
153153
assert self.called["on_train_batch_start"] == 3
154154
assert self.called["on_train_batch_end"] == 3

0 commit comments

Comments (0)