Skip to content

Commit 95faabf

Browse files
authored
Apply several ruff SIM rules (#37283)
* Apply ruff SIM118 fix — Signed-off-by: cyy <[email protected]>
* Apply ruff SIM910 fix — Signed-off-by: cyy <[email protected]>
* Apply ruff SIM101 fix — Signed-off-by: cyy <[email protected]>
* Format code — Signed-off-by: cyy <[email protected]>
* More fixes — Signed-off-by: cyy <[email protected]>
---------
Signed-off-by: cyy <[email protected]>
1 parent cf97f6c commit 95faabf

File tree

391 files changed

+762
-788
lines changed

Some content is hidden

Large commits have some content hidden by default. Use the search box below for content that may be hidden.

391 files changed

+762
-788
lines changed

examples/flax/image-captioning/run_image_captioning_flax.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -906,7 +906,7 @@ def decay_mask_fn(params):
906906
layer_norm_named_params = {
907907
layer[-2:]
908908
for layer_norm_name in layer_norm_candidates
909-
for layer in flat_params.keys()
909+
for layer in flat_params
910910
if layer_norm_name in "".join(layer).lower()
911911
}
912912
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/language-modeling/run_bart_dlm_flax.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -530,7 +530,7 @@ def main():
530530
trust_remote_code=data_args.trust_remote_code,
531531
)
532532

533-
if "validation" not in datasets.keys():
533+
if "validation" not in datasets:
534534
datasets["validation"] = load_dataset(
535535
data_args.dataset_name,
536536
data_args.dataset_config_name,
@@ -567,7 +567,7 @@ def main():
567567
num_proc=data_args.preprocessing_num_workers,
568568
)
569569

570-
if "validation" not in datasets.keys():
570+
if "validation" not in datasets:
571571
datasets["validation"] = load_dataset(
572572
extension,
573573
data_files=data_files,
@@ -671,7 +671,7 @@ def tokenize_function(examples):
671671
# max_seq_length.
672672
def group_texts(examples):
673673
# Concatenate all texts.
674-
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
674+
concatenated_examples = {k: list(chain(*examples[k])) for k in examples}
675675
total_length = len(concatenated_examples[list(examples.keys())[0]])
676676
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
677677
# customize this part to your needs.
@@ -777,7 +777,7 @@ def decay_mask_fn(params):
777777
layer_norm_named_params = {
778778
layer[-2:]
779779
for layer_norm_name in layer_norm_candidates
780-
for layer in flat_params.keys()
780+
for layer in flat_params
781781
if layer_norm_name in "".join(layer).lower()
782782
}
783783
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/language-modeling/run_clm_flax.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -407,7 +407,7 @@ def main():
407407
trust_remote_code=model_args.trust_remote_code,
408408
)
409409

410-
if "validation" not in dataset.keys():
410+
if "validation" not in dataset:
411411
dataset["validation"] = load_dataset(
412412
data_args.dataset_name,
413413
data_args.dataset_config_name,
@@ -447,7 +447,7 @@ def main():
447447
num_proc=data_args.preprocessing_num_workers,
448448
)
449449

450-
if "validation" not in dataset.keys():
450+
if "validation" not in dataset:
451451
dataset["validation"] = load_dataset(
452452
extension,
453453
data_files=data_files,
@@ -580,7 +580,7 @@ def tokenize_function(examples):
580580
# Main data processing function that will concatenate all texts from our dataset and generate chunks of block_size.
581581
def group_texts(examples):
582582
# Concatenate all texts.
583-
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
583+
concatenated_examples = {k: list(chain(*examples[k])) for k in examples}
584584
total_length = len(concatenated_examples[list(examples.keys())[0]])
585585
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
586586
# customize this part to your needs.
@@ -674,7 +674,7 @@ def decay_mask_fn(params):
674674
layer_norm_named_params = {
675675
layer[-2:]
676676
for layer_norm_name in layer_norm_candidates
677-
for layer in flat_params.keys()
677+
for layer in flat_params
678678
if layer_norm_name in "".join(layer).lower()
679679
}
680680
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/language-modeling/run_mlm_flax.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -448,7 +448,7 @@ def main():
448448
trust_remote_code=model_args.trust_remote_code,
449449
)
450450

451-
if "validation" not in datasets.keys():
451+
if "validation" not in datasets:
452452
datasets["validation"] = load_dataset(
453453
data_args.dataset_name,
454454
data_args.dataset_config_name,
@@ -485,7 +485,7 @@ def main():
485485
num_proc=data_args.preprocessing_num_workers,
486486
)
487487

488-
if "validation" not in datasets.keys():
488+
if "validation" not in datasets:
489489
datasets["validation"] = load_dataset(
490490
extension,
491491
data_files=data_files,
@@ -603,7 +603,7 @@ def tokenize_function(examples):
603603
# max_seq_length.
604604
def group_texts(examples):
605605
# Concatenate all texts.
606-
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
606+
concatenated_examples = {k: list(chain(*examples[k])) for k in examples}
607607
total_length = len(concatenated_examples[list(examples.keys())[0]])
608608
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
609609
# customize this part to your needs.
@@ -707,7 +707,7 @@ def decay_mask_fn(params):
707707
layer_norm_named_params = {
708708
layer[-2:]
709709
for layer_norm_name in layer_norm_candidates
710-
for layer in flat_params.keys()
710+
for layer in flat_params
711711
if layer_norm_name in "".join(layer).lower()
712712
}
713713
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/language-modeling/run_t5_mlm_flax.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -572,7 +572,7 @@ def main():
572572
trust_remote_code=data_args.trust_remote_code,
573573
)
574574

575-
if "validation" not in datasets.keys():
575+
if "validation" not in datasets:
576576
datasets["validation"] = load_dataset(
577577
data_args.dataset_name,
578578
data_args.dataset_config_name,
@@ -609,7 +609,7 @@ def main():
609609
num_proc=data_args.preprocessing_num_workers,
610610
)
611611

612-
if "validation" not in datasets.keys():
612+
if "validation" not in datasets:
613613
datasets["validation"] = load_dataset(
614614
extension,
615615
data_files=data_files,
@@ -703,7 +703,7 @@ def tokenize_function(examples):
703703
# Main data processing function that will concatenate all texts from our dataset and generate chunks of expanded_inputs_length.
704704
def group_texts(examples):
705705
# Concatenate all texts.
706-
concatenated_examples = {k: list(chain(*examples[k])) for k in examples.keys()}
706+
concatenated_examples = {k: list(chain(*examples[k])) for k in examples}
707707
total_length = len(concatenated_examples[list(examples.keys())[0]])
708708
# We drop the small remainder, we could add padding if the model supported it instead of this drop, you can
709709
# customize this part to your needs.
@@ -814,7 +814,7 @@ def decay_mask_fn(params):
814814
layer_norm_named_params = {
815815
layer[-2:]
816816
for layer_norm_name in layer_norm_candidates
817-
for layer in flat_params.keys()
817+
for layer in flat_params
818818
if layer_norm_name in "".join(layer).lower()
819819
}
820820
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/question-answering/run_qa.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -345,7 +345,7 @@ def decay_mask_fn(params):
345345
layer_norm_named_params = {
346346
layer[-2:]
347347
for layer_norm_name in layer_norm_candidates
348-
for layer in flat_params.keys()
348+
for layer in flat_params
349349
if layer_norm_name in "".join(layer).lower()
350350
}
351351
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/speech-recognition/run_flax_speech_recognition_seq2seq.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -668,7 +668,7 @@ def decay_mask_fn(params):
668668
layer_norm_named_params = {
669669
layer[-2:]
670670
for layer_norm_name in layer_norm_candidates
671-
for layer in flat_params.keys()
671+
for layer in flat_params
672672
if layer_norm_name in "".join(layer).lower()
673673
}
674674
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/summarization/run_summarization_flax.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -768,7 +768,7 @@ def decay_mask_fn(params):
768768
layer_norm_named_params = {
769769
layer[-2:]
770770
for layer_norm_name in layer_norm_candidates
771-
for layer in flat_params.keys()
771+
for layer in flat_params
772772
if layer_norm_name in "".join(layer).lower()
773773
}
774774
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/text-classification/run_flax_glue.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -249,7 +249,7 @@ def decay_mask_fn(params):
249249
layer_norm_named_params = {
250250
layer[-2:]
251251
for layer_norm_name in layer_norm_candidates
252-
for layer in flat_params.keys()
252+
for layer in flat_params
253253
if layer_norm_name in "".join(layer).lower()
254254
}
255255
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

examples/flax/token-classification/run_flax_ner.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -310,7 +310,7 @@ def decay_mask_fn(params):
310310
layer_norm_named_params = {
311311
layer[-2:]
312312
for layer_norm_name in layer_norm_candidates
313-
for layer in flat_params.keys()
313+
for layer in flat_params
314314
if layer_norm_name in "".join(layer).lower()
315315
}
316316
flat_mask = {path: (path[-1] != "bias" and path[-2:] not in layer_norm_named_params) for path in flat_params}

0 commit comments

Comments (0)