
Commit d0339e0

remove unviolated ignores. autofix.
1 parent e088694 commit d0339e0

16 files changed (+27, -46 lines)

pyproject.toml

Lines changed: 8 additions & 8 deletions
@@ -93,9 +93,9 @@ ignore = [
     "S101", # todo: Use of `assert` detected
     "S105", "S106", "S107", # todo: Possible hardcoded password: ...
     "S113", # todo: Probable use of requests call without timeout
-    "S301", # todo: `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
-    "S324", # todo: Probable use of insecure hash functions in `hashlib`
-    "S403", # todo: `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
+    # "S301", # todo: `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
+    # "S324", # todo: Probable use of insecure hash functions in `hashlib`
+    # "S403", # todo: `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
     "S404", # todo: `subprocess` module is possibly insecure
     "S602", # todo: `subprocess` call with `shell=True` identified, security issue
     "S603", # todo: `subprocess` call: check for execution of untrusted input
@@ -106,20 +106,20 @@ ignore = [
 ]
 "tests/**" = [
     "S101", # Use of `assert` detected
-    "S105", "S106", # todo: Possible hardcoded password: ...
+    # "S105", "S106", # todo: Possible hardcoded password: ...
     "S301", # `pickle` and modules that wrap it can be unsafe when used to deserialize untrusted data, possible security issue
-    "S113", # todo: Probable use of requests call without timeout
+    # "S113", # todo: Probable use of requests call without timeout
     "S311", # todo: Standard pseudo-random generators are not suitable for cryptographic purposes
     "S108", # todo: Probable insecure usage of temporary file or directory: "/tmp/sys-customizations-sync"
-    "S202", # Uses of `tarfile.extractall()`
+    # "S202", # Uses of `tarfile.extractall()`
     "S403", # `pickle`, `cPickle`, `dill`, and `shelve` modules are possibly insecure
     "S404", # `subprocess` module is possibly insecure
     "S602", # todo: `subprocess` call with `shell=True` identified, security issue
     "S603", # todo: `subprocess` call: check for execution of untrusted input
     "S605", # todo: Starting a process with a shell: seems safe, but may be changed in the future; consider rewriting without `shell`
     "S607", # todo: Starting a process with a partial executable path
-    "RET504", # todo:Unnecessary variable assignment before `return` statement
-    "PT004", # todo: Fixture `tmpdir_unittest_fixture` does not return anything, add leading underscore
+    # "RET504", # todo:Unnecessary variable assignment before `return` statement
+    # "PT004", # todo: Fixture `tmpdir_unittest_fixture` does not return anything, add leading underscore
     "PT012", # todo: `pytest.raises()` block should contain a single simple statement
     "PT019", # todo: Fixture `_` without value is injected as parameter, use `@pytest.mark.usefixtures` instead
 ]
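The pyproject.toml hunks comment out Bandit ignores that no longer match any violation and, in `tests/**`, stop ignoring `RET504` and `PT004`. Every source change below is consistent with ruff's autofix for `RET504` (unnecessary variable assignment before a `return` statement): an assignment whose value is immediately returned is collapsed into returning the expression directly. A minimal sketch of the pattern, using a hypothetical module rather than code from this repo:

import torch
from torch import nn


class TinyNet(nn.Module):
    """Hypothetical two-layer net, here only to illustrate the RET504 autofix."""

    def __init__(self):
        super().__init__()
        self.layer_a = nn.Linear(4, 4)
        self.layer_b = nn.Linear(4, 2)

    def forward(self, x):
        x = self.layer_a(x)
        # Before: `x = self.layer_b(x)` followed by `return x` trips RET504.
        # Something like `ruff check --fix` rewrites it to a direct return:
        return self.layer_b(x)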

tests/parity_fabric/models.py

Lines changed: 1 addition & 2 deletions
@@ -60,8 +60,7 @@ def forward(self, x):
         x = torch.flatten(x, 1)  # flatten all dimensions except batch
         x = F.relu(self.fc1(x))
         x = F.relu(self.fc2(x))
-        x = self.fc3(x)
-        return x
+        return self.fc3(x)
 
     def get_optimizer(self):
         return torch.optim.SGD(self.parameters(), lr=0.0001)

tests/tests_fabric/strategies/test_model_parallel_integration.py

Lines changed: 1 addition & 2 deletions
@@ -83,8 +83,7 @@ def _parallelize_feed_forward_fsdp2(model, device_mesh):
 
 def _parallelize_feed_forward_fsdp2_tp(model, device_mesh):
     model = _parallelize_feed_forward_tp(model, device_mesh)
-    model = _parallelize_feed_forward_fsdp2(model, device_mesh)
-    return model
+    return _parallelize_feed_forward_fsdp2(model, device_mesh)
 
 
 @RunIf(min_torch="2.4", standalone=True, min_cuda_gpus=4)

tests/tests_pytorch/accelerators/test_xla.py

Lines changed: 2 additions & 4 deletions
@@ -46,8 +46,7 @@ def __init__(self):
     def forward(self, x):
         x = self.layer_1(x)
         x = self.layer_2(x)
-        x = self.layer_3(x)
-        return x
+        return self.layer_3(x)
 
 
 @RunIf(tpu=True, standalone=True)
@@ -230,8 +229,7 @@ def __init__(self):
     def forward(self, x):
         x = self.net_a(x)
         x = self.layer_2(x)
-        x = self.net_b(x)
-        return x
+        return self.net_b(x)
 
 
 @RunIf(tpu=True)

tests/tests_pytorch/callbacks/test_lr_monitor.py

Lines changed: 2 additions & 4 deletions
@@ -428,8 +428,7 @@ def __init__(self):
 
     def forward(self, x):
         x = self.linear_a(x)
-        x = self.linear_b(x)
-        return x
+        return self.linear_b(x)
 
     def configure_optimizers(self):
         param_groups = [
@@ -603,8 +602,7 @@ def __init__(self, lr, momentum):
 
     def forward(self, x):
         x = self.linear_a(x)
-        x = self.linear_b(x)
-        return x
+        return self.linear_b(x)
 
     def configure_optimizers(self):
         param_groups = [

tests/tests_pytorch/callbacks/test_spike.py

Lines changed: 1 addition & 2 deletions
@@ -29,8 +29,7 @@ def training_step(self, batch, batch_idx: int):
         if curr_loss_val is None:
             curr_loss_val = batch_idx
 
-        loss = self.layer(torch.tensor(curr_loss_val, device=self.device, dtype=self.dtype).view(1, 1))
-        return loss
+        return self.layer(torch.tensor(curr_loss_val, device=self.device, dtype=self.dtype).view(1, 1))
 
     def configure_optimizers(self):
         return torch.optim.SGD(self.parameters(), lr=1e-3)

tests/tests_pytorch/callbacks/test_stochastic_weight_avg.py

Lines changed: 1 addition & 2 deletions
@@ -266,8 +266,7 @@ def __init__(self):
 
     def forward(self, x):
         x = self.layer1(x)
-        x = self.layer2(x)
-        return x
+        return self.layer2(x)
 
     def configure_optimizers(self):
         params = [{"params": self.layer1.parameters(), "lr": 0.1}, {"params": self.layer2.parameters(), "lr": 0.2}]

tests/tests_pytorch/helpers/advanced_models.py

Lines changed: 2 additions & 4 deletions
@@ -46,8 +46,7 @@ def block(in_feat, out_feat, normalize=True):
 
     def forward(self, z):
         img = self.model(z)
-        img = img.view(img.size(0), *self.img_shape)
-        return img
+        return img.view(img.size(0), *self.img_shape)
 
 
 class Discriminator(nn.Module):
@@ -204,8 +203,7 @@ def forward(self, x):
         x = torch.tanh(x)
         x = self.c_d1_bn(x)
         x = self.c_d1_drop(x)
-        x = self.c_d2(x)
-        return x
+        return self.c_d2(x)
 
     def training_step(self, batch, batch_nb):
         x, y = batch

tests/tests_pytorch/helpers/simple_models.py

Lines changed: 1 addition & 2 deletions
@@ -100,8 +100,7 @@ def forward(self, x):
         x = self.layer_1a(x)
         x = self.layer_2(x)
         x = self.layer_2a(x)
-        x = self.layer_end(x)
-        return x
+        return self.layer_end(x)
 
     def configure_optimizers(self):
         optimizer = torch.optim.Adam(self.parameters(), lr=0.01)

tests/tests_pytorch/models/test_hparams.py

Lines changed: 1 addition & 2 deletions
@@ -417,8 +417,7 @@ def _raw_checkpoint_path(trainer) -> str:
     raw_checkpoint_paths = [x for x in raw_checkpoint_paths if ".ckpt" in x]
     assert raw_checkpoint_paths
     raw_checkpoint_path = raw_checkpoint_paths[0]
-    raw_checkpoint_path = os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path)
-    return raw_checkpoint_path
+    return os.path.join(trainer.checkpoint_callback.dirpath, raw_checkpoint_path)
 
 
 @pytest.mark.parametrize("base_class", [HyperparametersMixin, LightningModule, LightningDataModule])
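One nuance in the hunk above: `raw_checkpoint_path = raw_checkpoint_paths[0]` is kept, because its value feeds into `os.path.join` rather than being returned unchanged; RET504 only flags an assignment immediately followed by a `return` of that same, unmodified variable. A reduced sketch with hypothetical names, not code from this repo:

import os


def pick_checkpoint(dirpath, names):
    # Kept by the autofix: `first` is consumed inside another expression,
    # so RET504 does not apply to this assignment.
    first = sorted(names)[0]
    # An intermediate `joined = os.path.join(...); return joined` would be
    # flagged, so the expression is returned directly instead.
    return os.path.join(dirpath, first)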
