Commit 0ac8660

Author: Beat Buesser (committed)
Fix docs build
Signed-off-by: Beat Buesser <[email protected]>
1 parent b8a8c8c commit 0ac8660

File tree: 5 files changed (+32, -5 lines)

art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_pytorch.py

Lines changed: 10 additions & 1 deletion
@@ -29,7 +29,6 @@
 from typing import Optional, TYPE_CHECKING
 
 import numpy as np
-import torch
 
 from art.config import ART_NUMPY_DTYPE
 from art.attacks.evasion.projected_gradient_descent.projected_gradient_descent_numpy import (
@@ -121,6 +120,8 @@ def generate(self, x: np.ndarray, y: Optional[np.ndarray] = None, **kwargs) -> n
         :type mask: `np.ndarray`
         :return: An array holding the adversarial examples.
         """
+        import torch
+
         # Check whether random eps is enabled
         self._random_eps()
 
@@ -232,6 +233,8 @@ def _compute_perturbation(self, x: "torch.Tensor", y: "torch.Tensor", mask: "tor
             perturbed.
         :return: Perturbations.
         """
+        import torch
+
         # Pick a small scalar to avoid division by 0
         tol = 10e-8
 
@@ -266,6 +269,8 @@ def _apply_perturbation(self, x: "torch.Tensor", perturbation: "torch.Tensor", e
         :param eps_step: Attack step size (input variation) at each iteration.
         :return: Adversarial examples.
         """
+        import torch
+
         x = x + eps_step * perturbation
 
         if self.estimator.clip_values is not None:
@@ -302,6 +307,8 @@ def _compute_torch(
             original input.
         :return: Adversarial examples.
         """
+        import torch
+
         if random_init:
             n = x.shape[0]
             m = np.prod(x.shape[1:])
@@ -344,6 +351,8 @@ def _projection(self, values: "torch.Tensor", eps: float, norm_p: int) -> "torch
         :param norm_p: L_p norm to use for clipping supporting 1, 2 and `np.Inf`.
         :return: Values of `values` after projection.
         """
+        import torch
+
         # Pick a small scalar to avoid division by 0
         tol = 10e-8
         values_tmp = values.reshape(values.shape[0], -1)
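
Note on the pattern above: the module-level import torch moves into each method body, so the module itself can be imported without PyTorch installed, which is presumably what broke the Sphinx docs build. Type annotations keep working because torch is still imported under the existing TYPE_CHECKING guard and the hints are string literals. A minimal sketch of the same pattern, with an illustrative function that is not part of ART:

from typing import TYPE_CHECKING

if TYPE_CHECKING:
    # Evaluated only by static type checkers, never at runtime, so the
    # docs build does not need PyTorch installed.
    import torch


def double(x: "torch.Tensor") -> "torch.Tensor":
    # Deferred import: PyTorch is required only when the function runs.
    import torch

    return x * torch.tensor(2.0)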

art/attacks/evasion/projected_gradient_descent/projected_gradient_descent_tensorflow_v2.py

Lines changed: 10 additions & 1 deletion
@@ -29,7 +29,6 @@
 from typing import Optional, TYPE_CHECKING
 
 import numpy as np
-import tensorflow as tf
 
 from art.config import ART_NUMPY_DTYPE
 from art.attacks.evasion.projected_gradient_descent.projected_gradient_descent_numpy import (
@@ -120,6 +119,8 @@ def generate(self, x: np.ndarray, y: Optional[np.ndarray] = None, **kwargs) -> n
         :type mask: `np.ndarray`
         :return: An array holding the adversarial examples.
         """
+        import tensorflow as tf
+
         # Check whether random eps is enabled
         self._random_eps()
 
@@ -222,6 +223,8 @@ def _compute_perturbation(self, x: "tf.Tensor", y: "tf.Tensor", mask: "tf.Tensor
             perturbed.
         :return: Perturbations.
         """
+        import tensorflow as tf
+
         # Pick a small scalar to avoid division by 0
         tol = 10e-8
 
@@ -256,6 +259,8 @@ def _apply_perturbation(self, x: "tf.Tensor", perturbation: "tf.Tensor", eps_ste
         :param eps_step: Attack step size (input variation) at each iteration.
         :return: Adversarial examples.
         """
+        import tensorflow as tf
+
         x = x + eps_step * perturbation
 
         if self.estimator.clip_values is not None:
@@ -292,6 +297,8 @@ def _compute_tf(
             original input.
         :return: Adversarial examples.
         """
+        import tensorflow as tf
+
         if random_init:
             n = x.shape[0]
             m = np.prod(x.shape[1:])
@@ -334,6 +341,8 @@ def _projection(values: "tf.Tensor", eps: float, norm_p: int) -> "tf.Tensor":
         :param norm_p: L_p norm to use for clipping supporting 1, 2 and `np.Inf`.
         :return: Values of `values` after projection.
         """
+        import tensorflow as tf
+
         # Pick a small scalar to avoid division by 0
         tol = 10e-8
         values_tmp = tf.reshape(values, (values.shape[0], -1))
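
The TensorFlow file gets the identical treatment with import tensorflow as tf. In principle, the property the commit relies on can be checked in an environment where ART is installed but TensorFlow is not (a sketch, not part of the commit):

import importlib

# Should now succeed even without TensorFlow, because the framework
# import is deferred until an attack method is actually called.
module = importlib.import_module(
    "art.attacks.evasion.projected_gradient_descent.projected_gradient_descent_tensorflow_v2"
)
print(module.__name__)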

art/defences/trainer/adversarial_trainer_FBF_Pytorch.py

Lines changed: 2 additions & 3 deletions
@@ -26,8 +26,6 @@
 import time
 
 import numpy as np
-import torch
-import torch.nn as nn
 
 from art.config import ART_NUMPY_DTYPE
 from art.defences.trainer.adversarial_trainer_FBF import AdversarialTrainerFBF
@@ -200,6 +198,7 @@ def _batch_process(self, x_batch, y_batch, lr):
         :type lr: `float`
         :return: `(float, float, float)`
         """
+        import torch
 
         n = x_batch.shape[0]
         m = np.prod(x_batch.shape[1:])
@@ -239,7 +238,7 @@ def _batch_process(self, x_batch, y_batch, lr):
         loss.backward()
 
         # clip the gradients
-        nn.utils.clip_grad_norm_(self._classifier._model.parameters(), 0.5)
+        torch.nn.utils.clip_grad_norm_(self._classifier._model.parameters(), 0.5)
         self._classifier._optimizer.step()
 
         train_loss = loss.item() * o_batch.size(0)
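
With the torch.nn as nn alias removed, the gradient-clipping call uses the fully qualified path. torch.nn.utils.clip_grad_norm_ rescales the gradients of the given parameters in place so their combined L2 norm does not exceed the given maximum (0.5 here). A self-contained sketch of the call outside ART:

import torch

model = torch.nn.Linear(4, 2)
loss = model(torch.randn(8, 4)).sum()
loss.backward()

# Rescale all gradients in place so their total L2 norm is at most 0.5,
# the same call _batch_process makes above via the fully qualified path.
torch.nn.utils.clip_grad_norm_(model.parameters(), 0.5)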

setup.py

Lines changed: 6 additions & 0 deletions
@@ -28,6 +28,12 @@
     "sphinx_rtd_theme",
     "sphinx-autodoc-annotation",
     "sphinx-autodoc-typehints",
+    "matplotlib",
+    "numpy",
+    "scipy",
+    "six==1.13.0",
+    "scikit-learn==0.22.1",
+    "Pillow>=6.0.0",
 ]
 
 
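The added entries pin the packages the documentation build needs in order to import the library. Only the list items appear in this diff; how the list feeds into setup() is not shown, so the variable name and extras key below are assumptions, not ART's actual setup.py:

from setuptools import setup

# Hypothetical wiring: docs_require and the "docs" extras key are
# illustrative; the real names are outside this diff.
docs_require = [
    "sphinx_rtd_theme",
    "sphinx-autodoc-annotation",
    "sphinx-autodoc-typehints",
    "matplotlib",
    "numpy",
    "scipy",
    "six==1.13.0",
    "scikit-learn==0.22.1",
    "Pillow>=6.0.0",
]

setup(
    name="example-package",
    version="0.0.1",
    extras_require={"docs": docs_require},
)

Wired up this way, the docs toolchain would install with pip install ".[docs]".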
tests/defences/test_adversarial_trainer_FBF.py

Lines changed: 4 additions & 0 deletions
@@ -73,3 +73,7 @@ def test_adversarial_trainer_FBF_Pytorch_fit_and_predict(get_adv_trainer, fix_ge
 
     np.testing.assert_array_almost_equal(accuracy, 0.32, decimal=0.001)
     np.testing.assert_array_almost_equal(accuracy_new, 0.14, decimal=0.001)
+
+
+if __name__ == "__main__":
+    pytest.cmdline.main("-q -s {} --mlFramework=pytorch --durations=0".format(__file__).split(" "))
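
The new __main__ block makes the test module directly runnable: python tests/defences/test_adversarial_trainer_FBF.py hands the file to pytest with quiet output (-q), output capture disabled (-s), per-test timings for every test (--durations=0), and the project-defined framework selector (--mlFramework=pytorch).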
