Skip to content

Commit d6c9373

Browse files
Irina Nicolae
authored and committed
Rename labels in classifier
1 parent 9e5258d commit d6c9373

File tree

4 files changed

+15
-15
lines changed

4 files changed

+15
-15
lines changed

art/classifiers/classifier.py

Lines changed: 3 additions & 3 deletions
Original file line number | Diff line number | Diff line change
@@ -113,14 +113,14 @@ def class_gradient(self, x, logits=False):
113113
raise NotImplementedError
114114

115115
@abc.abstractmethod
116-
def loss_gradient(self, x, labels):
116+
def loss_gradient(self, x, y):
117117
"""
118118
Compute the gradient of the loss function w.r.t. `x`.
119119
120120
:param x: Sample input with shape as expected by the model.
121121
:type x: `np.ndarray`
122-
:param labels: Correct labels, one-vs-rest encoding.
123-
:type labels: `np.ndarray`
122+
:param y: Correct labels, one-vs-rest encoding.
123+
:type y: `np.ndarray`
124124
:return: Array of gradients of the same shape as `x`.
125125
:rtype: `np.ndarray`
126126
"""

art/classifiers/keras.py

Lines changed: 4 additions & 4 deletions
Original file line number | Diff line number | Diff line change
@@ -69,18 +69,18 @@ def __init__(self, clip_values, model, use_logits=False, channel_index=3, defenc
6969
self._class_grads = k.function([self._input], class_grads)
7070
self._preds = k.function([self._input], [preds])
7171

72-
def loss_gradient(self, x, labels):
72+
def loss_gradient(self, x, y):
7373
"""
7474
Compute the gradient of the loss function w.r.t. `x`.
7575
7676
:param x: Sample input with shape as expected by the model.
7777
:type x: `np.ndarray`
78-
:param labels: Correct labels, one-vs-rest encoding.
79-
:type labels: `np.ndarray`
78+
:param y: Correct labels, one-vs-rest encoding.
79+
:type y: `np.ndarray`
8080
:return: Array of gradients of the same shape as `x`.
8181
:rtype: `np.ndarray`
8282
"""
83-
return self._loss_grads([x, np.argmax(labels, axis=1)])[0]
83+
return self._loss_grads([x, np.argmax(y, axis=1)])[0]
8484

8585
def class_gradient(self, x, logits=False):
8686
"""

art/classifiers/pytorch.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -170,14 +170,14 @@ def class_gradient(self, x, logits=False):
170170

171171
return grds
172172

173-
def loss_gradient(self, x, labels):
173+
def loss_gradient(self, x, y):
174174
"""
175175
Compute the gradient of the loss function w.r.t. `x`.
176176
177177
:param x: Sample input with shape as expected by the model.
178178
:type x: `np.ndarray`
179-
:param labels: Correct labels, one-vs-rest encoding.
180-
:type labels: `np.ndarray`
179+
:param y: Correct labels, one-vs-rest encoding.
180+
:type y: `np.ndarray`
181181
:return: Array of gradients of the same shape as `x`.
182182
:rtype: `np.ndarray`
183183
"""
@@ -187,7 +187,7 @@ def loss_gradient(self, x, labels):
187187
inputs_t.requires_grad = True
188188

189189
# Convert the labels to Tensors
190-
labels_t = torch.from_numpy(labels)
190+
labels_t = torch.from_numpy(y)
191191

192192
# Compute the gradient and return
193193
(_, m_output) = self._model(inputs_t)

art/classifiers/tensorflow.py

Lines changed: 4 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -161,14 +161,14 @@ def class_gradient(self, x, logits=False):
161161

162162
return grds
163163

164-
def loss_gradient(self, x, labels):
164+
def loss_gradient(self, x, y):
165165
"""
166166
Compute the gradient of the loss function w.r.t. `x`.
167167
168168
:param x: Sample input with shape as expected by the model.
169169
:type x: `np.ndarray`
170-
:param labels: Correct labels, one-vs-rest encoding.
171-
:type labels: `np.ndarray`
170+
:param y: Correct labels, one-vs-rest encoding.
171+
:type y: `np.ndarray`
172172
:return: Array of gradients of the same shape as `x`.
173173
:rtype: `np.ndarray`
174174
"""
@@ -177,6 +177,6 @@ def loss_gradient(self, x, labels):
177177
raise ValueError("Need the loss function to compute the loss gradient.")
178178

179179
# Compute the gradient and return
180-
grds = self._sess.run(self._loss_grads, feed_dict={self._input_ph: x, self._output_ph: labels})
180+
grds = self._sess.run(self._loss_grads, feed_dict={self._input_ph: x, self._output_ph: y})
181181

182182
return grds

0 commit comments

Comments (0)