@@ -60,7 +60,7 @@ def test_mnist(self):
 
     def _test_backend_mnist(self, classifier):
         # Get MNIST
-        (x_train, y_train), (x_test, y_test), _, _ = load_mnist()
+        (x_train, y_train), (x_test, y_test) = self.mnist
         x_train, y_train = x_train[:NB_TRAIN], y_train[:NB_TRAIN]
         x_test, y_test = x_test[:NB_TEST], y_test[:NB_TEST]
 
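Note: the hunk above replaces the per-test `load_mnist()` call with a shared `self.mnist` fixture. The class-level setup that populates this fixture is not part of the diff, so the sketch below is illustrative only; the test class name and the `setUpClass` hook are assumptions, while `load_mnist()` and its return signature come from `art.utils` as used in the removed code further down.

    import unittest

    from art.utils import load_mnist

    NB_TRAIN, NB_TEST = 1000, 100


    class TestFastGradientMethod(unittest.TestCase):
        @classmethod
        def setUpClass(cls):
            # Hypothetical fixture: load MNIST once for the whole test case
            # instead of once per test. load_mnist() also returns the min/max
            # pixel values, which are dropped here.
            (x_train, y_train), (x_test, y_test), _, _ = load_mnist()
            cls.mnist = (x_train, y_train), (x_test, y_test)
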
@@ -130,49 +130,36 @@ def _test_backend_mnist(self, classifier):
         acc = np.sum(np.argmax(test_y_pred, axis=1) == np.argmax(y_test, axis=1)) / y_test.shape[0]
         print('\nAccuracy on adversarial test examples with L2 norm: %.2f%%' % (acc * 100))
 
-    # def test_with_preprocessing(self):
-    #
-    #     session = tf.Session()
-    #     k.set_session(session)
-    #
-    #     comp_params = {"loss": 'categorical_crossentropy',
-    #                    "optimizer": 'adam',
-    #                    "metrics": ['accuracy']}
-    #
-    #     # get MNIST
-    #     batch_size, nb_train, nb_test = 100, 1000, 100
-    #     (X_train, Y_train), (X_test, Y_test), _, _ = load_mnist()
-    #     X_train, Y_train = X_train[:nb_train], Y_train[:nb_train]
-    #     X_test, Y_test = X_test[:nb_test], Y_test[:nb_test]
-    #     im_shape = X_train[0].shape
-    #
-    #     # get classifier
-    #     classifier = CNN(im_shape, act="relu", defences=["featsqueeze1"])
-    #     classifier.compile(comp_params)
-    #     classifier.fit(X_train, Y_train, epochs=1, batch_size=batch_size)
-    #     scores = classifier.evaluate(X_train, Y_train)
-    #     print("\naccuracy on training set: %.2f%%" % (scores[1] * 100))
-    #     scores = classifier.evaluate(X_test, Y_test)
-    #     print("\naccuracy on test set: %.2f%%" % (scores[1] * 100))
-    #
-    #     attack = FastGradientMethod(classifier, eps=1)
-    #     X_train_adv = attack.generate(X_train)
-    #     X_test_adv = attack.generate(X_test)
-    #
-    #     self.assertFalse((X_train == X_train_adv).all())
-    #     self.assertFalse((X_test == X_test_adv).all())
-    #
-    #     train_y_pred = get_labels_np_array(classifier.predict(X_train_adv))
-    #     test_y_pred = get_labels_np_array(classifier.predict(X_test_adv))
-    #
-    #     self.assertFalse((Y_train == train_y_pred).all())
-    #     self.assertFalse((Y_test == test_y_pred).all())
-    #
-    #     scores = classifier.evaluate(X_train_adv, Y_train)
-    #     print('\naccuracy on adversarial train examples: %.2f%%' % (scores[1] * 100))
-    #
-    #     scores = classifier.evaluate(X_test_adv, Y_test)
-    #     print('\naccuracy on adversarial test examples: %.2f%%' % (scores[1] * 100))
+    def test_with_defences(self):
+        # Get MNIST
+        (x_train, y_train), (x_test, y_test) = self.mnist
+        x_train, y_train = x_train[:NB_TRAIN], y_train[:NB_TRAIN]
+        x_test, y_test = x_test[:NB_TEST], y_test[:NB_TEST]
+
+        # Get the ready-trained Keras model
+        model = self.classifier_k._model
+        classifier = KerasClassifier((0, 1), model, defences='featsqueeze1')
+
+        attack = FastGradientMethod(classifier, eps=1)
+        x_train_adv = attack.generate(x_train)
+        x_test_adv = attack.generate(x_test)
+
+        self.assertFalse((x_train == x_train_adv).all())
+        self.assertFalse((x_test == x_test_adv).all())
+
+        train_y_pred = get_labels_np_array(classifier.predict(x_train_adv))
+        test_y_pred = get_labels_np_array(classifier.predict(x_test_adv))
+
+        self.assertFalse((y_train == train_y_pred).all())
+        self.assertFalse((y_test == test_y_pred).all())
+
+        preds = classifier.predict(x_train_adv)
+        acc = np.sum(np.argmax(preds, axis=1) == np.argmax(y_train, axis=1)) / y_train.shape[0]
+        print('\nAccuracy on adversarial train examples with feature squeezing: %.2f%%' % (acc * 100))
+
+        preds = classifier.predict(x_test_adv)
+        acc = np.sum(np.argmax(preds, axis=1) == np.argmax(y_test, axis=1)) / y_test.shape[0]
+        print('\nAccuracy on adversarial test examples with feature squeezing: %.2f%%' % (acc * 100))
 
     @staticmethod
     def _cnn_mnist_tf(input_shape):
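For context on the new `test_with_defences`: it reuses the already-trained Keras model, wraps it in a `KerasClassifier` with the `featsqueeze1` (feature squeezing) defence, and checks that FGM still perturbs the inputs and changes the predicted labels. A rough standalone sketch of that flow follows; the model architecture, training settings, and import paths are assumptions and may differ across ART versions, while the `KerasClassifier((0, 1), model, defences='featsqueeze1')` and `FastGradientMethod(classifier, eps=1)` calls mirror the diff itself.

    import numpy as np
    from keras.layers import Dense, Flatten
    from keras.models import Sequential

    from art.attacks import FastGradientMethod
    from art.classifiers import KerasClassifier
    from art.utils import load_mnist

    # Small illustrative model; the test suite uses its own pre-trained classifier
    (x_train, y_train), (x_test, y_test), _, _ = load_mnist()
    x_train, y_train = x_train[:1000], y_train[:1000]

    model = Sequential([Flatten(input_shape=x_train.shape[1:]),
                        Dense(128, activation='relu'),
                        Dense(10, activation='softmax')])
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    model.fit(x_train, y_train, epochs=1, batch_size=128)

    # Wrap the trained model with the feature squeezing defence, then attack it with FGM
    classifier = KerasClassifier((0, 1), model, defences='featsqueeze1')
    attack = FastGradientMethod(classifier, eps=1)
    x_train_adv = attack.generate(x_train)

    preds = classifier.predict(x_train_adv)
    acc = np.sum(np.argmax(preds, axis=1) == np.argmax(y_train, axis=1)) / y_train.shape[0]
    print('Accuracy on adversarial examples with feature squeezing: %.2f%%' % (acc * 100))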