import os
import pickle

import numpy as np
import tensorflow as tf

from Adversarial_Observation.Swarm import ParticleSwarm
from manuscripts.Posion25.analysis import *
from analysis import *
88
99def adversarial_attack_blackbox (model , dataset , image_index , output_dir = 'results' , num_iterations = 30 , num_particles = 100 ):
1010
1111 pickle_path = os .path .join (output_dir , 'attacker.pkl' )
12+
13+ dataset_list = list (dataset .as_numpy_iterator ())
14+ all_images , all_labels = zip (* dataset_list )
15+ all_images = np .concatenate (all_images , axis = 0 )
16+ all_labels = np .concatenate (all_labels , axis = 0 )
17+
18+ if image_index < 0 or image_index >= len (all_images ):
19+ raise ValueError (f"Image index { image_index } out of range" )
20+
21+ single_input = all_images [image_index ]
22+ single_target = np .argmax (all_labels [image_index ])
23+ target_class = (single_target + 1 ) % 10
24+
25+ input_set = np .stack ([
26+ single_input + (np .random .uniform (0 , 1 , single_input .shape ) * (np .random .rand (* single_input .shape ) < 0.9 ))
27+ for _ in range (num_particles )
28+ ])
29+
1230 if os .path .exists (pickle_path ):
1331 with open (pickle_path , 'rb' ) as f :
1432 attacker = pickle .load (f )
1533 print (f"Loaded attacker from { pickle_path } " )
1634 else :
1735
18- dataset_list = list (dataset .as_numpy_iterator ())
19- all_images , all_labels = zip (* dataset_list )
20- all_images = np .concatenate (all_images , axis = 0 )
21- all_labels = np .concatenate (all_labels , axis = 0 )
22-
23- if image_index < 0 or image_index >= len (all_images ):
24- raise ValueError (f"Image index { image_index } out of range" )
25-
26- single_input = all_images [image_index ]
27- single_target = np .argmax (all_labels [image_index ])
28- target_class = (single_target + 1 ) % 10
29-
30- input_set = np .stack ([
31- single_input + (np .random .uniform (0 , 1 , single_input .shape ) * (np .random .rand (* single_input .shape ) < 0.9 ))
32- for _ in range (num_particles )
33- ])
34-
3536 attacker = ParticleSwarm (
3637 model = model , input_set = input_set , starting_class = single_target ,
3738 target_class = target_class , num_iterations = num_iterations ,
@@ -42,7 +43,7 @@ def adversarial_attack_blackbox(model, dataset, image_index, output_dir='results
4243 with open (pickle_path , 'wb' ) as f :
4344 pickle .dump (attacker , f )
4445 print (f"Saved attacker to { pickle_path } " )
45-
46+ print ( "Adversarial attack completed. Analyzing results..." )
4647 analyze_attack (attacker , single_input , target_class )
4748
4849def best_analysis (attacker , original_data , target ):
@@ -200,7 +201,10 @@ def full_analysis(attacker, input_data, target):
200201 print (f"Full analysis saved to { path } " )
201202
202203def analyze_attack (attacker , original_img , target ):
204+ print ("Starting analysis of the adversarial attack..." )
203205 best_analysis (attacker , original_img , target )
204- reduced_img = reduce_excess_perturbations_scale (attacker , original_img , attacker .global_best_position .numpy (), target )
206+ print ("Reducing excess perturbations..." )
207+ reduced_img = reduce_excess_perturbations (attacker , original_img , attacker .global_best_position .numpy (), target )
205208 denoise_analysis (attacker , original_img , reduced_img , target )
209+ print ("Performing full analysis of the attack..." )
206210 full_analysis (attacker , original_img , target )
0 commit comments