This is my code:
import sys
import torch
import numpy as np
from torch.optim.lr_scheduler import StepLR

sys.path.append('..')
sys.path.append('../..')
from data.data_utils_1 import get_loader

from art.attacks.evasion import ProjectedGradientDescent
from art.estimators.classification import PyTorchClassifier
from art.defences.trainer import AdversarialTrainer

device = torch.device("cuda" if torch.cuda.is_available() else "cpu")

model = torch.load('xxx/cifar10/r50_1x_sk0_pretrain.pth')
optimizer = torch.optim.SGD(model.parameters(), lr=0.001, momentum=0.9, weight_decay=5e-4)
classifier = PyTorchClassifier(
    model=model,
    clip_values=(0.0, 1.0),
    loss=torch.nn.CrossEntropyLoss(label_smoothing=0.1),
    optimizer=optimizer,
    # scheduler=scheduler,
    input_shape=(3, 224, 224),
    nb_classes=10,
)
train_loader, test_loader, val_loader = get_loader('cifar10', 128, 1, 4, False)
attack = ProjectedGradientDescent(
    estimator=classifier,
    norm=np.inf,
    eps=8/255,
    eps_step=2/255,
    max_iter=7,
    targeted=False,
    verbose=True,
)
from art.data_generators import PyTorchDataGenerator
generator = PyTorchDataGenerator(train_loader, size=40000, batch_size=128)
trainer = AdversarialTrainer(classifier=classifier, attacks=attack, ratio=1.0)
trainer.fit_generator(generator, nb_epochs=100)

And this is the code of get_loader:
elif dataset == 'cifar10':
    test_dataset = torchvision.datasets.CIFAR10(root='/data4/xxx/cifar', train=False, transform=test_transform_used, download=False)
    train_dataset = torchvision.datasets.CIFAR10(root='/data4/xxx/cifar', train=True, transform=train_transform_used, download=False)
    train_dataset, _ = random_split(train_dataset, [int(percent * len(train_dataset)), len(train_dataset) - int(percent * len(train_dataset))])
    train_size = int(0.8 * len(train_dataset))
    val_size = len(train_dataset) - train_size
    train_dataset, val_dataset = random_split(train_dataset, [train_size, val_size])
    val_loader = DataLoader(val_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)
    test_loader = DataLoader(test_dataset, batch_size=batch_size, shuffle=False, num_workers=num_workers)
    train_loader = DataLoader(train_dataset, batch_size=batch_size, shuffle=True, num_workers=num_workers)
...
return train_loader, test_loader, val_loader

The transform is:
train_transform_without_normalize = transforms.Compose([
    transforms.Resize(256),
    transforms.CenterCrop(224),
    transforms.ToTensor(),
    transforms.RandomHorizontalFlip(p=0.5),
    transforms.RandomRotation(degrees=45),
    # transforms.Lambda(lambda x: x.repeat(3, 1, 1))
    # transforms.Normalize(mean=[0.485, 0.456, 0.406], std=[0.229, 0.224, 0.225])
])

It has been like this for a long time.
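If "it has been like this for a long time" refers to training that looks stuck or very slow, note that this configuration is simply expensive: with max_iter=7, PGD runs about seven extra forward/backward passes through the model on 224x224 inputs for every training batch, on top of the normal training step, and fit_generator repeats that for 100 epochs. Below is a minimal timing sketch (not part of the original script; it only reuses the attack and train_loader objects defined above, and the epoch estimate assumes the size=40000 / batch_size=128 values passed to PyTorchDataGenerator) to gauge the per-epoch cost of the attack alone before committing to the full run:

import time

# Rough cost estimate, assuming `attack` and `train_loader` from the script above.
x_batch, y_batch = next(iter(train_loader))
x_np = x_batch.numpy()   # float32 images in [0, 1], shape (batch, 3, 224, 224)
y_np = y_batch.numpy()

start = time.time()
x_adv = attack.generate(x=x_np, y=y_np)   # one PGD batch = max_iter gradient passes
per_batch = time.time() - start

n_batches = 40000 // 128 + 1              # size / batch_size used by PyTorchDataGenerator
print(f"PGD on one batch of {len(x_np)}: {per_batch:.1f} s")
print(f"Attack cost alone: roughly {per_batch * n_batches / 60:.0f} min per epoch")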