deepfool.py
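"""DeepFool attack (Moosavi-Dezfooli et al., CVPR 2016): finds a small perturbation
that pushes a single input across the nearest decision boundary of a classifier."""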
import collections
import copy

import numpy as np
import torch
from torch.autograd import Variable


# torch.autograd.gradcheck.zero_gradients was removed in recent PyTorch
# releases; the helper below reproduces its behaviour in place.
def zero_gradients(x):
    if isinstance(x, torch.Tensor):
        if x.grad is not None:
            x.grad.detach_()
            x.grad.zero_()
    elif isinstance(x, collections.abc.Iterable):
        for elem in x:
            zero_gradients(elem)


def deepfool(im, net, lambda_fac=3., num_classes=10, overshoot=0.02, max_iter=50, device='cuda'):
    """Run DeepFool on a single image `im` (shape (1, C, H, W)) against classifier `net`.

    Returns the normalized gradient of the fooling-class margin at the perturbed
    point, and the perturbed image obtained by scaling the accumulated
    perturbation by `lambda_fac`.
    """
    image = copy.deepcopy(im)
    input_shape = image.size()

    # Forward pass on the clean image; keep the indices of the top `num_classes` logits.
    f_image = net.forward(Variable(image, requires_grad=True)).data.cpu().numpy().flatten()
    I = f_image.argsort()[::-1]
    I = I[0:num_classes]
    label = I[0]

    pert_image = copy.deepcopy(image)
    r_tot = torch.zeros(input_shape).to(device)

    k_i = label
    loop_i = 0

    # Iterate until the prediction flips or the iteration budget is exhausted.
    while k_i == label and loop_i < max_iter:

        x = Variable(pert_image, requires_grad=True)
        fs = net.forward(x)

        pert = torch.Tensor([np.inf])[0].to(device)
        w = torch.zeros(input_shape).to(device)

        fs[0, I[0]].backward(retain_graph=True)
        grad_orig = copy.deepcopy(x.grad.data)

        # Find the closest linearized decision boundary among the candidate classes.
        for k in range(1, num_classes):
            zero_gradients(x)

            fs[0, I[k]].backward(retain_graph=True)
            cur_grad = copy.deepcopy(x.grad.data)

            w_k = cur_grad - grad_orig
            f_k = (fs[0, I[k]] - fs[0, I[0]]).data

            # Distance to the linearized boundary between class I[k] and the original class.
            pert_k = torch.abs(f_k) / w_k.norm()

            if pert_k < pert:
                pert = pert_k + 0.
                w = w_k + 0.

        # Minimal step that crosses the closest boundary.
        r_i = torch.clamp(pert, min=1e-4) * w / w.norm()
        r_tot = r_tot + r_i

        pert_image = pert_image + r_i
        check_fool = image + (1 + overshoot) * r_tot
        k_i = torch.argmax(net.forward(Variable(check_fool, requires_grad=True)).data).item()

        loop_i += 1

    # Gradient of the margin between the fooling class and the original class,
    # evaluated at the perturbed image.
    x = Variable(pert_image, requires_grad=True)
    fs = net.forward(x)
    (fs[0, k_i] - fs[0, label]).backward(retain_graph=True)
    grad = copy.deepcopy(x.grad.data)
    grad = grad / grad.norm()

    r_tot = lambda_fac * r_tot
    pert_image = image + r_tot

    return grad, pert_image
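

# Example usage: a minimal sketch, not part of the original file. The tiny
# untrained classifier and random input below are placeholder assumptions;
# in practice pass a trained network and a preprocessed image of shape
# (1, C, H, W) already moved to `device`.
if __name__ == "__main__":
    import torch.nn as nn

    device = 'cuda' if torch.cuda.is_available() else 'cpu'

    # Placeholder 10-class classifier on 32x32 RGB inputs (untrained).
    net = nn.Sequential(
        nn.Conv2d(3, 16, 3, padding=1),
        nn.ReLU(),
        nn.AdaptiveAvgPool2d(1),
        nn.Flatten(),
        nn.Linear(16, 10),
    ).to(device).eval()

    im = torch.rand(1, 3, 32, 32).to(device)  # placeholder input image

    grad, pert_image = deepfool(im, net, num_classes=10, device=device)
    print('original prediction :', net(im).argmax(dim=1).item())
    print('perturbed prediction:', net(pert_image).argmax(dim=1).item())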