input_synthesis.py
from keras import backend as K
import numpy as np
def synthesize(model, x_original, suspicious_indices, step_size, d):
    """Perturb each input in x_original along the averaged gradient of the
    suspicious neurons' activations (one gradient-ascent step), with the
    per-pixel step clipped to [-d, d] and the result clipped to [0, 1]."""
    input_tensor = model.input
    perturbed_set_x = []

    # Build one gradient function per suspicious neuron; each s_ind is a
    # (layer_index, neuron_index) pair into model.layers.
    iterators = []
    for s_ind in suspicious_indices:
        loss = K.mean(model.layers[int(s_ind[0])].output[..., int(s_ind[1])])
        grads = K.gradients(loss, input_tensor)[0]
        iterate = K.function([input_tensor], [loss, grads])
        iterators.append(iterate)

    print(f"have to synthesize {len(x_original)} inputs")

    for x in x_original:
        # Gradient of each suspicious neuron's mean activation w.r.t. the input.
        all_grads = []
        for iterate in iterators:
            _, grad_vals = iterate([np.expand_dims(x, axis=0)])
            all_grads.append(grad_vals[0])

        perturbed_x = x.copy()
        for z in range(x.shape[0]):
            for i in range(x.shape[1]):
                for k in range(x.shape[2]):
                    # Average the per-neuron gradients at this pixel, then scale.
                    sum_grad = 0
                    for j in range(len(all_grads)):
                        sum_grad += all_grads[j][z][i][k]
                    avg_grad = float(sum_grad) / len(suspicious_indices)
                    avg_grad = avg_grad * step_size
                    # Clip the step to [-d, d].
                    if avg_grad > d:
                        avg_grad = d
                    elif avg_grad < -d:
                        avg_grad = -d
                    # Apply the step and keep the pixel value in [0, 1].
                    perturbed_x[z][i][k] = max(min(x[z][i][k] + avg_grad, 1), 0)

        perturbed_set_x.append(perturbed_x)

    return perturbed_set_x
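

# A vectorized equivalent of the per-pixel loop above (a sketch, not part of
# the original file): stacking the per-neuron gradients lets NumPy do the
# averaging, scaling, and both clipping steps in one pass over the image.
def perturb_vectorized(x, all_grads, step_size, d):
    avg_grad = np.mean(np.stack(all_grads, axis=0), axis=0) * step_size
    step = np.clip(avg_grad, -d, d)
    return np.clip(x + step, 0.0, 1.0)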
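

# Hypothetical usage sketch (not from the original file): the model path and
# the (layer, neuron) pairs below are placeholders, and the data is assumed to
# be MNIST reshaped to (28, 28, 1) in [0, 1]. Requires a TF1-style Keras
# backend, since K.gradients() needs graph mode.
if __name__ == "__main__":
    from keras.datasets import mnist
    from keras.models import load_model

    (x_train, _), _ = mnist.load_data()
    x_sample = x_train[:10].reshape(-1, 28, 28, 1).astype("float32") / 255.0

    model = load_model("model.h5")  # placeholder path
    suspicious = [(2, 5), (2, 11)]  # placeholder (layer_index, neuron_index) pairs
    synthesized = synthesize(model, x_sample, suspicious, step_size=1.0, d=0.1)
    print(f"synthesized {len(synthesized)} perturbed inputs")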