fk_i_hat = fk_hat
nb_iter = 0
while fk_i_hat == fk_hat and nb_iter < self.max_iter:
grad_diff = grd - grd[fk_hat]
f_diff = f - f[fk_hat]
# Mask out the true label so argmin below skips it
mask = [0] * self.classifier.nb_classes
mask[fk_hat] = 1
norm = np.linalg.norm(grad_diff.reshape(self.classifier.nb_classes, -1), axis=1) + tol
value = np.ma.array(np.abs(f_diff) / norm, mask=mask)
l = value.argmin(fill_value=np.inf)
r = (abs(f_diff[l]) / (pow(np.linalg.norm(grad_diff[l]), 2) + tol)) * grad_diff[l]
# Add perturbation and clip result to the valid data range
xj = np.clip(xj + r, clip_min, clip_max)
# --- After change (diff artifact: the lines below are the revised version) ---
for j, val in enumerate(x_adv):
xj = val[None, ...]
f = preds[j]
grd = self.classifier.class_gradient(xj, logits=True)[0]
fk_hat = np.argmax(f)
for _ in range(self.max_iter):