def get_updates(self, params, constraints, loss):
    grads = self.get_gradients(loss, params)
    self.updates = [K.update_add(self.iterations, 1)]

    t = self.iterations + 1
    # bias-corrected learning rate (only the 1st moment needs correction in Adamax)
    lr_t = self.lr / (1. - K.pow(self.beta_1, t))

    # zero init of 1st moment
    ms = [K.variable(np.zeros(K.get_value(p).shape)) for p in params]
    # zero init of exponentially weighted infinity norm
    us = [K.variable(np.zeros(K.get_value(p).shape)) for p in params]
    self.weights = ms + us

    for p, g, m, u in zip(params, grads, ms, us):
        # exponential moving average of the gradient (1st moment)
        m_t = (self.beta_1 * m) + (1. - self.beta_1) * g
        # exponentially weighted infinity norm (replaces Adam's 2nd moment)
        u_t = K.maximum(self.beta_2 * u, K.abs(g))
        # parameter update
        p_t = p - lr_t * m_t / (u_t + self.epsilon)

        self.updates.append(K.update(m, m_t))
        self.updates.append(K.update(u, u_t))

        new_p = p_t
        # apply constraints
        if p in constraints:
            c = constraints[p]
            new_p = c(new_p)
        self.updates.append(K.update(p, new_p))
    return self.updates
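# For reference, the loop above implements the Adamax update rule; this is a
# sketch of the math as read directly from the code, not an external source:
#   m_t = beta_1 * m + (1 - beta_1) * g                       (1st moment)
#   u_t = max(beta_2 * u, |g|)                                (infinity norm)
#   p_t = p - lr / (1 - beta_1 ** t) * m_t / (u_t + epsilon)  (parameter step)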
def get_config(self):