Before Change
if isinstance(self.scheme, MiddlewareScheme):
    self.layers_params = copy.copy(self.schemes[self.scheme])
else:
    self.layers_params = copy.copy(self.scheme)
# we allow adding batchnorm, dropout or activation functions after each layer.
# The motivation is to simplify the transition between a network with batchnorm and a network
# without batchnorm to a single flag (the same applies to activation functions and dropout)
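The "single flag" idea described in the comment can be sketched as follows; the helper and flag names (apply_layer, batchnorm, activation, dropout_rate) are hypothetical illustrations of the mechanism, not the library's actual API:

import tensorflow as tf

def apply_layer(x, layer, batchnorm=False, activation=None,
                dropout_rate=0.0, is_training=True):
    # hypothetical helper: run one layer, then the optional extras the flags
    # control, so toggling batchnorm on or off is a single-flag change rather
    # than a rewrite of the whole scheme
    x = layer(x)
    if batchnorm:
        x = tf.keras.layers.BatchNormalization()(x, training=is_training)
    if activation is not None:
        x = activation(x)
    if dropout_rate > 0.0:
        x = tf.keras.layers.Dropout(dropout_rate)(x, training=is_training)
    return x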
After Change
if isinstance(self.scheme, MiddlewareScheme):
    self.layers_params = copy.copy(self.schemes[self.scheme])
else:
    # if the scheme is specified directly, convert each layer to a TF layer if it's not a callable object
    # NOTE: if a layer object is callable, it must return a TF tensor when invoked
    self.layers_params = [convert_layer(l) for l in copy.copy(self.scheme)]
# we allow adding batchnorm, dropout or activation functions after each layer.
# The motivation is to simplify the transition between a network with batchnorm and a network
# without batchnorm to a single flag (the same applies to activation functions and dropout)
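A minimal sketch of the conversion in the else branch, assuming convert_layer passes callables through unchanged and turns plain layer descriptions into TF layers; the Dense description class below is a hypothetical stand-in for the scheme's layer-parameter objects, not the actual implementation:

import tensorflow as tf

class Dense:
    # hypothetical scheme entry: a plain, framework-free description of a dense layer
    def __init__(self, units):
        self.units = units

def convert_layer(layer):
    # assumed behavior: callables pass through untouched (when invoked they must
    # return a TF tensor); non-callable descriptions are converted to TF layers
    if callable(layer):
        return layer
    if isinstance(layer, Dense):
        return tf.keras.layers.Dense(layer.units)
    raise ValueError("unsupported layer description: {}".format(layer))

Under these assumptions, a scheme such as [Dense(64), tf.nn.tanh] converts to a list in which every element can be invoked on a tensor and returns a tensor, which is exactly what the NOTE above requires.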