69e3319c4bffaa78823be2267f036d3d3fdf286e,tensorlayer/layers/activation.py,PTRelu6,forward,#PTRelu6#,237
Before Change
    # self.add_weights([self.alpha_low, self.alpha_high])

    def forward(self, inputs):
        outputs = self._apply_activation(
            inputs, **{
                "alpha_low": self.alpha_low_constrained,
                "alpha_high": self.alpha_high_constrained,
                "name": "ptrelu6_activation"
            }
        )
        return outputs
After Change
    self.alpha_high_constrained = tf.nn.sigmoid(self.alpha_high, name="constraining_alpha_high_in_0_1")

    def forward(self, inputs):
        pos = tf.nn.relu(inputs)
        pos_6 = -tf.nn.relu(inputs - 6) + self.alpha_high_constrained * tf.nn.relu(inputs - 6)
        neg = -self.alpha_low_constrained * tf.nn.relu(-inputs)
        return pos + pos_6 + neg
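For reference, a minimal standalone sketch of the piecewise computation introduced in the After Change, with the sigmoid-constrained alphas passed in as plain floats. The ptrelu6_forward helper, the example inputs, and the alpha values are illustrative assumptions for this note, not part of the commit.

import tensorflow as tf

def ptrelu6_forward(inputs, alpha_low_constrained, alpha_high_constrained):
    # Positive branch: identity slope for x > 0 (corrected past 6 below).
    pos = tf.nn.relu(inputs)
    # Above 6: cancel the slope-1 contribution past 6 and replace it with alpha_high.
    pos_6 = -tf.nn.relu(inputs - 6) + alpha_high_constrained * tf.nn.relu(inputs - 6)
    # Negative branch: slope alpha_low for x < 0.
    neg = -alpha_low_constrained * tf.nn.relu(-inputs)
    return pos + pos_6 + neg

# Quick check with hypothetical constrained alphas in (0, 1).
x = tf.constant([-2.0, 0.0, 3.0, 8.0])
print(ptrelu6_forward(x, 0.2, 0.5).numpy())  # expected: [-0.4, 0.0, 3.0, 7.0]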
In pattern: SUPERPATTERN
Frequency: 3
Non-data size: 19
Instances
Project Name: tensorlayer/tensorlayer
Commit Name: 69e3319c4bffaa78823be2267f036d3d3fdf286e
Time: 2019-03-19
Author: jingqing.zhang15@imperial.ac.uk
File Name: tensorlayer/layers/activation.py
Class Name: PTRelu6
Method Name: forward

Project Name: tensorlayer/tensorlayer
Commit Name: 96596d3f9277ca11279c0ff4f7ae556ec5e7388f
Time: 2019-03-10
Author: jingqing.zhang15@imperial.ac.uk
File Name: tensorlayer/layers/activation.py
Class Name: PRelu
Method Name: forward

Project Name: tensorlayer/tensorlayer
Commit Name: 69e3319c4bffaa78823be2267f036d3d3fdf286e
Time: 2019-03-19
Author: jingqing.zhang15@imperial.ac.uk
File Name: tensorlayer/layers/activation.py
Class Name: PRelu6
Method Name: forward