88ff1953e94cfbbcd53f4d659a1099fce8a7344e,tensorlayer/layers/binary.py,BinaryDenseLayer,__init__,#BinaryDenseLayer#Any#Any#Any#Any#Any#Any#Any#Any#Any#,127

Before Change


        if b_init_args is None:
            b_init_args = {}

        Layer.__init__(self, prev_layer=prev_layer, name=name)
        self.inputs = prev_layer.outputs
        if self.inputs.get_shape().ndims != 2:
            raise Exception("The input dimension must be rank 2, please reshape or flatten it")

        if use_gemm:
            raise Exception("TODO. The current version use tf.matmul for inferencing.")

        n_in = int(self.inputs.get_shape()[-1])
        self.n_units = n_units
        logging.info("BinaryDenseLayer  %s: %d %s" % (self.name, self.n_units, act.__name__))
        with tf.variable_scope(name):
            W = tf.get_variable(name="W", shape=(n_in, n_units), initializer=W_init, dtype=LayersConfig.tf_dtype, **W_init_args)
            # W = tl.act.sign(W)    # dont update ...
            W = quantize(W)
            # W = tf.Variable(W)
            # print(W)
            if b_init is not None:
                try:
                    b = tf.get_variable(name="b", shape=(n_units), initializer=b_init, dtype=LayersConfig.tf_dtype, **b_init_args)
                except Exception:  # If initializer is a constant, do not specify shape.
                    b = tf.get_variable(name="b", initializer=b_init, dtype=LayersConfig.tf_dtype, **b_init_args)
                self.outputs = act(tf.matmul(self.inputs, W) + b)
                # self.outputs = act(xnor_gemm(self.inputs, W) + b) # TODO
            else:
                self.outputs = act(tf.matmul(self.inputs, W))
                # self.outputs = act(xnor_gemm(self.inputs, W)) # TODO

        self.all_layers.append(self.outputs)

After Change


            b_init_args=None,
            name="binary_dense",
    ):
        super(BinaryDenseLayer, self).__init__(prev_layer=prev_layer, name=name)
        logging.info("BinaryDenseLayer  %s: %d %s" % (name, n_units, act.__name__))

        self.inputs = prev_layer.outputs

        if W_init_args is None:
            W_init_args = {}
        if b_init_args is None:
            b_init_args = {}

        if self.inputs.get_shape().ndims != 2:
            raise Exception("The input dimension must be rank 2, please reshape or flatten it")

        if use_gemm:
            raise Exception("TODO. The current version use tf.matmul for inferencing.")

        n_in = int(self.inputs.get_shape()[-1])
        self.n_units = n_units

        with tf.variable_scope(name):
            W = tf.get_variable(name="W", shape=(n_in, n_units), initializer=W_init, dtype=LayersConfig.tf_dtype, **W_init_args)
            # W = tl.act.sign(W)    # dont update ...
            W = quantize(W)
            # W = tf.Variable(W)
            # print(W)
            if b_init is not None:
                try:
                    b = tf.get_variable(name="b", shape=(n_units), initializer=b_init, dtype=LayersConfig.tf_dtype, **b_init_args)
                except Exception:  # If initializer is a constant, do not specify shape.
                    b = tf.get_variable(name="b", initializer=b_init, dtype=LayersConfig.tf_dtype, **b_init_args)
                self.outputs = act(tf.matmul(self.inputs, W) + b)
                # self.outputs = act(xnor_gemm(self.inputs, W) + b) # TODO
            else:
                self.outputs = act(tf.matmul(self.inputs, W))
                # self.outputs = act(xnor_gemm(self.inputs, W)) # TODO

        self.all_layers.append(self.outputs)
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 13

Instances


Project Name: tensorlayer/tensorlayer
Commit Name: 88ff1953e94cfbbcd53f4d659a1099fce8a7344e
Time: 2018-04-13
Author: DEKHTIARJonathan@users.noreply.github.com
File Name: tensorlayer/layers/binary.py
Class Name: BinaryDenseLayer
Method Name: __init__


Project Name: tensorlayer/tensorlayer
Commit Name: 88ff1953e94cfbbcd53f4d659a1099fce8a7344e
Time: 2018-04-13
Author: DEKHTIARJonathan@users.noreply.github.com
File Name: tensorlayer/layers/binary.py
Class Name: DorefaDenseLayer
Method Name: __init__


Project Name: tensorlayer/tensorlayer
Commit Name: 88ff1953e94cfbbcd53f4d659a1099fce8a7344e
Time: 2018-04-13
Author: DEKHTIARJonathan@users.noreply.github.com
File Name: tensorlayer/layers/binary.py
Class Name: TernaryDenseLayer
Method Name: __init__


Project Name: tensorlayer/tensorlayer
Commit Name: 88ff1953e94cfbbcd53f4d659a1099fce8a7344e
Time: 2018-04-13
Author: DEKHTIARJonathan@users.noreply.github.com
File Name: tensorlayer/layers/binary.py
Class Name: BinaryDenseLayer
Method Name: __init__