Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
File Name: python/dgl/nn/tensorflow/conv/graphconv.py
Class Name: GraphConv
Method Name: call
Signature: #GraphConv#Any#Any#Any#, 100

Before Change


        Returns
        -------
        tf.Tensor
            The output feature
        """
        graph = graph.local_var()

        if self._norm == "both":
            degs = tf.clip_by_value(tf.cast(graph.out_degrees(), tf.float32),
                                    clip_value_min=1,
                                    clip_value_max=np.inf)
            norm = tf.pow(degs, -0.5)
            shp = norm.shape + (1,) * (feat.ndim - 1)
            norm = tf.reshape(norm, shp)
            feat = feat * norm

        if weight is not None:
            if self.weight is not None:
                raise DGLError("External weight is provided while at the same time the"
                               " module has defined its own weight parameter. Please"
                               " create the module with flag weight=False.")
        else:
            weight = self.weight

        if self._in_feats > self._out_feats:
            # mult W first to reduce the feature size for aggregation.
            if weight is not None:
                feat = tf.matmul(feat, weight)
            graph.srcdata["h"] = feat
            graph.update_all(fn.copy_src(src="h", out="m"),
                             fn.sum(msg="m", out="h"))
            rst = graph.dstdata["h"]
        else:
            # aggregate first, then mult W
            graph.srcdata["h"] = feat
            graph.update_all(fn.copy_src(src="h", out="m"),
                             fn.sum(msg="m", out="h"))
            rst = graph.dstdata["h"]
            if weight is not None:
                rst = tf.matmul(rst, weight)

        if self._norm != "none":
            degs = tf.clip_by_value(tf.cast(graph.in_degrees(), tf.float32),
                                    clip_value_min=1,
                                    clip_value_max=np.inf)
            if self._norm == "both":
                norm = tf.pow(degs, -0.5)
            else:
                norm = 1.0 / degs
            shp = norm.shape + (1,) * (feat.ndim - 1)
            norm = tf.reshape(norm, shp)
            rst = rst * norm

        if self.bias is not None:
            rst = rst + self.bias

        if self._activation is not None:
            rst = self._activation(rst)

        return rst

    def extra_repr(self):
        """Set the extra representation of the module,
        which will come into effect when printing the model.
        """
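With norm="both" (the default), both versions implement the symmetric GCN propagation rule: features are scaled by the inverse square root of the out-degrees before aggregation and by the inverse square root of the in-degrees after it, i.e. rst = D_in^{-1/2} A^T D_out^{-1/2} X W. The in_feats > out_feats branch only reorders the matmul so that the smaller feature size is the one aggregated. Below is a minimal NumPy sketch of that math, assuming a small dense adjacency matrix; all names here are illustrative, not part of the DGL API.

import numpy as np

A = np.array([[0, 1, 0],
              [0, 0, 1],
              [1, 0, 0]], dtype=np.float32)   # dense adjacency, A[i, j] = 1 for edge i -> j
X = np.random.rand(3, 4).astype(np.float32)   # node features
W = np.random.rand(4, 2).astype(np.float32)   # layer weight (in_feats=4 > out_feats=2)

out_deg = np.clip(A.sum(axis=1), 1, None)     # out-degrees, clipped to >= 1 like the snippet
in_deg = np.clip(A.sum(axis=0), 1, None)      # in-degrees

feat = X * out_deg[:, None] ** -0.5           # pre-scale sources by D_out^{-1/2}
rst = (A.T @ (feat @ W)) * in_deg[:, None] ** -0.5   # mult W first, aggregate, post-scale by D_in^{-1/2}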

After Change


        Returns
        -------
        tf.Tensor
            The output feature
        """
        with graph.local_scope():
            if self._norm == "both":
                degs = tf.clip_by_value(tf.cast(graph.out_degrees(), tf.float32),
                                        clip_value_min=1,
                                        clip_value_max=np.inf)
                norm = tf.pow(degs, -0.5)
                shp = norm.shape + (1,) * (feat.ndim - 1)
                norm = tf.reshape(norm, shp)
                feat = feat * norm

            if weight is not None:
                if self.weight is not None:
                    raise DGLError("External weight is provided while at the same time the"
                                   " module has defined its own weight parameter. Please"
                                   " create the module with flag weight=False.")
            else:
                weight = self.weight

            if self._in_feats > self._out_feats:
                # mult W first to reduce the feature size for aggregation.
                if weight is not None:
                    feat = tf.matmul(feat, weight)
                graph.srcdata["h"] = feat
                graph.update_all(fn.copy_src(src="h", out="m"),
                                 fn.sum(msg="m", out="h"))
                rst = graph.dstdata["h"]
            else:
                # aggregate first, then mult W
                graph.srcdata["h"] = feat
                graph.update_all(fn.copy_src(src="h", out="m"),
                                 fn.sum(msg="m", out="h"))
                rst = graph.dstdata["h"]
                if weight is not None:
                    rst = tf.matmul(rst, weight)

            if self._norm != "none":
                degs = tf.clip_by_value(tf.cast(graph.in_degrees(), tf.float32),
                                        clip_value_min=1,
                                        clip_value_max=np.inf)
                if self._norm == "both":
                    norm = tf.pow(degs, -0.5)
                else:
                    norm = 1.0 / degs
                shp = norm.shape + (1,) * (feat.ndim - 1)
                norm = tf.reshape(norm, shp)
                rst = rst * norm

            if self.bias is not None:
                rst = rst + self.bias

            if self._activation is not None:
                rst = self._activation(rst)

            return rst

    def extra_repr(self):
        """Set the extra representation of the module,
        which will come into effect when printing the model.
        """
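The only behavioral change is how temporary features are isolated. graph.local_var() returns a new graph object whose feature mutations do not write back to the original graph; graph.local_scope() provides the same isolation as a context manager, so the lifetime of the temporary "h" field is explicit from the indentation and the scope is restored even if an exception is raised inside the block. A minimal sketch of the semantics, assuming a recent DGL build with the TensorFlow backend selected (e.g. DGLBACKEND=tensorflow):

import dgl
import tensorflow as tf

g = dgl.graph(([0, 1, 2], [1, 2, 0]))     # 3-node directed cycle

with g.local_scope():
    g.ndata["h"] = tf.ones((3, 4))        # temporary feature, as in the snippet's srcdata["h"]
    # ... graph.update_all(...) would run here ...

assert "h" not in g.ndata                 # the temporary feature did not leak out of the scope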
In pattern: SUPERPATTERN

Frequency: 3

Non-data size: 8

Instances


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/tensorflow/conv/graphconv.py
Class Name: GraphConv
Method Name: call


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/mxnet/conv/graphconv.py
Class Name: GraphConv
Method Name: forward


Project Name: dmlc/dgl
Commit Name: f25bc176d0365234ebb051d5069edff24ad2de4d
Time: 2020-05-01
Author: wmjlyjemaine@gmail.com
File Name: python/dgl/nn/pytorch/conv/graphconv.py
Class Name: GraphConv
Method Name: forward