# pad the 0th dimension (T/sequence) with self.context zeros
# Once PyTorch's padding functions have settled, this should move to those.
padding = torch.zeros(self.context, *(input.size()[1:])).type_as(input)
x = torch.cat((input, padding), 0)
# add lookahead windows (with context+1 width) as a fourth dimension
# for each seq-batch-feature combination (sketched below)
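The windowing code itself is not shown in this excerpt; a minimal sketch of what that step could look like, where seq_len (the unpadded sequence length) is an assumed name not taken from this change:

# hypothetical: one (context+1)-wide slice per original time step,
# stacked into a 4-D tensor of shape (T, context+1, N, H)
x = torch.stack([x[i:i + self.context + 1] for i in range(seq_len)], dim=0)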
After Change
# (T, N, H) -> (N, H, T): move time to the last dimension so the
# convolution slides over the sequence
x = x.transpose(0, 1).transpose(1, 2)
# zero-pad the time dimension so the convolution can look ahead
x = F.pad(x, pad=self.pad, value=0)
# the convolution over time implements the lookahead window
x = self.conv(x)
# (N, H, T) -> (T, N, H): restore the original layout
x = x.transpose(1, 2).transpose(0, 1).contiguous()
return x
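For context, self.pad and self.conv are defined outside this excerpt. A minimal sketch of a constructor consistent with the forward pass above, where the class name, argument names, and exact kernel configuration are assumptions rather than part of this change:

import torch
import torch.nn as nn
import torch.nn.functional as F

class Lookahead(nn.Module):
    # hypothetical constructor; matches the (context+1)-wide windows
    # described in the before-change comments
    def __init__(self, n_features, context):
        super().__init__()
        self.context = context
        # pad only the right (future) side of the time axis
        self.pad = (0, self.context)
        # depthwise Conv1d: one filter per feature spanning context+1 steps,
        # so each output frame mixes the current step with its lookahead
        self.conv = nn.Conv1d(n_features, n_features,
                              kernel_size=self.context + 1,
                              groups=n_features, bias=False)

Under these assumptions the convolution subsumes the old explicit windowing: instead of materializing a 4-D (T, context+1, N, H) tensor, each feature channel is filtered over time in place, and the padded length T + context collapses back to T after the kernel is applied.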
def __repr__(self):
return self.__class__.__name__ + "(" \