word = self.word_lut(src_input[:, :, 0])
emb = word
if self.feature_dicts:
    features = [feature_lut(src_input[:, :, j+1])
                for j, feature_lut in enumerate(self.feature_luts)]
    # Apply one MLP layer.
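The MLP step itself is cut off in this excerpt. Below is a minimal, self-contained sketch of what such a merge could look like, assuming the word and feature embeddings are concatenated and passed through one linear layer plus a nonlinearity; the tensor sizes and the linear/activation names are illustrative, not taken from the original code.

import torch
import torch.nn as nn

# Illustrative sizes: sequence length 5, batch 3, word vectors of size 8,
# two extra features with vectors of size 4 each.
word = torch.randn(5, 3, 8)
features = [torch.randn(5, 3, 4), torch.randn(5, 3, 4)]

linear = nn.Linear(8 + 4 + 4, 8)  # project the concatenation back to the word vector size
activation = nn.ReLU()

# One MLP layer over the concatenated word and feature embeddings.
emb = activation(linear(torch.cat([word] + features, -1)))
print(emb.shape)  # torch.Size([5, 3, 8])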
After Change
Return:
    emb (FloatTensor): len x batch x sum of feature embedding sizes
# Split the input along the feature dimension and drop the singleton dim: each entry is len x batch.
feat_inputs = (feat.squeeze(2) for feat in src_input.split(1, dim=2))
# Look up each feature in its own embedding table.
features = [lut(feat) for lut, feat in zip(self.emb_luts, feat_inputs)]
# Combine the per-feature embeddings into a single tensor (e.g. by concatenation, depending on the merge strategy).
emb = self.merge(features)
return emb
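For reference, a self-contained sketch of how these lines behave, assuming two feature columns and concatenation as the merge function; the vocabulary and embedding sizes are illustrative, and only the emb_luts/merge names mirror the snippet above.

import torch
import torch.nn as nn

# Two input feature columns (e.g. word index plus one extra feature),
# each with its own lookup table.
emb_luts = nn.ModuleList([nn.Embedding(100, 8), nn.Embedding(10, 4)])
merge = lambda feats: torch.cat(feats, dim=-1)  # one possible merge: concatenation

src_input = torch.randint(0, 10, (5, 3, 2))  # len x batch x nfeat

# Split into per-feature index tensors of shape (len, batch).
feat_inputs = (feat.squeeze(2) for feat in src_input.split(1, dim=2))
# Embed each feature with its own lookup table.
features = [lut(feat) for lut, feat in zip(emb_luts, feat_inputs)]
emb = merge(features)
print(emb.shape)  # torch.Size([5, 3, 12]) -- sum of feature embedding sizes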