# When concatenating, derive the size of each feature's
# embedding from the number of values the embedding
# takes
feat_sizes = (int(feat_dict.size() ** feat_exp)
for feat_dict in feature_dicts)
else:
# sum feature merge (for now, the same as the old option
# for merging features in OpenNMT-py)
feat_sizes = (opt.feat_vec_size for feat_dict in feature_dicts)
# --- After Change ---
emb_sizes.extend([opt.word_vec_size] * len(feature_dicts))
else:
# mlp feature merge
emb_sizes.extend([opt.feat_vec_size] * len(feature_dicts))
# apply a layer of mlp to get it down to the correct dim
self.mlp = nn.Sequential(onmt.modules.BottleLinear(
sum(emb_sizes),
opt.word_vec_size),