Commit b1dfd39e authored by Myle Ott

Revert `dim` in `F.softmax` for backwards compatibility

parent 42a0150c
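
For context, here is a minimal sketch of the equivalence this revert relies on (written against a modern PyTorch for illustration; the shapes are assumptions, not part of the commit). On releases whose `F.softmax` accepts the `dim` keyword, `dim=1` on a 2-D input matches the implicit default, so dropping the keyword keeps older releases working without changing the result:

    import torch
    import torch.nn.functional as F

    x = torch.randn(4, 10)          # hypothetical 2-D input
    implicit = F.softmax(x)         # old-style call; works on releases without `dim`
    explicit = F.softmax(x, dim=1)  # explicit form; needs `dim` support
    assert torch.allclose(implicit, explicit)  # for 2-D input the default dim is 1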
@@ -128,7 +128,7 @@ class AttentionLayer(nn.Module):
         # softmax over last dim
         sz = x.size()
-        x = F.softmax(x.view(sz[0] * sz[1], sz[2]), dim=1)
+        x = F.softmax(x.view(sz[0] * sz[1], sz[2]))
         x = x.view(sz)
         attn_scores = x
...
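
As an aside, the flatten-softmax-restore pattern in the hunk above can be sketched in isolation; the 3-D shape below is a hypothetical stand-in for the attention scores:

    import torch
    import torch.nn.functional as F

    x = torch.randn(2, 5, 7)   # hypothetical (bsz, srclen, dim) scores
    sz = x.size()
    # Flatten to 2-D so the implicit softmax dimension (1) is the last dim,
    # then restore the original shape.
    x = F.softmax(x.view(sz[0] * sz[1], sz[2]))
    x = x.view(sz)
    assert x.shape == sz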
@@ -330,7 +330,7 @@ class SequenceGenerator(object):
             decoder_out, attn = model.decoder.incremental_forward(tokens, encoder_out)
         else:
             decoder_out, attn = model.decoder.forward(tokens, encoder_out)
-        probs = F.softmax(decoder_out[:, -1, :], dim=1).data
+        probs = F.softmax(decoder_out[:, -1, :]).data
         attn = attn[:, -1, :].data
         if avg_probs is None or avg_attn is None:
             avg_probs = probs
...
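
If one wanted to keep the explicit keyword on newer releases while remaining compatible with older ones, a fallback wrapper along these lines would work; `softmax_over_dim1` is a hypothetical helper, not part of fairseq:

    import torch.nn.functional as F

    def softmax_over_dim1(x):
        # Hypothetical compatibility shim: prefer the explicit keyword,
        # fall back to the implicit default on releases whose F.softmax
        # does not accept `dim`.
        try:
            return F.softmax(x, dim=1)
        except TypeError:
            return F.softmax(x)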