OpenDAS / FastMoE

Commit 82fe21d3, authored Nov 19, 2020 by Jiezhong Qiu
fix
parent 37d01e9c

Showing 1 changed file with 3 additions and 3 deletions

pytorch/mem_transformer.py  (+3, -3)
@@ -90,7 +90,7 @@ class MultiHeadPositionwiseFF(nn.Module):
         assert d_model % n_head == 0
         self.n_head = n_head
-        d_head = d_model / n_head
+        d_head = d_model // n_head
         self.d_head = d_head
         self.d_model = d_model
         self.d_inner = d_inner
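The first change switches from true division to floor division when deriving the per-head dimension. A minimal standalone sketch of why this matters (assumed values, not code from this repository): in Python 3, "/" always returns a float, and a float head size is typically rejected wherever an integer dimension is required, for example when allocating weight tensors.

# Standalone sketch, not repository code:
d_model, n_head = 512, 8
d_head = d_model / n_head    # 64.0, a float; fails when used as a tensor dimension
d_head = d_model // n_head   # 64, an int, safe for shaping parameters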
@@ -138,7 +138,7 @@ class MultiHeadPositionwiseFF(nn.Module):
         attn_vec = torch.einsum('ibnh,ndh->ibnd', (attn_score, self.v_weight)) + self.v_bias
-        attn_vec = attn_vec.view(inp.size(0), inp.size(1), self.d_model)
+        attn_vec = attn_vec.contiguous().view(inp.size(0), inp.size(1), self.d_model)
         core_out = self.o_net(attn_vec)
         core_out = self.dropout(core_out)
@@ -987,7 +987,7 @@ class MemTransformerLM(nn.Module):
                 self.out_layer.bias, target, pred_hid, self.sampler)
             loss = -F.log_softmax(logit, -1)[:, :, 0]
         else:
-            loss = self.crit(pred_hid.view(-1, pred_hid.size(-1)), target.view(-1))
+            loss = self.crit(pred_hid.view(-1, pred_hid.size(-1)), target.contiguous().view(-1))
             loss = loss.view(tgt_len, -1)
         if new_mems is None:
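The third change applies the same contiguity guard to target: a target tensor produced by slicing or transposing may not be contiguous, so it is made contiguous before being flattened with .view(-1) for the loss criterion, following the same pattern illustrated above.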