chenpangpang / transformers

Commit 52c53f39, authored Dec 13, 2018 by thomwolf

clean up apex integration

Parent: 4946c2c5
Showing 3 changed files with 5 additions and 5 deletions (+5 / -5):

  pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py   +2 -2
  pytorch_pretrained_bert/modeling.py                            +2 -2
  tests/optimization_test.py                                     +1 -1
pytorch_pretrained_bert/convert_tf_checkpoint_to_pytorch.py

@@ -59,9 +59,9 @@ def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, bert_config_file, pytor
                 l = re.split(r'_(\d+)', m_name)
             else:
                 l = [m_name]
-            if l[0] == 'kernel':
+            if l[0] == 'kernel' or l[0] == 'gamma':
                 pointer = getattr(pointer, 'weight')
-            elif l[0] == 'output_bias':
+            elif l[0] == 'output_bias' or l[0] == 'beta':
                 pointer = getattr(pointer, 'bias')
             elif l[0] == 'output_weights':
                 pointer = getattr(pointer, 'weight')
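For orientation, here is a small, self-contained sketch of the name-mapping logic this hunk touches: the converter splits each piece of a TensorFlow variable name and, with this change, also routes the LayerNorm names gamma and beta onto the renamed weight and bias attributes. The resolve_attribute_names helper and the sample variable name below are illustrative only; the real converter walks actual module objects with getattr.

import re

def resolve_attribute_names(tf_name):
    """Illustrative helper (not in the commit): list the PyTorch attribute
    names the converter would follow for one TF variable name."""
    attrs = []
    for m_name in tf_name.split('/'):
        # 'layer_3' -> ['layer', '3', ''] so the numeric index is kept separately
        if re.fullmatch(r'[A-Za-z]+_\d+', m_name):
            l = re.split(r'_(\d+)', m_name)
        else:
            l = [m_name]
        if l[0] == 'kernel' or l[0] == 'gamma':        # dense kernel / LayerNorm gamma -> weight
            attrs.append('weight')
        elif l[0] == 'output_bias' or l[0] == 'beta':  # output bias / LayerNorm beta -> bias
            attrs.append('bias')
        elif l[0] == 'output_weights':
            attrs.append('weight')
        else:
            attrs.append(l[0])
        if len(l) >= 2:
            attrs.append(l[1])                         # numeric layer index, e.g. '3'
    return attrs

print(resolve_attribute_names('bert/encoder/layer_3/output/LayerNorm/gamma'))
# ['bert', 'encoder', 'layer', '3', 'output', 'LayerNorm', 'weight']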
pytorch_pretrained_bert/modeling.py

@@ -516,9 +516,9 @@ class PreTrainedBertModel(nn.Module):
         for key in state_dict.keys():
             new_key = None
             if 'gamma' in key:
                 new_key = key.replace('gamma', 'weight')
             if 'beta' in key:
                 new_key = key.replace('beta', 'bias')
             if new_key:
                 old_keys.append(key)
                 new_keys.append(new_key)
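The remapping loop shown above is presumably there so that checkpoints saved with the old gamma/beta LayerNorm parameter names keep loading after the rename to weight/bias. A minimal, self-contained sketch of the same idea; the state dict keys and tensor shapes below are made up for illustration:

import torch

# Hypothetical state dict saved before the LayerNorm parameter rename.
state_dict = {
    'bert.embeddings.LayerNorm.gamma': torch.ones(4),
    'bert.embeddings.LayerNorm.beta': torch.zeros(4),
    'bert.embeddings.word_embeddings.weight': torch.zeros(10, 4),
}

old_keys, new_keys = [], []
for key in state_dict.keys():
    new_key = None
    if 'gamma' in key:
        new_key = key.replace('gamma', 'weight')
    if 'beta' in key:
        new_key = key.replace('beta', 'bias')
    if new_key:
        old_keys.append(key)
        new_keys.append(new_key)

# Apply the collected renames after the iteration so the dict is not
# mutated while being traversed.
for old_key, new_key in zip(old_keys, new_keys):
    state_dict[new_key] = state_dict.pop(old_key)

print(sorted(state_dict.keys()))
# ['bert.embeddings.LayerNorm.bias', 'bert.embeddings.LayerNorm.weight',
#  'bert.embeddings.word_embeddings.weight']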
tests/optimization_test.py

@@ -35,7 +35,7 @@ class OptimizationTest(unittest.TestCase):
         criterion = torch.nn.MSELoss()
         # No warmup, constant schedule, no gradient clipping
         optimizer = BertAdam(params=[w], lr=2e-1,
-                              weight_decay=0.0,
+                              weight_decay_rate=0.0,
                               max_grad_norm=-1)
         for _ in range(100):
             loss = criterion(w, target)
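For reference, a rough usage sketch of the optimizer call the test exercises after this change, with the decay passed through the weight_decay_rate keyword. The import path and the toy tensor values are assumptions, not taken from the commit, and the real test goes on to check the optimized weights.

import torch
from pytorch_pretrained_bert.optimization import BertAdam  # assumed import path

# Illustrative toy problem: drive a small weight vector toward a fixed target.
w = torch.tensor([0.1, -0.2, -0.1], requires_grad=True)
target = torch.tensor([0.4, 0.2, -0.5])
criterion = torch.nn.MSELoss()

# No warmup, constant schedule, no gradient clipping (max_grad_norm=-1),
# and no weight decay via the weight_decay_rate keyword used by the test.
optimizer = BertAdam(params=[w], lr=2e-1,
                     weight_decay_rate=0.0,
                     max_grad_norm=-1)

for _ in range(100):
    loss = criterion(w, target)
    loss.backward()
    optimizer.step()
    optimizer.zero_grad()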