Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
5f432480
"test/git@developer.sourcefind.cn:hehl2/torchaudio.git" did not exist on "723e9a52ebde0afd542b1cc8588598ad2c893c87"
Commit
5f432480
authored
Nov 03, 2018
by
VictorSanh
Browse files
Create DataParallel model if several GPUs
parent
5889765a
Changes
3
Hide whitespace changes
Inline
Side-by-side
Showing
3 changed files
with
9 additions
and
0 deletions
+9
-0
extract_features_pytorch.py
extract_features_pytorch.py
+3
-0
run_classifier_pytorch.py
run_classifier_pytorch.py
+3
-0
run_squad_pytorch.py
run_squad_pytorch.py
+3
-0
No files found.
extract_features_pytorch.py
View file @
5f432480
...
...
@@ -249,6 +249,9 @@ def main():
    if args.init_checkpoint is not None:
        model.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
    model.to(device)
    if n_gpu > 1:
        model = nn.DataParallel(model)
    all_input_ids = torch.tensor([f.input_ids for f in features], dtype=torch.long)
    all_input_mask = torch.tensor([f.input_mask for f in features], dtype=torch.long)
...
...
run_classifier_pytorch.py
View file @
5f432480
...
...
@@ -482,6 +482,9 @@ def main():
    if args.init_checkpoint is not None:
        model.bert.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
    model.to(device)
    if n_gpu > 1:
        model = torch.nn.DataParallel(model)
    optimizer = BERTAdam([{'params': [p for n, p in model.named_parameters() if n != 'bias'], 'l2': 0.01},
                          {'params': [p for n, p in model.named_parameters() if n == 'bias'], 'l2': 0.}
...
...
run_squad_pytorch.py
View file @
5f432480
...
...
@@ -795,6 +795,9 @@ def main():
    if args.init_checkpoint is not None:
        model.bert.load_state_dict(torch.load(args.init_checkpoint, map_location='cpu'))
    model.to(device)
    if n_gpu > 1:
        model = torch.nn.DataParallel(model)
    optimizer = BERTAdam([{'params': [p for n, p in model.named_parameters() if n != 'bias'], 'l2': 0.01},
                          {'params': [p for n, p in model.named_parameters() if n == 'bias'], 'l2': 0.}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment