ModelZoo / ResNet50_tensorflow · Commit 7cda51fa

Authored Oct 30, 2020 by Le Hou
Committed by A. Unique TensorFlower on Oct 30, 2020

Internal change

PiperOrigin-RevId: 339901099
Parent: e792d861
Showing 2 changed files with 12 additions and 5 deletions:

official/nlp/tasks/masked_lm.py       +11  -5
official/nlp/tasks/masked_lm_test.py   +1  -0
official/nlp/tasks/masked_lm.py (view file @ 7cda51fa)

...
@@ -36,6 +36,9 @@ class MaskedLMConfig(cfg.TaskConfig):
           bert.ClsHeadConfig(
               inner_dim=768, num_classes=2, dropout_rate=0.1,
               name='next_sentence')
       ])
+  # TODO(b/154564893): Mathematically, scale_loss should be True.
+  # However, it works better with scale_loss being False.
+  scale_loss: bool = False
   train_data: cfg.DataConfig = cfg.DataConfig()
   validation_data: cfg.DataConfig = cfg.DataConfig()
...

@@ -161,11 +164,14 @@ class MaskedLMTask(base_task.Task):
           model_outputs=outputs, metrics=metrics, aux_losses=model.losses)
-      # Scales loss as the default gradients allreduce performs sum inside the
-      # optimizer.
-      # TODO(b/154564893): enable loss scaling.
-      # scaled_loss = loss / tf.distribute.get_strategy().num_replicas_in_sync
+      if self.task_config.scale_loss:
+        # Scales loss as the default gradients allreduce performs sum inside the
+        # optimizer.
+        scaled_loss = loss / tf.distribute.get_strategy().num_replicas_in_sync
     tvars = model.trainable_variables
-    grads = tape.gradient(loss, tvars)
+    if self.task_config.scale_loss:
+      grads = tape.gradient(scaled_loss, tvars)
+    else:
+      grads = tape.gradient(loss, tvars)
     optimizer.apply_gradients(list(zip(grads, tvars)))
     self.process_metrics(metrics, inputs, outputs)
...
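The TODO in this diff ("Mathematically, scale_loss should be True") refers to how gradients are aggregated under tf.distribute: as the commit's own comment notes, the default allreduce sums per-replica gradients, so dividing each replica's loss by num_replicas_in_sync makes the summed gradient equal the gradient of the global mean loss, while leaving it unscaled effectively scales the gradient by the replica count. A minimal standalone sketch of the same pattern, using a hypothetical stand-in Keras model (not the Model Garden pretrainer) and made-up toy shapes:

import tensorflow as tf

strategy = tf.distribute.MirroredStrategy()  # one replica per local device

with strategy.scope():
  # Hypothetical stand-in model; the commit applies the pattern to the
  # BERT pretrainer built by MaskedLMTask.
  model = tf.keras.Sequential([tf.keras.layers.Dense(2)])
  optimizer = tf.keras.optimizers.SGD(learning_rate=0.1)

scale_loss = True  # plays the role of task_config.scale_loss


@tf.function
def train_step(features, labels):

  def step_fn(x, y):
    with tf.GradientTape() as tape:
      logits = model(x, training=True)
      loss = tf.reduce_mean(
          tf.keras.losses.sparse_categorical_crossentropy(
              y, logits, from_logits=True))
      if scale_loss:
        # Sum-allreduce of grad(loss_i / N) over N replicas equals
        # grad(mean_i loss_i), the mathematically intended gradient.
        loss = loss / tf.distribute.get_strategy().num_replicas_in_sync
    grads = tape.gradient(loss, model.trainable_variables)
    optimizer.apply_gradients(zip(grads, model.trainable_variables))
    return loss

  return strategy.run(step_fn, args=(features, labels))


# Toy invocation with made-up shapes.
train_step(tf.random.normal([8, 4]), tf.zeros([8], dtype=tf.int32))

The sketch folds the scaling into a single loss variable for brevity; the commit keeps scaled_loss separate so the unscaled value stays available for reporting.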
official/nlp/tasks/masked_lm_test.py (view file @ 7cda51fa)

...
@@ -28,6 +28,7 @@ class MLMTaskTest(tf.test.TestCase):
   def test_task(self):
     config = masked_lm.MaskedLMConfig(
         init_checkpoint=self.get_temp_dir(),
+        scale_loss=True,
         model=bert.PretrainerConfig(
             encoder=encoders.EncoderConfig(
                 bert=encoders.BertEncoderConfig(vocab_size=30522,

...
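The test change is a single added keyword argument that opts the task into the new flag. For completeness, a sketch of building the same config outside the test, assuming the Model Garden import paths that masked_lm_test.py appears to use (official.nlp.configs.bert, official.nlp.configs.encoders):

from official.nlp.configs import bert
from official.nlp.configs import encoders
from official.nlp.tasks import masked_lm

# Mirrors the test's construction, minus the temp-dir init_checkpoint;
# scale_loss defaults to False, so True must be passed explicitly.
config = masked_lm.MaskedLMConfig(
    scale_loss=True,
    model=bert.PretrainerConfig(
        encoder=encoders.EncoderConfig(
            bert=encoders.BertEncoderConfig(vocab_size=30522))))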