ModelZoo / ResNet50_tensorflow / Commits

Commit 80af2a7b, authored Aug 20, 2019 by Vinh Nguyen

fix mixed precision parameter

parent 8f526987
Showing 1 changed file with 13 additions and 13 deletions.
official/vision/image_classification/resnet_imagenet_main.py (view file @ 80af2a7b)
```python
@@ -32,7 +32,7 @@ from official.vision.image_classification import common
from official.vision.image_classification import imagenet_preprocessing
from official.vision.image_classification import resnet_model
from official.vision.image_classification import trivial_model
import pdb

LR_SCHEDULE = [  # (multiplier, epoch to start) tuples
    (1.0, 5), (0.1, 30), (0.01, 60), (0.001, 80)
```
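For context: each `LR_SCHEDULE` entry pairs a learning-rate multiplier with the epoch at which it takes effect. A minimal sketch of how such a table is typically consumed; the base rate and the `compute_learning_rate` helper are illustrative assumptions, not part of this file:

```python
# Sketch only: a stepwise lookup driven by (multiplier, start_epoch)
# tuples like LR_SCHEDULE above. BASE_LEARNING_RATE and
# compute_learning_rate are hypothetical, for illustration.
BASE_LEARNING_RATE = 0.1  # assumed base rate

LR_SCHEDULE = [  # (multiplier, epoch to start) tuples
    (1.0, 5), (0.1, 30), (0.01, 60), (0.001, 80)
]

def compute_learning_rate(current_epoch):
  """Scales the base rate by the last multiplier whose start epoch
  has been reached."""
  learning_rate = BASE_LEARNING_RATE
  for multiplier, start_epoch in LR_SCHEDULE:
    if current_epoch >= start_epoch:
      learning_rate = BASE_LEARNING_RATE * multiplier
  return learning_rate

print(compute_learning_rate(35))  # 0.1 * 0.1, since epochs 5 and 30 have passed
```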
```python
@@ -96,9 +96,6 @@ def run(flags_obj):
  dtype = flags_core.get_tf_dtype(flags_obj)
  if dtype == 'float16':
    # Mixed precision training via graph rewrite should not be used in conjunction
    # with tf.keras.mixed_precision
    if flags_obj["fp16_implementation"] != "graph_rewrite":
      policy = tf.keras.mixed_precision.experimental.Policy('infer_float32_vars')
      tf.keras.mixed_precision.experimental.set_policy(policy)
```
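This hunk adjusts the Keras mixed-precision setup, which the in-code comment says must not be combined with the graph-rewrite path. For reference, a minimal standalone sketch of that path using the TF 1.14-era experimental API that appears in the hunk; the Dense layer and its dtype are illustrative assumptions:

```python
# Sketch only, assuming the TF 1.14-era experimental API shown in the
# hunk above. The policy must be set before any layers or variables
# are created.
import tensorflow as tf

policy = tf.keras.mixed_precision.experimental.Policy('infer_float32_vars')
tf.keras.mixed_precision.experimental.set_policy(policy)

# Illustrative layer (not from this file): under 'infer_float32_vars',
# a float16 layer computes in float16 while keeping float32 variables.
layer = tf.keras.layers.Dense(10, input_shape=(4,), dtype='float16')
```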
```python
@@ -185,12 +182,15 @@ def run(flags_obj):
  if dtype == 'float16':
    # TODO(reedwm): Remove manually wrapping optimizer once mixed precision
    # can be enabled with a single line of code.
    if flags_dict["fp16_implementation"] == "graph_rewrite":
      optimizer = tf.compat.v1.train.experimental.enable_mixed_precision_graph_rewrite(
          optimizer)
    else:
      optimizer = tf.keras.mixed_precision.experimental.LossScaleOptimizer(
          optimizer, loss_scale=flags_core.get_loss_scale(
              flags_obj, default_for_fp16=128))
  pdb.set_trace()
  if flags_obj.fp16_implementation == "graph_rewrite":
    # Note: when flags_obj["fp16_implementation"] == "graph_rewrite",
    # dtype as determined by flags_core.get_tf_dtype(flags_obj) would be 'float32'
    optimizer = tf.train.experimental.enable_mixed_precision_graph_rewrite(
        optimizer)

  if flags_obj.use_trivial_model:
    model = trivial_model.trivial_model(
```