Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
ResNet50_tensorflow
Commits
57f839bf
Commit
57f839bf
authored
May 12, 2016
by
Sergio Guadarrama
Committed by
Martin Wicke
May 12, 2016
Browse files
Make the split of batch outside GPUs (#51)
Tentative fix for issue #47
parent
c348081a
Changes
1
Hide whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
6 additions
and
11 deletions
+6
-11
inception/inception/inception_train.py
inception/inception/inception_train.py
+6
-11
No files found.
inception/inception/inception_train.py
View file @
57f839bf
...
@@ -220,28 +220,23 @@ def train(dataset):
...
@@ -220,28 +220,23 @@ def train(dataset):
# Number of classes in the Dataset label set plus 1.
# Label 0 is reserved for an (unused) background class.
num_classes = dataset.num_classes() + 1
# Split the batch of images and labels for towers.
images_splits = tf.split(0, FLAGS.num_gpus, images)
labels_splits = tf.split(0, FLAGS.num_gpus, labels)
# Calculate the gradients for each model tower.
tower_grads = []
for i in xrange(FLAGS.num_gpus):
  with tf.device('/gpu:%d' % i):
    with tf.name_scope('%s_%d' % (inception.TOWER_NAME, i)) as scope:
      # (removed by this commit) Split the batch of images and labels
      # inside the per-GPU loop via tf.slice:
      # Split the batch of images and labels.
      batch_start = split_batch_size * i
      images_batch = tf.slice(images, begin=[batch_start, 0, 0, 0],
                              size=[split_batch_size, -1, -1, -1])
      labels_batch = tf.slice(labels, begin=[batch_start],
                              size=[split_batch_size])
      # Force all Variables to reside on the CPU.
      with slim.arg_scope([slim.variables.variable], device='/cpu:0'):
        # Calculate the loss for one tower of the ImageNet model. This
        # function constructs the entire ImageNet model but shares the
        # variables across all towers.
        # (removed by this commit)
        loss = _tower_loss(images_batch, labels_batch, num_classes, scope)
        # (added by this commit)
        loss = _tower_loss(images_splits[i], labels_splits[i], num_classes,
                           scope)
        # Reuse variables for the next tower.
        tf.get_variable_scope().reuse_variables()
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment