# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
"""Tests for MobileNet v1."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import numpy as np
import tensorflow as tf
from tensorflow.contrib import slim as contrib_slim

from nets import mobilenet_v1

slim = contrib_slim


class MobilenetV1Test(tf.test.TestCase):

  def testBuildClassificationNetwork(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random.uniform((batch_size, height, width, 3))
    logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith(
        'MobilenetV1/Logits/SpatialSqueeze'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [batch_size, num_classes])
    self.assertTrue('Predictions' in end_points)
    self.assertListEqual(end_points['Predictions'].get_shape().as_list(),
                         [batch_size, num_classes])

  def testBuildPreLogitsNetwork(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = None

    inputs = tf.random.uniform((batch_size, height, width, 3))
    net, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(net.op.name.startswith('MobilenetV1/Logits/AvgPool'))
    self.assertListEqual(net.get_shape().as_list(), [batch_size, 1, 1, 1024])
    self.assertFalse('Logits' in end_points)
    self.assertFalse('Predictions' in end_points)

  def testBuildBaseNetwork(self):
    batch_size = 5
    height, width = 224, 224

    inputs = tf.random.uniform((batch_size, height, width, 3))
    net, end_points = mobilenet_v1.mobilenet_v1_base(inputs)
    self.assertTrue(net.op.name.startswith('MobilenetV1/Conv2d_13'))
    self.assertListEqual(net.get_shape().as_list(),
                         [batch_size, 7, 7, 1024])
    expected_endpoints = ['Conv2d_0',
                          'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
                          'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
                          'Conv2d_3_depthwise', 'Conv2d_3_pointwise',
                          'Conv2d_4_depthwise', 'Conv2d_4_pointwise',
                          'Conv2d_5_depthwise', 'Conv2d_5_pointwise',
                          'Conv2d_6_depthwise', 'Conv2d_6_pointwise',
                          'Conv2d_7_depthwise', 'Conv2d_7_pointwise',
                          'Conv2d_8_depthwise', 'Conv2d_8_pointwise',
                          'Conv2d_9_depthwise', 'Conv2d_9_pointwise',
                          'Conv2d_10_depthwise', 'Conv2d_10_pointwise',
                          'Conv2d_11_depthwise', 'Conv2d_11_pointwise',
                          'Conv2d_12_depthwise', 'Conv2d_12_pointwise',
                          'Conv2d_13_depthwise', 'Conv2d_13_pointwise']
    self.assertItemsEqual(end_points.keys(), expected_endpoints)

  def testBuildOnlyUptoFinalEndpoint(self):
    batch_size = 5
    height, width = 224, 224
    endpoints = ['Conv2d_0',
                 'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
                 'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
                 'Conv2d_3_depthwise', 'Conv2d_3_pointwise',
                 'Conv2d_4_depthwise', 'Conv2d_4_pointwise',
                 'Conv2d_5_depthwise', 'Conv2d_5_pointwise',
                 'Conv2d_6_depthwise', 'Conv2d_6_pointwise',
                 'Conv2d_7_depthwise', 'Conv2d_7_pointwise',
                 'Conv2d_8_depthwise', 'Conv2d_8_pointwise',
                 'Conv2d_9_depthwise', 'Conv2d_9_pointwise',
                 'Conv2d_10_depthwise', 'Conv2d_10_pointwise',
                 'Conv2d_11_depthwise', 'Conv2d_11_pointwise',
                 'Conv2d_12_depthwise', 'Conv2d_12_pointwise',
                 'Conv2d_13_depthwise', 'Conv2d_13_pointwise']
    for index, endpoint in enumerate(endpoints):
      with tf.Graph().as_default():
        inputs = tf.random.uniform((batch_size, height, width, 3))
        out_tensor, end_points = mobilenet_v1.mobilenet_v1_base(
            inputs, final_endpoint=endpoint)
        self.assertTrue(out_tensor.op.name.startswith(
            'MobilenetV1/' + endpoint))
        self.assertItemsEqual(endpoints[:index+1], end_points.keys())

  def testBuildCustomNetworkUsingConvDefs(self):
    batch_size = 5
    height, width = 224, 224
    conv_defs = [
        mobilenet_v1.Conv(kernel=[3, 3], stride=2, depth=32),
        mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=1, depth=64),
        mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=2, depth=128),
        mobilenet_v1.DepthSepConv(kernel=[3, 3], stride=1, depth=512)
    ]

    inputs = tf.random.uniform((batch_size, height, width, 3))
    net, end_points = mobilenet_v1.mobilenet_v1_base(
        inputs, final_endpoint='Conv2d_3_pointwise', conv_defs=conv_defs)
    self.assertTrue(net.op.name.startswith('MobilenetV1/Conv2d_3'))
    self.assertListEqual(net.get_shape().as_list(),
                         [batch_size, 56, 56, 512])
    expected_endpoints = ['Conv2d_0',
                          'Conv2d_1_depthwise', 'Conv2d_1_pointwise',
                          'Conv2d_2_depthwise', 'Conv2d_2_pointwise',
                          'Conv2d_3_depthwise', 'Conv2d_3_pointwise']
    self.assertItemsEqual(end_points.keys(), expected_endpoints)

  def testBuildAndCheckAllEndPointsUptoConv2d_13(self):
    batch_size = 5
    height, width = 224, 224

    inputs = tf.random.uniform((batch_size, height, width, 3))
    with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                        normalizer_fn=slim.batch_norm):
      _, end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, final_endpoint='Conv2d_13_pointwise')
      _, explicit_padding_end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, final_endpoint='Conv2d_13_pointwise',
          use_explicit_padding=True)
    endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
                        'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
                        'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
                        'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
                        'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
                        'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_6_depthwise': [batch_size, 14, 14, 256],
                        'Conv2d_6_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_7_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_7_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_8_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_8_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_9_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_9_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_10_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_10_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_11_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_11_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_12_depthwise': [batch_size, 7, 7, 512],
                        'Conv2d_12_pointwise': [batch_size, 7, 7, 1024],
                        'Conv2d_13_depthwise': [batch_size, 7, 7, 1024],
                        'Conv2d_13_pointwise': [batch_size, 7, 7, 1024]}
    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)
    self.assertItemsEqual(endpoints_shapes.keys(),
                          explicit_padding_end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in explicit_padding_end_points)
      self.assertListEqual(
          explicit_padding_end_points[endpoint_name].get_shape().as_list(),
          expected_shape)

  def testOutputStride16BuildAndCheckAllEndPointsUptoConv2d_13(self):
    batch_size = 5
    height, width = 224, 224
    output_stride = 16
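    # With output_stride=16, mobilenet_v1_base is expected to switch to atrous
    # (dilated) convolutions once the cumulative stride reaches 16, so spatial
    # resolution should hold at 14x14 from Conv2d_6 onward in the shapes below.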

    inputs = tf.random.uniform((batch_size, height, width, 3))
    with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                        normalizer_fn=slim.batch_norm):
      _, end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, output_stride=output_stride,
          final_endpoint='Conv2d_13_pointwise')
      _, explicit_padding_end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, output_stride=output_stride,
          final_endpoint='Conv2d_13_pointwise', use_explicit_padding=True)
    endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
                        'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
                        'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
                        'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
                        'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
                        'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_6_depthwise': [batch_size, 14, 14, 256],
                        'Conv2d_6_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_7_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_7_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_8_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_8_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_9_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_9_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_10_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_10_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_11_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_11_pointwise': [batch_size, 14, 14, 512],
                        'Conv2d_12_depthwise': [batch_size, 14, 14, 512],
                        'Conv2d_12_pointwise': [batch_size, 14, 14, 1024],
                        'Conv2d_13_depthwise': [batch_size, 14, 14, 1024],
                        'Conv2d_13_pointwise': [batch_size, 14, 14, 1024]}
    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)
    self.assertItemsEqual(endpoints_shapes.keys(),
                          explicit_padding_end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in explicit_padding_end_points)
      self.assertListEqual(
          explicit_padding_end_points[endpoint_name].get_shape().as_list(),
          expected_shape)

  def testOutputStride8BuildAndCheckAllEndPointsUptoConv2d_13(self):
    batch_size = 5
    height, width = 224, 224
    output_stride = 8
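    # Same idea with output_stride=8: past a cumulative stride of 8 the
    # convolutions should become atrous, so resolution stays at 28x28 through
    # Conv2d_13 in the expected shapes below.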

    inputs = tf.random.uniform((batch_size, height, width, 3))
    with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                        normalizer_fn=slim.batch_norm):
      _, end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, output_stride=output_stride,
          final_endpoint='Conv2d_13_pointwise')
      _, explicit_padding_end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, output_stride=output_stride,
          final_endpoint='Conv2d_13_pointwise', use_explicit_padding=True)
    endpoints_shapes = {'Conv2d_0': [batch_size, 112, 112, 32],
                        'Conv2d_1_depthwise': [batch_size, 112, 112, 32],
                        'Conv2d_1_pointwise': [batch_size, 112, 112, 64],
                        'Conv2d_2_depthwise': [batch_size, 56, 56, 64],
                        'Conv2d_2_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_depthwise': [batch_size, 56, 56, 128],
                        'Conv2d_3_pointwise': [batch_size, 56, 56, 128],
                        'Conv2d_4_depthwise': [batch_size, 28, 28, 128],
                        'Conv2d_4_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_depthwise': [batch_size, 28, 28, 256],
                        'Conv2d_5_pointwise': [batch_size, 28, 28, 256],
                        'Conv2d_6_depthwise': [batch_size, 28, 28, 256],
                        'Conv2d_6_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_7_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_7_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_8_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_8_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_9_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_9_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_10_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_10_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_11_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_11_pointwise': [batch_size, 28, 28, 512],
                        'Conv2d_12_depthwise': [batch_size, 28, 28, 512],
                        'Conv2d_12_pointwise': [batch_size, 28, 28, 1024],
                        'Conv2d_13_depthwise': [batch_size, 28, 28, 1024],
                        'Conv2d_13_pointwise': [batch_size, 28, 28, 1024]}
    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)
    self.assertItemsEqual(endpoints_shapes.keys(),
                          explicit_padding_end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in explicit_padding_end_points)
      self.assertListEqual(
          explicit_padding_end_points[endpoint_name].get_shape().as_list(),
          expected_shape)

  def testBuildAndCheckAllEndPointsApproximateFaceNet(self):
    batch_size = 5
    height, width = 128, 128

    inputs = tf.random.uniform((batch_size, height, width, 3))
    with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                        normalizer_fn=slim.batch_norm):
      _, end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, final_endpoint='Conv2d_13_pointwise', depth_multiplier=0.75)
      _, explicit_padding_end_points = mobilenet_v1.mobilenet_v1_base(
          inputs, final_endpoint='Conv2d_13_pointwise', depth_multiplier=0.75,
          use_explicit_padding=True)
    # FaceNet uses depth=16 for the Conv2d_0 layer; with depth_multiplier=0.75
    # the standard MobileNet depths are scaled instead (e.g. 32 * 0.75 = 24),
    # so this configuration only approximates FaceNet.
    endpoints_shapes = {'Conv2d_0': [batch_size, 64, 64, 24],
                        'Conv2d_1_depthwise': [batch_size, 64, 64, 24],
                        'Conv2d_1_pointwise': [batch_size, 64, 64, 48],
                        'Conv2d_2_depthwise': [batch_size, 32, 32, 48],
                        'Conv2d_2_pointwise': [batch_size, 32, 32, 96],
                        'Conv2d_3_depthwise': [batch_size, 32, 32, 96],
                        'Conv2d_3_pointwise': [batch_size, 32, 32, 96],
                        'Conv2d_4_depthwise': [batch_size, 16, 16, 96],
                        'Conv2d_4_pointwise': [batch_size, 16, 16, 192],
                        'Conv2d_5_depthwise': [batch_size, 16, 16, 192],
                        'Conv2d_5_pointwise': [batch_size, 16, 16, 192],
                        'Conv2d_6_depthwise': [batch_size, 8, 8, 192],
                        'Conv2d_6_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_7_depthwise': [batch_size, 8, 8, 384],
                        'Conv2d_7_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_8_depthwise': [batch_size, 8, 8, 384],
                        'Conv2d_8_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_9_depthwise': [batch_size, 8, 8, 384],
                        'Conv2d_9_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_10_depthwise': [batch_size, 8, 8, 384],
                        'Conv2d_10_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_11_depthwise': [batch_size, 8, 8, 384],
                        'Conv2d_11_pointwise': [batch_size, 8, 8, 384],
                        'Conv2d_12_depthwise': [batch_size, 4, 4, 384],
                        'Conv2d_12_pointwise': [batch_size, 4, 4, 768],
                        'Conv2d_13_depthwise': [batch_size, 4, 4, 768],
                        'Conv2d_13_pointwise': [batch_size, 4, 4, 768]}
    self.assertItemsEqual(endpoints_shapes.keys(), end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in end_points)
      self.assertListEqual(end_points[endpoint_name].get_shape().as_list(),
                           expected_shape)
    self.assertItemsEqual(endpoints_shapes.keys(),
                          explicit_padding_end_points.keys())
    for endpoint_name, expected_shape in endpoints_shapes.items():
      self.assertTrue(endpoint_name in explicit_padding_end_points)
      self.assertListEqual(
          explicit_padding_end_points[endpoint_name].get_shape().as_list(),
          expected_shape)

  def testModelHasExpectedNumberOfParameters(self):
    batch_size = 5
    height, width = 224, 224
    inputs = tf.random.uniform((batch_size, height, width, 3))
    with slim.arg_scope([slim.conv2d, slim.separable_conv2d],
                        normalizer_fn=slim.batch_norm):
      mobilenet_v1.mobilenet_v1_base(inputs)
      total_params, _ = slim.model_analyzer.analyze_vars(
          slim.get_model_variables())
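      # ~3.22M parameters for the base network at depth_multiplier=1.0; a
      # 1000-class logits layer would add roughly another 1M on top of this.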
      self.assertAlmostEqual(3217920, total_params)

  def testBuildEndPointsWithDepthMultiplierLessThanOne(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random.uniform((batch_size, height, width, 3))
    _, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)

    endpoint_keys = [key for key in end_points.keys() if key.startswith('Conv')]

    _, end_points_with_multiplier = mobilenet_v1.mobilenet_v1(
        inputs, num_classes, scope='depth_multiplied_net',
        depth_multiplier=0.5)

    for key in endpoint_keys:
      original_depth = end_points[key].get_shape().as_list()[3]
      new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
      self.assertEqual(0.5 * original_depth, new_depth)

  def testBuildEndPointsWithDepthMultiplierGreaterThanOne(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random.uniform((batch_size, height, width, 3))
    _, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)

    endpoint_keys = [key for key in end_points.keys()
                     if key.startswith('Mixed') or key.startswith('Conv')]

    _, end_points_with_multiplier = mobilenet_v1.mobilenet_v1(
        inputs, num_classes, scope='depth_multiplied_net',
        depth_multiplier=2.0)

    for key in endpoint_keys:
      original_depth = end_points[key].get_shape().as_list()[3]
      new_depth = end_points_with_multiplier[key].get_shape().as_list()[3]
      self.assertEqual(2.0 * original_depth, new_depth)

  def testRaiseValueErrorWithInvalidDepthMultiplier(self):
    batch_size = 5
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.random.uniform((batch_size, height, width, 3))
    with self.assertRaises(ValueError):
      _ = mobilenet_v1.mobilenet_v1(
          inputs, num_classes, depth_multiplier=-0.1)
    with self.assertRaises(ValueError):
      _ = mobilenet_v1.mobilenet_v1(
          inputs, num_classes, depth_multiplier=0.0)

  def testHalfSizeImages(self):
    batch_size = 5
    height, width = 112, 112
    num_classes = 1000

    inputs = tf.random.uniform((batch_size, height, width, 3))
    logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [batch_size, num_classes])
    pre_pool = end_points['Conv2d_13_pointwise']
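    # A 112x112 input with a total stride of 32 leaves ceil(112/32) = 4
    # spatial positions before the average pool.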
    self.assertListEqual(pre_pool.get_shape().as_list(),
                         [batch_size, 4, 4, 1024])

  def testUnknownImageShape(self):
    tf.compat.v1.reset_default_graph()
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000
    input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
    with self.test_session() as sess:
      inputs = tf.compat.v1.placeholder(
          tf.float32, shape=(batch_size, None, None, 3))
      logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes)
      self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      pre_pool = end_points['Conv2d_13_pointwise']
      feed_dict = {inputs: input_np}
      tf.compat.v1.global_variables_initializer().run()
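      # The static spatial dims are unknown here, but feeding 224x224 images
      # should give 224/32 = 7x7 features at run time.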
      pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
      self.assertListEqual(list(pre_pool_out.shape), [batch_size, 7, 7, 1024])

  def testGlobalPoolUnknownImageShape(self):
    tf.compat.v1.reset_default_graph()
    batch_size = 1
    height, width = 250, 300
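    # With 'SAME' padding and a total stride of 32, Conv2d_13_pointwise should
    # be ceil(250/32) x ceil(300/32) = 8 x 10 spatially.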
    num_classes = 1000
    input_np = np.random.uniform(0, 1, (batch_size, height, width, 3))
    with self.test_session() as sess:
      inputs = tf.compat.v1.placeholder(
          tf.float32, shape=(batch_size, None, None, 3))
      logits, end_points = mobilenet_v1.mobilenet_v1(inputs, num_classes,
                                                     global_pool=True)
      self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
      self.assertListEqual(logits.get_shape().as_list(),
                           [batch_size, num_classes])
      pre_pool = end_points['Conv2d_13_pointwise']
      feed_dict = {inputs: input_np}
      tf.compat.v1.global_variables_initializer().run()
      pre_pool_out = sess.run(pre_pool, feed_dict=feed_dict)
      self.assertListEqual(list(pre_pool_out.shape), [batch_size, 8, 10, 1024])

  def testUnknownBatchSize(self):
    batch_size = 1
    height, width = 224, 224
    num_classes = 1000

    inputs = tf.compat.v1.placeholder(tf.float32, (None, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(inputs, num_classes)
    self.assertTrue(logits.op.name.startswith('MobilenetV1/Logits'))
    self.assertListEqual(logits.get_shape().as_list(),
                         [None, num_classes])
    images = tf.random.uniform((batch_size, height, width, 3))

    with self.test_session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      output = sess.run(logits, {inputs: images.eval()})
      self.assertEqual(output.shape, (batch_size, num_classes))

  def testEvaluation(self):
    batch_size = 2
    height, width = 224, 224
    num_classes = 1000

    eval_inputs = tf.random.uniform((batch_size, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
                                          is_training=False)
    predictions = tf.argmax(input=logits, axis=1)

    with self.test_session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEqual(output.shape, (batch_size,))

  def testTrainEvalWithReuse(self):
    train_batch_size = 5
    eval_batch_size = 2
    height, width = 150, 150
    num_classes = 1000

    train_inputs = tf.random.uniform((train_batch_size, height, width, 3))
    mobilenet_v1.mobilenet_v1(train_inputs, num_classes)
    eval_inputs = tf.random.uniform((eval_batch_size, height, width, 3))
    logits, _ = mobilenet_v1.mobilenet_v1(eval_inputs, num_classes,
                                          reuse=True)
    predictions = tf.argmax(input=logits, axis=1)

    with self.test_session() as sess:
      sess.run(tf.compat.v1.global_variables_initializer())
      output = sess.run(predictions)
      self.assertEqual(output.shape, (eval_batch_size,))

  def testLogitsNotSqueezed(self):
    num_classes = 25
    images = tf.random.uniform([1, 224, 224, 3])
    logits, _ = mobilenet_v1.mobilenet_v1(images,
                                          num_classes=num_classes,
                                          spatial_squeeze=False)

    with self.test_session() as sess:
      tf.compat.v1.global_variables_initializer().run()
      logits_out = sess.run(logits)
      self.assertListEqual(list(logits_out.shape), [1, 1, 1, num_classes])

  def testBatchNormScopeDoesNotHaveIsTrainingWhenItsSetToNone(self):
    sc = mobilenet_v1.mobilenet_v1_arg_scope(is_training=None)
    self.assertNotIn('is_training', sc[slim.arg_scope_func_key(
        slim.batch_norm)])

  def testBatchNormScopeDoesHasIsTrainingWhenItsNotNone(self):
    sc = mobilenet_v1.mobilenet_v1_arg_scope(is_training=True)
    self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])
    sc = mobilenet_v1.mobilenet_v1_arg_scope(is_training=False)
    self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])
    sc = mobilenet_v1.mobilenet_v1_arg_scope()
    self.assertIn('is_training', sc[slim.arg_scope_func_key(slim.batch_norm)])

if __name__ == '__main__':
  tf.test.main()