#!/bin/bash
# This script prepares the various different versions of MobileNet models for
# use in a mobile application. If you don't specify your own trained checkpoint
# file, it will download pretrained checkpoints for ImageNet. You'll also need
# to have a copy of the TensorFlow source code to run some of the commands;
# by default it will be looked for in ../tensorflow, but you can set the
# TENSORFLOW_PATH environment variable before calling the script if your source
# is in a different location.
# The main slim/nets/mobilenet_v1.md description has more details about the
# model, but the main points are that it comes in four depth-multiplier
# versions (1.0, 0.75, 0.50, and 0.25), which control the number of parameters
# and so the file size of the model, and four input image sizes (224, 192, 160,
# or 128 pixels), which affect the amount of computation needed and the latency.
# Here's an example generating a frozen model from pretrained weights:
#
# bash export_mobilenet.sh 1.0 224

set -e

print_usage () {
  echo "Creates a frozen mobilenet model suitable for mobile use"
  echo "Usage:"
  echo "$0 <mobilenet version> <input size> [checkpoint path]"
} 

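# Positional arguments: depth multiplier, input image size, and an optional checkpoint path.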
MOBILENET_VERSION=$1
IMAGE_SIZE=$2
CHECKPOINT=$3

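# Map the requested depth multiplier onto the corresponding slim model name.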
if [[ ${MOBILENET_VERSION} = "1.0" ]]; then
   SLIM_NAME=mobilenet_v1
elif [[ ${MOBILENET_VERSION} = "0.75" ]]; then
   SLIM_NAME=mobilenet_v1_075
elif [[ ${MOBILENET_VERSION} = "0.50" ]]; then
   SLIM_NAME=mobilenet_v1_050
elif [[ ${MOBILENET_VERSION} = "0.25" ]]; then
   SLIM_NAME=mobilenet_v1_025
else
  echo "Bad mobilenet version, should be one of 1.0, 0.75, 0.50, or 0.25"
  print_usage
  exit 1
fi

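# Only the input resolutions the pretrained checkpoints were released at are accepted.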
if [[ ${IMAGE_SIZE} != "224" ]] && [[ ${IMAGE_SIZE} != "192" ]] && [[ ${IMAGE_SIZE} != "160" ]] && [[ ${IMAGE_SIZE} != "128" ]]; then
  echo "Bad input image size, should be one of 224, 192, 160, or 128"
  print_usage
  exit 1
fi

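# Default to a TensorFlow source checkout at ../tensorflow if TENSORFLOW_PATH isn't set in the environment.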
if [[ -z "${TENSORFLOW_PATH}" ]]; then
   TENSORFLOW_PATH=../tensorflow
fi

if [[ ! -d ${TENSORFLOW_PATH} ]]; then
   echo "TensorFlow source folder not found. You should download the source and then set"
   echo "the TENSORFLOW_PATH environment variable to point to it, like this:"
   echo "export TENSORFLOW_PATH=/my/path/to/tensorflow"
   print_usage
   exit 1
fi

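# All intermediate and final files for this version/size combination go under /tmp.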
MODEL_FOLDER=/tmp/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}
if [[ -d ${MODEL_FOLDER} ]]; then
  echo "Model folder ${MODEL_FOLDER} already exists!"
  echo "If you want to overwrite it, then 'rm -rf ${MODEL_FOLDER}' first."
  print_usage
  exit 1
fi
mkdir ${MODEL_FOLDER}

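# If no checkpoint path was supplied, download the matching pretrained ImageNet weights.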
if [[ -z "${CHECKPOINT}" ]]; then
  echo "*******"
  echo "Downloading pretrained weights"
  echo "*******"
  curl "http://download.tensorflow.org/models/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}_2017_06_14.tar.gz" \
    -o ${MODEL_FOLDER}/checkpoints.tar.gz
  tar xzf ${MODEL_FOLDER}/checkpoints.tar.gz --directory ${MODEL_FOLDER}
  CHECKPOINT=${MODEL_FOLDER}/mobilenet_v1_${MOBILENET_VERSION}_${IMAGE_SIZE}.ckpt
fi

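# Step 1: export the inference GraphDef for the chosen architecture (structure only, no weights yet).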
echo "*******"
echo "Exporting graph architecture to ${MODEL_FOLDER}/unfrozen_graph.pb"
echo "*******"
bazel run slim:export_inference_graph -- \
  --model_name=${SLIM_NAME} --image_size=${IMAGE_SIZE} --logtostderr \
  --output_file=${MODEL_FOLDER}/unfrozen_graph.pb --dataset_dir=${MODEL_FOLDER}

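# The remaining tools are built and run from the TensorFlow source tree.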
cd ${TENSORFLOW_PATH}

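# Step 2: combine the GraphDef with the checkpoint so the weights become constants in a single file.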
echo "*******"
echo "Freezing graph to ${MODEL_FOLDER}/frozen_graph.pb"
echo "*******"
bazel run tensorflow/python/tools:freeze_graph -- \
  --input_graph=${MODEL_FOLDER}/unfrozen_graph.pb \
  --input_checkpoint=${CHECKPOINT} \
  --input_binary=true --output_graph=${MODEL_FOLDER}/frozen_graph.pb \
  --output_node_names=MobilenetV1/Predictions/Reshape_1

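# Step 3: fold constants and batch norms, then quantize the weights to eight bits to shrink the file.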
echo "Quantizing weights to ${MODEL_FOLDER}/quantized_graph.pb"
bazel run tensorflow/tools/graph_transforms:transform_graph -- \
  --in_graph=${MODEL_FOLDER}/frozen_graph.pb \
  --out_graph=${MODEL_FOLDER}/quantized_graph.pb \
  --inputs=input --outputs=MobilenetV1/Predictions/Reshape_1 \
  --transforms='fold_constants fold_batch_norms quantize_weights'

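# Step 4: sanity-check the quantized graph by classifying a test image with the label_image example.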
echo "*******"
echo "Running label_image using the graph"
echo "*******"
bazel build tensorflow/examples/label_image:label_image
bazel-bin/tensorflow/examples/label_image/label_image \
  --input_layer=input --output_layer=MobilenetV1/Predictions/Reshape_1 \
  --graph=${MODEL_FOLDER}/quantized_graph.pb --input_mean=-127 --input_std=127 \
  --image=tensorflow/examples/label_image/data/grace_hopper.jpg \
  --input_width=${IMAGE_SIZE} --input_height=${IMAGE_SIZE} --labels=${MODEL_FOLDER}/labels.txt

echo "*******"
echo "Saved graphs to ${MODEL_FOLDER}/frozen_graph.pb and ${MODEL_FOLDER}/quantized_graph.pb"
echo "*******"