Unverified Commit 44ecc4a7 authored by Toby Boyd's avatar Toby Boyd Committed by GitHub
Browse files

Imagenet short tests (#6132)

* Add short imagenet tests (taken from seemuch)
- also rename to match go forward naming

* fix method name

* Update doc strings.

* Fix GPU number.
parent a66d4713
...@@ -18,71 +18,166 @@ from __future__ import print_function ...@@ -18,71 +18,166 @@ from __future__ import print_function
import os import os
from absl import flags from absl import flags
from absl.testing import flagsaver
import tensorflow as tf # pylint: disable=g-bad-import-order
from official.resnet import imagenet_main from official.resnet import imagenet_main
from official.resnet.keras import keras_benchmark
from official.resnet.keras import keras_common from official.resnet.keras import keras_common
from official.resnet.keras import keras_imagenet_main from official.resnet.keras import keras_imagenet_main
# Accepted top-1 accuracy band for the ResNet50 ImageNet accuracy tests.
# A run finishing outside [MIN, MAX] is reported as a quality failure.
MIN_TOP_1_ACCURACY = 0.76
MAX_TOP_1_ACCURACY = 0.77

# Default location of the real ImageNet data on the benchmark machines.
# NOTE(review): assumes the benchmark host mounts ImageNet here — confirm.
DATA_DIR = '/data/imagenet/'
FLAGS = flags.FLAGS
class Resnet50KerasAccuracy(keras_benchmark.KerasBenchmark):
  """Benchmark accuracy tests for ResNet50 in Keras.

  Each `benchmark_*` method trains ResNet50 on real ImageNet data for 90
  epochs on 8 GPUs and reports the resulting stats, checked against the
  [MIN_TOP_1_ACCURACY, MAX_TOP_1_ACCURACY] quality band.
  """

  def __init__(self, output_dir=None):
    """Initializes the benchmark with the flag definitions it needs.

    Args:
      output_dir: directory where each test writes its model artifacts.
    """
    flag_methods = [keras_common.define_keras_flags,
                    imagenet_main.define_imagenet_flags]
    super(Resnet50KerasAccuracy, self).__init__(output_dir=output_dir,
                                                flag_methods=flag_methods)

  def benchmark_graph_8_gpu(self):
    """Test Keras model with Keras fit/dist_strat and 8 GPUs."""
    self._setup()
    FLAGS.num_gpus = 8
    FLAGS.data_dir = DATA_DIR
    FLAGS.batch_size = 128 * 8
    FLAGS.train_epochs = 90
    FLAGS.model_dir = self._get_model_dir('keras_resnet50_8_gpu')
    FLAGS.dtype = 'fp32'
    stats = keras_imagenet_main.run(FLAGS)
    # BUG FIX: was `self._fill_report_object(...)`, but the method was
    # renamed to the public `fill_report_object` below — the old name no
    # longer exists and would raise AttributeError.
    self.fill_report_object(stats, FLAGS.batch_size)

  def benchmark_8_gpu(self):
    """Test Keras model with eager, dist_strat and 8 GPUs."""
    self._setup()
    FLAGS.num_gpus = 8
    FLAGS.data_dir = DATA_DIR
    FLAGS.batch_size = 128 * 8
    FLAGS.train_epochs = 90
    FLAGS.model_dir = self._get_model_dir('keras_resnet50_eager_8_gpu')
    FLAGS.dtype = 'fp32'
    FLAGS.enable_eager = True
    stats = keras_imagenet_main.run(FLAGS)
    # BUG FIX: renamed call, see benchmark_graph_8_gpu.
    self.fill_report_object(stats, FLAGS.batch_size)

  def fill_report_object(self, stats, total_batch_size):
    """Reports stats with the accuracy band and batch size filled in.

    Args:
      stats: dict of results returned by `keras_imagenet_main.run`.
      total_batch_size: global batch size used for the run.
    """
    super(Resnet50KerasAccuracy, self).fill_report_object(
        stats,
        top_1_min=MIN_TOP_1_ACCURACY,
        top_1_max=MAX_TOP_1_ACCURACY,
        total_batch_size=total_batch_size,
        log_steps=100)

  def _get_model_dir(self, folder_name):
    """Returns the per-test model directory under the output dir."""
    return os.path.join(self.output_dir, folder_name)
class Resnet50KerasBenchmarkBase(keras_benchmark.KerasBenchmark):
  """Resnet50 benchmarks.

  Shared driver for the short (speed-oriented) ResNet50 benchmarks; the
  synthetic- and real-data variants below supply the default flags.
  """

  def __init__(self, output_dir=None, default_flags=None):
    """Sets up flag definitions and per-variant default flag values.

    Args:
      output_dir: directory where runs write their artifacts.
      default_flags: dict of flag-name -> value applied before each test.
    """
    flag_methods = [keras_common.define_keras_flags,
                    imagenet_main.define_imagenet_flags]
    super(Resnet50KerasBenchmarkBase, self).__init__(
        output_dir=output_dir,
        flag_methods=flag_methods,
        default_flags=default_flags)

  def _run_benchmark(self):
    """Runs the model with the current FLAGS and reports the stats."""
    stats = keras_imagenet_main.run(FLAGS)
    self.fill_report_object(stats)

  def _run_with_flags(self, num_gpus, enable_eager, no_dist_strat,
                      batch_size):
    """Resets flags, applies the given settings, and runs one benchmark."""
    self._setup()
    FLAGS.num_gpus = num_gpus
    FLAGS.enable_eager = enable_eager
    FLAGS.turn_off_distribution_strategy = no_dist_strat
    FLAGS.batch_size = batch_size
    self._run_benchmark()

  def benchmark_1_gpu_no_dist_strat(self):
    self._run_with_flags(1, True, True, 128)

  def benchmark_graph_1_gpu_no_dist_strat(self):
    self._run_with_flags(1, False, True, 128)

  def benchmark_1_gpu(self):
    self._run_with_flags(1, True, False, 128)

  def benchmark_graph_1_gpu(self):
    self._run_with_flags(1, False, False, 128)

  def benchmark_8_gpu(self):
    self._run_with_flags(8, True, False, 128 * 8)  # 128 per GPU, 8 GPUs.

  def benchmark_graph_8_gpu(self):
    self._run_with_flags(8, False, False, 128 * 8)  # 128 per GPU, 8 GPUs.
class Resnet50KerasBenchmarkSynth(Resnet50KerasBenchmarkBase):
  """Resnet50 synthetic benchmark tests."""

  def __init__(self, output_dir=None):
    """Configures the short synthetic-data run: no eval, 110 steps."""
    def_flags = {
        'skip_eval': True,
        'use_synthetic_data': True,
        'train_steps': 110,
        'log_steps': 10,
    }
    super(Resnet50KerasBenchmarkSynth, self).__init__(
        output_dir=output_dir, default_flags=def_flags)
class Resnet50KerasBenchmarkReal(Resnet50KerasBenchmarkBase):
  """Resnet50 real data benchmark tests."""

  def __init__(self, output_dir=None):
    """Configures the short real-data run: no eval, 110 steps."""
    def_flags = {
        'skip_eval': True,
        'data_dir': DATA_DIR,
        'train_steps': 110,
        'log_steps': 10,
    }
    super(Resnet50KerasBenchmarkReal, self).__init__(
        output_dir=output_dir, default_flags=def_flags)
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment