Commit 8f44de85 authored by guptapriya, committed by guptapriya

Add more tests and benchmarks to cover no dist strat and ctl cases

parent bed2745d
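
For anyone skimming the diff, the new coverage comes down to a handful of flag combinations passed through to ncf_keras_main. The summary below is illustrative only: the flag strings are taken from the hunks that follow, while the dict name and grouping are not part of the commit. The benchmark variant in the first hunk toggles the same switches by assigning FLAGS values directly.

    # Illustrative summary (not part of the commit): flag sets exercised by the
    # new Keras NCF end-to-end tests, as read from the diff below.
    NCF_KERAS_TEST_FLAG_SETS = {
        'no_dist_strat': ['-distribution_strategy', 'off'],
        'dist_strat_cpu_only': ['-num_gpus', '0'],
        'dist_strat_ctl': ['-num_gpus', '0', '-keras_use_ctl', 'True'],
    }
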
@@ -121,6 +121,12 @@ class KerasNCFRealData(KerasNCFBenchmarkBase):
     self._setup()
     self._run_and_report_benchmark()
 
+  def benchmark_1_gpu_no_dist_strat_early_stop(self):
+    self._setup()
+    FLAGS.distribution_strategy = 'off'
+    FLAGS.early_stopping = True
+    self._run_and_report_benchmark()
+
   def benchmark_1_gpu_early_stop(self):
     self._setup()
     FLAGS.early_stopping = True

@@ -201,22 +201,29 @@ class NcfTest(tf.test.TestCase):
         extra_flags=self._BASE_END_TO_END_FLAGS + ['-ml_perf', 'True'])
 
   @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
-  def test_end_to_end_keras(self):
+  def test_end_to_end_keras_no_dist_strat(self):
     integration.run_synthetic(
         ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
         extra_flags=self._BASE_END_TO_END_FLAGS +
         ['-distribution_strategy', 'off'])
 
   @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
-  def test_end_to_end_keras_mlperf(self):
+  def test_end_to_end_keras_dist_strat(self):
     integration.run_synthetic(
         ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
-        extra_flags=self._BASE_END_TO_END_FLAGS +
-        ['-ml_perf', 'True',
-         '-distribution_strategy', 'off'])
+        extra_flags=self._BASE_END_TO_END_FLAGS + ['-num_gpus', '0'])
+
+  @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
+  def test_end_to_end_keras_dist_strat_ctl(self):
+    flags = (self._BASE_END_TO_END_FLAGS +
+             ['-num_gpus', '0'] +
+             ['-keras_use_ctl', 'True'])
+    integration.run_synthetic(
+        ncf_keras_main.main, tmp_root=self.get_temp_dir(), max_train=None,
+        extra_flags=flags)
 
   @mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
-  def test_end_to_end_keras_1_gpu(self):
+  def test_end_to_end_keras_1_gpu_dist_strat(self):
     if context.num_gpus() < 1:
       self.skipTest(
           "{} GPUs are not available for this test. {} GPUs are available".
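
As the flag name suggests, '-keras_use_ctl' runs the Keras NCF model with a custom training loop (CTL) instead of the standard fit path. As background only, a minimal generic CTL in TensorFlow 2 looks like the sketch below; this is not the NCF implementation, whose model, losses, and distribution-strategy handling live in ncf_keras_main.py.

    import tensorflow as tf

    # Generic custom-training-loop sketch (background, not the NCF code):
    # iterate the dataset manually and apply gradients per batch instead of
    # delegating the loop to model.fit.
    def train_one_epoch(model, optimizer, loss_fn, dataset):
      for features, labels in dataset:
        with tf.GradientTape() as tape:
          logits = model(features, training=True)
          loss = loss_fn(labels, logits)
        grads = tape.gradient(loss, model.trainable_variables)
        optimizer.apply_gradients(zip(grads, model.trainable_variables))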