# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests NCF."""

from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

import unittest
import unittest.mock

import tensorflow as tf
from tensorflow.python.eager import context  # pylint: disable=ungrouped-imports

from official.recommendation import constants as rconst
from official.recommendation import ncf_common
from official.recommendation import ncf_keras_main
from official.utils.testing import integration

30
NUM_TRAIN_NEG = 4
31
32
33


class NcfTest(tf.test.TestCase):
  """End-to-end smoke tests for the NCF Keras model on synthetic data.

  Each test patches rconst.SYNTHETIC_BATCHES_PER_EPOCH down to 100 so a full
  "epoch" over synthetic data finishes quickly, then drives
  ncf_keras_main.main through integration.run_synthetic with a small flag set.
  GPU-specific variants skip themselves when not enough GPUs are present.
  """

  @classmethod
  def setUpClass(cls):  # pylint: disable=invalid-name
    # Flags must be defined exactly once per process, before any test parses
    # them, hence setUpClass rather than setUp.
    super(NcfTest, cls).setUpClass()
    ncf_common.define_ncf_flags()

  def setUp(self):
    # Snapshot module-level constants that tests mutate so tearDown can
    # restore them and tests stay isolated from each other.
    self.top_k_old = rconst.TOP_K
    self.num_eval_negatives_old = rconst.NUM_EVAL_NEGATIVES
    # Shrink the eval negative count so evaluation is cheap in tests.
    rconst.NUM_EVAL_NEGATIVES = 2

  def tearDown(self):
    # Restore the constants captured in setUp.
    rconst.NUM_EVAL_NEGATIVES = self.num_eval_negatives_old
    rconst.TOP_K = self.top_k_old

  # Flags shared by every end-to-end test; per-test flags are appended.
  _BASE_END_TO_END_FLAGS = ['-batch_size', '1044', '-train_epochs', '1']

  @unittest.mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_no_dist_strat(self):
    """Runs one synthetic epoch with distribution strategy disabled."""
    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=self._BASE_END_TO_END_FLAGS +
        ['-distribution_strategy', 'off'])

  @unittest.mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_dist_strat(self):
    """Runs one synthetic epoch under the default (CPU-only) strategy."""
    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=self._BASE_END_TO_END_FLAGS + ['-num_gpus', '0'])

  @unittest.mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_dist_strat_ctl(self):
    """Runs one synthetic epoch with the Keras custom training loop."""
    flags = (self._BASE_END_TO_END_FLAGS +
             ['-num_gpus', '0'] +
             ['-keras_use_ctl', 'True'])
    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=flags)

  @unittest.mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_1_gpu_dist_strat_fp16(self):
    """Runs one synthetic epoch on 1 GPU with fp16; skips without a GPU."""
    if context.num_gpus() < 1:
      self.skipTest(
          "{} GPUs are not available for this test. {} GPUs are available".
          format(1, context.num_gpus()))

    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=self._BASE_END_TO_END_FLAGS + ['-num_gpus', '1',
                                                   '--dtype', 'fp16'])

  @unittest.mock.patch.object(rconst, "SYNTHETIC_BATCHES_PER_EPOCH", 100)
  def test_end_to_end_keras_1_gpu_dist_strat_ctl_fp16(self):
    """Runs one fp16 synthetic epoch on 1 GPU with the custom training loop."""
    if context.num_gpus() < 1:
      self.skipTest(
          '{} GPUs are not available for this test. {} GPUs are available'.
          format(1, context.num_gpus()))

    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=self._BASE_END_TO_END_FLAGS + ['-num_gpus', '1',
                                                   '--dtype', 'fp16',
                                                   '--keras_use_ctl'])

  @unittest.mock.patch.object(rconst, 'SYNTHETIC_BATCHES_PER_EPOCH', 100)
  def test_end_to_end_keras_2_gpu_fp16(self):
    """Runs one fp16 synthetic epoch across 2 GPUs; skips without 2 GPUs."""
    if context.num_gpus() < 2:
      self.skipTest(
          "{} GPUs are not available for this test. {} GPUs are available".
          format(2, context.num_gpus()))

    integration.run_synthetic(
        ncf_keras_main.main, tmp_root=self.get_temp_dir(),
        extra_flags=self._BASE_END_TO_END_FLAGS + ['-num_gpus', '2',
                                                   '--dtype', 'fp16'])

if __name__ == "__main__":
  tf.test.main()