# Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Pretrains a recurrent language model.

Computational time:
  ~2 days to train 100,000 steps with a 1-layer LSTM of 1024 hidden units,
  256-dimensional embeddings, truncated backprop over 400 steps, and a
  minibatch size of 256 on a single GPU (Pascal Titan X, cuDNN v5).
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function

# Dependency imports

import tensorflow as tf

import graphs
import train_utils

FLAGS = tf.app.flags.FLAGS
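# Command-line flags used here (e.g. --ps_tasks) are expected to be defined
# by the imported graphs / train_utils modules; this file defines none itself.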


def main(_):
  """Trains Language Model."""
  tf.logging.set_verbosity(tf.logging.INFO)
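  # replica_device_setter places variables on parameter-server tasks when
  # --ps_tasks > 0; with ps_tasks=0 it leaves device placement unchanged.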
  with tf.device(tf.train.replica_device_setter(FLAGS.ps_tasks)):
    model = graphs.get_model()
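    # Build the language-model pretraining graph; it returns the train op,
    # the scalar loss, and the global step consumed by the training loop.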
    train_op, loss, global_step = model.language_model_training()
    train_utils.run_training(train_op, loss, global_step)


if __name__ == '__main__':
  tf.app.run()