Unverified commit 0e05682d authored by Toby Boyd, committed by GitHub

Single timestamp list and return average_exp_per_second. (#6032)

* Replace the separate batch start/end timestamp lists with a single timestamp_log and return avg_exp_per_second from build_stats.

* Add parentheses so the subtraction happens before the division (see the sketch below).
parent b36872b6
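
For context on the second bullet, here is a minimal sketch of the operator-precedence issue, using made-up numbers rather than values from this change: without parentheses around the timestamp subtraction, the division binds tighter and the expression stops being a throughput.

# Minimal precedence illustration with hypothetical values.
total_examples = 64 * 100 * 9        # batch_size * log_steps * (len(timestamp_log) - 1)
first_ts, last_ts = 100.0, 150.0     # timestamps of the first and last log entries

wrong = total_examples / last_ts - first_ts    # evaluates as (total_examples / last_ts) - first_ts
right = total_examples / (last_ts - first_ts)  # parentheses force the subtraction first

print(wrong)  # 284.0  -> not a meaningful rate
print(right)  # 1152.0 -> examples per second over the logged window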
@@ -51,14 +51,12 @@ class TimeHistory(tf.keras.callbacks.Callback):
       batch_size: Total batch size.
     """
-    self._batch_size = batch_size
+    self.batch_size = batch_size
     super(TimeHistory, self).__init__()
     self.log_steps = log_steps
-    # has stats for all batches
-    self.batch_start_timestamps = []
-    # only has stats for batch_index % log_steps == 0 (excluding 0)
-    self.batch_end_timestamps = []
+    # Logs start of step 0 then end of each step based on log_steps interval.
+    self.timestamp_log = []

   def on_train_begin(self, logs=None):
     self.record_batch = True
@@ -71,16 +69,17 @@ class TimeHistory(tf.keras.callbacks.Callback):
       timestamp = time.time()
       self.start_time = timestamp
       self.record_batch = False
-      self.batch_start_timestamps.append(BatchTimestamp(batch, timestamp))
+      if batch == 0:
+        self.timestamp_log.append(BatchTimestamp(batch, timestamp))

   def on_batch_end(self, batch, logs=None):
     if batch % self.log_steps == 0:
       timestamp = time.time()
       elapsed_time = timestamp - self.start_time
-      examples_per_second = (self._batch_size * self.log_steps) / elapsed_time
-      self.record_batch = True
+      examples_per_second = (self.batch_size * self.log_steps) / elapsed_time
       if batch != 0:
-        self.batch_end_timestamps.append(BatchTimestamp(batch, timestamp))
+        self.record_batch = True
+        self.timestamp_log.append(BatchTimestamp(batch, timestamp))
         tf.logging.info("BenchmarkMetric: {'num_batches':%d, 'time_taken': %f,"
                         "'images_per_second': %f}" %
                         (batch, elapsed_time, examples_per_second))
@@ -154,6 +153,7 @@ def build_stats(history, eval_output, time_callback):
       and sparse_categorical_accuracy.
     eval_output: Output of the eval step. Assumes first value is eval_loss and
       second value is accuracy_top_1.
+    time_callback: Time tracking callback likely used during keras.fit.

   Returns:
     Dictionary of normalized results.
@@ -174,9 +174,14 @@ def build_stats(history, eval_output, time_callback):
     stats[TRAIN_TOP_1] = train_hist['sparse_categorical_accuracy'][-1].item()

   if time_callback:
-    stats['batch_start_timestamps'] = time_callback.batch_start_timestamps
-    stats['batch_end_timestamps'] = time_callback.batch_end_timestamps
+    timestamp_log = time_callback.timestamp_log
+    stats['step_timestamp_log'] = timestamp_log
     stats['train_finish_time'] = time_callback.train_finish_time
+    if len(timestamp_log) > 1:
+      stats['avg_exp_per_second'] = (
+          time_callback.batch_size * time_callback.log_steps *
+          (len(time_callback.timestamp_log)-1) /
+          (timestamp_log[-1].timestamp - timestamp_log[0].timestamp))

   return stats
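
A rough end-to-end usage sketch of the new path, not code from this repository: the import path, model, and training data below are illustrative assumptions, and the TimeHistory constructor is assumed to take (batch_size, log_steps) as the docstring above suggests.

# Hedged usage sketch; module path, model, and data are assumptions.
import numpy as np
import tensorflow as tf
from official.resnet.keras import keras_common  # assumed location of TimeHistory/build_stats

BATCH_SIZE, LOG_STEPS = 64, 100
time_callback = keras_common.TimeHistory(BATCH_SIZE, LOG_STEPS)

model = tf.keras.Sequential(
    [tf.keras.layers.Dense(10, activation='softmax', input_shape=(20,))])
model.compile(optimizer='sgd',
              loss='sparse_categorical_crossentropy',
              metrics=['sparse_categorical_accuracy'])

x = np.random.rand(64000, 20).astype('float32')
y = np.random.randint(0, 10, size=(64000,))

history = model.fit(x, y, batch_size=BATCH_SIZE, epochs=1,
                    callbacks=[time_callback])
eval_output = model.evaluate(x, y, batch_size=BATCH_SIZE)

stats = keras_common.build_stats(history, eval_output, time_callback)
# With more than one entry in timestamp_log, the averaged throughput is present:
print(stats.get('avg_exp_per_second'))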