Commit 91c681af authored by Haitang Hu's avatar Haitang Hu Committed by A. Unique TensorFlower
Browse files

Revert the log-passing change, since it might hurt performance.

PiperOrigin-RevId: 294922828
parent e8140fa9
...@@ -329,12 +329,12 @@ def run_customized_training_loop( ...@@ -329,12 +329,12 @@ def run_customized_training_loop(
for callback in custom_callbacks: for callback in custom_callbacks:
callback.on_batch_begin(batch) callback.on_batch_begin(batch)
def _run_callbacks_on_batch_end(batch, logs): def _run_callbacks_on_batch_end(batch):
"""Runs custom callbacks at the end of every step.""" """Runs custom callbacks at the end of every step."""
if not custom_callbacks: if not custom_callbacks:
return return
for callback in custom_callbacks: for callback in custom_callbacks:
callback.on_batch_end(batch, logs) callback.on_batch_end(batch)
# Training loop starts here. # Training loop starts here.
checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer) checkpoint = tf.train.Checkpoint(model=model, optimizer=optimizer)
...@@ -371,10 +371,10 @@ def run_customized_training_loop( ...@@ -371,10 +371,10 @@ def run_customized_training_loop(
# Converts steps to a Tensor to avoid tf.function retracing. # Converts steps to a Tensor to avoid tf.function retracing.
train_steps(train_iterator, train_steps(train_iterator,
tf.convert_to_tensor(steps, dtype=tf.int32)) tf.convert_to_tensor(steps, dtype=tf.int32))
train_loss = _float_metric_value(train_loss_metric) _run_callbacks_on_batch_end(current_step)
_run_callbacks_on_batch_end(current_step, {'loss': train_loss})
current_step += steps current_step += steps
train_loss = _float_metric_value(train_loss_metric)
# Updates training logging. # Updates training logging.
training_status = 'Train Step: %d/%d / loss = %s' % ( training_status = 'Train Step: %d/%d / loss = %s' % (
current_step, total_training_steps, train_loss) current_step, total_training_steps, train_loss)
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment