"git@developer.sourcefind.cn:wangsen/mineru.git" did not exist on "cf09313b1ef19c5b8d88effc7b25dc49a8fd084e"
Commit 9e3550e5 authored by A. Unique TensorFlower

Minor cleanup in `projects/mobilebert/distillation.py`.

PiperOrigin-RevId: 367244904
parent 417aa073
@@ -378,7 +378,6 @@ class BertDistillationTask(policies.ProgressivePolicy, base_task.Task):
     # Shape: [batch, max_predictions_per_seq, vocab_size]
     lm_label = tf.one_hot(indices=lm_label, depth=vocab_size, on_value=1.0,
                           off_value=0.0, axis=-1, dtype=tf.float32)
-    lm_label_weights = labels['masked_lm_weights']
     gt_ratio = distill_config.distill_ground_truth_ratio
     if gt_ratio != 1.0:
       teacher_mlm_logits = outputs['teacher_pretrainer_output']['mlm_logits']
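
For context, the code around this diff builds the masked-LM distillation target, where `distill_ground_truth_ratio` controls how much of the target comes from the hard one-hot labels versus the teacher's predictions. Below is a minimal sketch of that blending idea; the function name and the exact mixing formula are assumptions for illustration, not the repository's implementation.

```python
# Hypothetical sketch (not the project's code): blending one-hot ground-truth
# labels with a teacher's soft predictions for the masked-LM distillation target.
import tensorflow as tf


def distillation_target(lm_label_ids, teacher_mlm_logits, vocab_size,
                        distill_ground_truth_ratio=0.5):
  """Mixes hard labels with the teacher's soft distribution.

  Args:
    lm_label_ids: [batch, max_predictions_per_seq] int ids of masked tokens.
    teacher_mlm_logits: [batch, max_predictions_per_seq, vocab_size] logits
      from the teacher pretrainer.
    vocab_size: vocabulary size.
    distill_ground_truth_ratio: weight on the hard one-hot labels; the
      remaining (1 - ratio) weight goes to the teacher's soft distribution.
  """
  # Shape: [batch, max_predictions_per_seq, vocab_size]
  one_hot_labels = tf.one_hot(lm_label_ids, depth=vocab_size, on_value=1.0,
                              off_value=0.0, axis=-1, dtype=tf.float32)
  gt_ratio = distill_ground_truth_ratio
  if gt_ratio == 1.0:
    # Pure ground-truth target; the teacher's output is not needed.
    return one_hot_labels
  teacher_probs = tf.nn.softmax(teacher_mlm_logits, axis=-1)
  return gt_ratio * one_hot_labels + (1.0 - gt_ratio) * teacher_probs
```

With `distill_ground_truth_ratio == 1.0` the teacher logits are never used, which is why the real code only fetches `teacher_mlm_logits` inside the `if gt_ratio != 1.0:` branch.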