Commit 89fd2bb1 authored by Hongkun Yu, committed by A. Unique TensorFlower

Py3 cleanup for nlp/

PiperOrigin-RevId: 339071563
parent 4085c19a
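Every hunk below follows the same Py3 cleanup pattern: the `from __future__ import ...` lines (and, where present, the commented-out `google_type_annotations` import) are deleted, because the behaviors they opted into under Python 2 are the defaults in Python 3. As a small illustration, not taken from the commit, the snippet shows that the removed imports are no-ops on Python 3:

# Illustrative only; not part of the diff. Under Python 3 the semantics that the
# deleted __future__ imports enabled in Python 2 are already the default.

# `division`: `/` is true division.
assert 3 / 2 == 1.5

# `print_function`: print is a built-in function, not a statement.
print("py3 cleanup")

# `absolute_import`: bare imports resolve absolutely (no implicit relative imports).
import collections
assert collections.OrderedDict  # resolves to the stdlib module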
@@ -13,9 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 r"""Convert checkpoints created by Estimator (tf1) to be Keras compatible."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import numpy as np
 import tensorflow.compat.v1 as tf  # TF 1.x
......
@@ -18,9 +18,6 @@ The conversion will yield an object-oriented checkpoint that can be used
 to restore a BertEncoder or BertPretrainerV2 object (see the `converted_model`
 FLAG below).
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import os
......
@@ -19,10 +19,6 @@ The file is forked from:
 https://github.com/google-research/bert/blob/master/tokenization.py.
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import collections
 import re
 import unicodedata
......
@@ -12,9 +12,6 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 # ==============================================================================
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import os
 import tempfile
......
@@ -14,10 +14,6 @@
 # ==============================================================================
 """BERT library to process data for classification task."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import collections
 import csv
 import importlib
......
@@ -14,10 +14,6 @@
 # ==============================================================================
 """BERT finetuning task dataset generator."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import functools
 import json
 import os
......
@@ -13,9 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 """Create masked LM/next sentence masked_lm TF examples for BERT."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import collections
 import itertools
......
@@ -13,12 +13,7 @@
 # limitations under the License.
 # ==============================================================================
 """Library to process data for SQuAD 1.1 and SQuAD 2.0."""
-# pylint: disable=g-bad-import-order
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import collections
 import copy
 import json
......
@@ -18,10 +18,6 @@ The file is forked from:
 https://github.com/google-research/ALBERT/blob/master/run_squad_sp.py
 """
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import collections
 import copy
 import json
......
@@ -14,10 +14,6 @@
 # ==============================================================================
 """Classification and regression network."""
 # pylint: disable=g-classes-have-attributes
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 import collections
 import tensorflow as tf
......
@@ -15,11 +15,6 @@
 # ==============================================================================
 """Transformer-based text encoder network."""
 # pylint: disable=g-classes-have-attributes
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 import inspect
 from absl import logging
......
@@ -14,10 +14,6 @@
 # ==============================================================================
 """Span labeling network."""
 # pylint: disable=g-classes-have-attributes
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 import collections
 import tensorflow as tf
......
@@ -14,11 +14,6 @@
 # ==============================================================================
 """Transformer decoder that mimics a BERT encoder, to load BERT checkpoints."""
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 import tensorflow as tf
 from official.modeling import tf_utils
 from official.nlp.modeling import layers
......
@@ -14,10 +14,6 @@
 # ==============================================================================
 """Tests for nlp.nhnet.decoder."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import numpy as np
 import tensorflow as tf
 from official.nlp.modeling import layers
......
@@ -14,11 +14,7 @@
 # ==============================================================================
 """Input pipelines."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
-import tensorflow.compat.v2 as tf
+import tensorflow as tf
 def decode_record(record, name_to_features):
......
@@ -13,12 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 """tf.keras Models for NHNet."""
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 from absl import logging
 import gin
 import tensorflow as tf
......
@@ -14,11 +14,6 @@
 # ==============================================================================
 """Optimizer and learning rate scheduler."""
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 import tensorflow as tf
 from official.modeling.hyperparams import params_dict
......
@@ -15,10 +15,6 @@
 # ==============================================================================
 """Run NHNet model training and eval."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import os
 # Import libraries
......
@@ -13,11 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 """Utility helpers for Bert2Bert."""
-from __future__ import absolute_import
-from __future__ import division
-# from __future__ import google_type_annotations
-from __future__ import print_function
 from absl import logging
 import tensorflow as tf
 from typing import Optional, Text
......
@@ -13,11 +13,6 @@
 # limitations under the License.
 # ==============================================================================
 """Implementation of multiheaded attention and self-attention layers."""
-from __future__ import absolute_import
-from __future__ import division
-from __future__ import print_function
 import math
 import tensorflow as tf