Unverified Commit 77abd1e7 authored by Lysandre Debut, committed by GitHub

Centralize logging (#6434)



* Logging

* Style

* hf_logging > utils.logging

* Address @thomwolf's comments

* Update test

* Update src/transformers/benchmark/benchmark_utils.py
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>

* Revert bad change
Co-authored-by: Sylvain Gugger <35901082+sgugger@users.noreply.github.com>
parent 461ae868
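
Every hunk below applies the same two-line change: the standard-library `import logging` is dropped in favor of the library's new centralized wrapper (`from .utils import logging`), and module loggers are created through it. A minimal sketch of the resulting idiom as seen from user code, assuming only the `get_logger` helper this PR introduces:

    # Sketch: the centralized logger pattern, used from outside the library.
    from transformers.utils import logging

    logger = logging.get_logger(__name__)  # wraps the stdlib logging.getLogger
    logger.warning("this message goes through the transformers logging setup")

Inside the library itself the relative form `from .utils import logging` is used, as every hunk shows.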
@@ -15,12 +15,12 @@
 # limitations under the License.
 import copy
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 class EncoderDecoderConfig(PretrainedConfig):
@@ -14,13 +14,11 @@
 # limitations under the License.
 """ Flaubert configuration, based on XLM. """
-import logging
 from .configuration_xlm import XLMConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 FLAUBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "flaubert/flaubert_small_cased": "https://s3.amazonaws.com/models.huggingface.co/bert/flaubert/flaubert_small_cased/config.json",
@@ -15,13 +15,11 @@
 # limitations under the License.
 """ OpenAI GPT-2 configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 GPT2_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "gpt2": "https://s3.amazonaws.com/models.huggingface.co/bert/gpt2-config.json",
@@ -14,13 +14,13 @@
 # limitations under the License.
 """ Longformer configuration """
-import logging
 from typing import List, Union
 from .configuration_roberta import RobertaConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 LONGFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "allenai/longformer-base-4096": "https://s3.amazonaws.com/models.huggingface.co/bert/allenai/longformer-base-4096/config.json",
@@ -14,12 +14,11 @@
 # limitations under the License.
 """ MBART configuration """
-import logging
 from .configuration_bart import BartConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 MBART_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "facebook/mbart-large-en-ro": "https://s3.amazonaws.com/models.huggingface.co/bert/facebook/mbart-large-en-ro/config.json",
@@ -15,11 +15,10 @@
 # limitations under the License.
 """ MMBT configuration """
-import logging
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 class MMBTConfig(object):
@@ -12,12 +12,11 @@
 # limitations under the License.
 """ MobileBERT model configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 MOBILEBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "mobilebert-uncased": "https://s3.amazonaws.com/models.huggingface.co/bert/google/mobilebert-uncased/config.json"
@@ -15,13 +15,11 @@
 # limitations under the License.
 """ OpenAI GPT configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 OPENAI_GPT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "openai-gpt": "https://s3.amazonaws.com/models.huggingface.co/bert/openai-gpt-config.json"
@@ -14,13 +14,12 @@
 # limitations under the License.
 """ PEGASUS model configuration """
-import logging
 from .configuration_bart import BART_CONFIG_ARGS_DOC, BartConfig
 from .file_utils import add_start_docstrings_to_callable
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 # These config values do not vary between checkpoints
 DEFAULTS = dict(
@@ -15,13 +15,11 @@
 # limitations under the License.
 """ Reformer model configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 REFORMER_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "google/reformer-crime-and-punishment": "https://cdn.huggingface.co/google/reformer-crime-and-punishment/config.json",
@@ -14,13 +14,11 @@
 # limitations under the License.
 """ RetriBERT model configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 # TODO: uploadto AWS
 RETRIBERT_PRETRAINED_CONFIG_ARCHIVE_MAP = {
@@ -15,13 +15,11 @@
 # limitations under the License.
 """ RoBERTa configuration """
-import logging
 from .configuration_bert import BertConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "roberta-base": "https://s3.amazonaws.com/models.huggingface.co/bert/roberta-base-config.json",
@@ -14,13 +14,11 @@
 # limitations under the License.
 """ T5 model configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 T5_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "t5-small": "https://s3.amazonaws.com/models.huggingface.co/bert/t5-small-config.json",
@@ -16,13 +16,12 @@
 """ Transformer XL configuration """
-import logging
 import warnings
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 TRANSFO_XL_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "transfo-xl-wt103": "https://s3.amazonaws.com/models.huggingface.co/bert/transfo-xl-wt103-config.json",
@@ -18,14 +18,14 @@
 import copy
 import json
-import logging
 import os
 from typing import Any, Dict, Tuple
 from .file_utils import CONFIG_NAME, cached_path, hf_bucket_url, is_remote_url
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 class PretrainedConfig(object):
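
Because `PretrainedConfig` in the hunk above is the base class for every configuration file in this diff, routing its logger through `get_logger` means all config load/save messages now hang off the single `transformers` root logger. A short sketch of the payoff, assuming a `set_verbosity_warning` helper ships alongside the `set_verbosity_info` visible later in this diff:

    from transformers.utils import logging

    # One call now adjusts every module logger created in the files above,
    # since they all nest under the "transformers" root logger.
    logging.set_verbosity_warning()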
@@ -14,13 +14,11 @@
 # limitations under the License.
 """ XLM configuration """
-import logging
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 XLM_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "xlm-mlm-en-2048": "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-mlm-en-2048-config.json",
@@ -15,13 +15,11 @@
 # limitations under the License.
 """ XLM-RoBERTa configuration """
-import logging
 from .configuration_roberta import RobertaConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 XLM_ROBERTA_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "xlm-roberta-base": "https://s3.amazonaws.com/models.huggingface.co/bert/xlm-roberta-base-config.json",
@@ -15,13 +15,13 @@
 # limitations under the License.
 """ XLNet configuration """
-import logging
 import warnings
 from .configuration_utils import PretrainedConfig
+from .utils import logging
-logger = logging.getLogger(__name__)
+logger = logging.get_logger(__name__)
 XLNET_PRETRAINED_CONFIG_ARCHIVE_MAP = {
     "xlnet-base-cased": "https://s3.amazonaws.com/models.huggingface.co/bert/xlnet-base-cased-config.json",
@@ -16,14 +16,15 @@
 import argparse
-import logging
 import torch
 from transformers import AlbertConfig, AlbertForPreTraining, load_tf_weights_in_albert
+from .utils import logging
-logging.basicConfig(level=logging.INFO)
+logging.set_verbosity_info()
 def convert_tf_checkpoint_to_pytorch(tf_checkpoint_path, albert_config_file, pytorch_dump_path):
@@ -16,7 +16,6 @@
 import argparse
-import logging
 import os
 from pathlib import Path
@@ -33,6 +32,8 @@ from transformers import (
 )
 from transformers.modeling_bart import _make_linear_from_emb
+from .utils import logging
 FAIRSEQ_MODELS = ["bart.large", "bart.large.mnli", "bart.large.cnn", "bart_xsum/model.pt"]
 extra_arch = {"bart.large": BartModel, "bart.large.mnli": BartForSequenceClassification}
@@ -40,8 +41,8 @@ if version.parse(fairseq.__version__) < version.parse("0.9.0"):
     raise Exception("requires fairseq >= 0.9.0")
-logging.basicConfig(level=logging.INFO)
-logger = logging.getLogger(__name__)
+logging.set_verbosity_info()
+logger = logging.get_logger(__name__)
 SAMPLE_TEXT = " Hello world! cécé herlolip"
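
In the two conversion scripts above, `logging.basicConfig(level=logging.INFO)` becomes `logging.set_verbosity_info()`. A hedged usage sketch of the verbosity API visible in this diff, plus a `get_verbosity` accessor assumed to accompany it:

    from transformers.utils import logging

    logging.set_verbosity_info()    # replaces logging.basicConfig(level=logging.INFO)
    logger = logging.get_logger(__name__)
    logger.info("current verbosity: %s", logging.get_verbosity())  # get_verbosity is assumed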