chenpangpang/transformers — Commits — c9035e45

Unverified commit c9035e45, authored Apr 07, 2021 by Stas Bekman, committed by GitHub on Apr 07, 2021.

fix: The 'warn' method is deprecated (#11105)

* The 'warn' method is deprecated
* fix test
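
Background: in Python's standard logging module, `Logger.warn` is a deprecated alias of `Logger.warning`, and CPython emits a DeprecationWarning ("The 'warn' method is deprecated, use 'warning' instead") when it is called. Since the library's `logger` objects are standard `logging.Logger` instances, every `logger.warn` call site trips this. A minimal sketch of the before/after (the logger name below is illustrative, not the library's):

    import logging
    import warnings

    logging.basicConfig(level=logging.WARNING)
    warnings.simplefilter("always", DeprecationWarning)  # make the warning visible

    logger = logging.getLogger("transformers.example")  # illustrative logger name

    # Before this commit: `warn` is a deprecated alias kept for backward
    # compatibility; CPython emits a DeprecationWarning when it is called.
    logger.warn("old spelling")

    # After this commit: the supported spelling, identical log output.
    logger.warning("new spelling")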
Parent: 247bed38
Changes: 52 files in total (paginated; this page shows 20 changed files with 24 additions and 22 deletions: +24 −22).
src/transformers/file_utils.py (+1 −1)
src/transformers/integrations.py (+1 −1)
src/transformers/modeling_tf_utils.py (+5 −3)
src/transformers/models/auto/tokenization_auto.py (+1 −1)
src/transformers/models/bart/modeling_bart.py (+1 −1)
src/transformers/models/bert/modeling_bert.py (+1 −1)
src/transformers/models/bert_generation/modeling_bert_generation.py (+1 −1)
src/transformers/models/big_bird/modeling_big_bird.py (+1 −1)
src/transformers/models/blenderbot/modeling_blenderbot.py (+1 −1)
src/transformers/models/blenderbot_small/modeling_blenderbot_small.py (+1 −1)
src/transformers/models/electra/modeling_electra.py (+1 −1)
src/transformers/models/gpt2/modeling_gpt2.py (+1 −1)
src/transformers/models/gpt_neo/modeling_gpt_neo.py (+1 −1)
src/transformers/models/layoutlm/modeling_layoutlm.py (+1 −1)
src/transformers/models/led/modeling_led.py (+1 −1)
src/transformers/models/m2m_100/modeling_m2m_100.py (+1 −1)
src/transformers/models/marian/modeling_marian.py (+1 −1)
src/transformers/models/mbart/modeling_mbart.py (+1 −1)
src/transformers/models/pegasus/modeling_pegasus.py (+1 −1)
src/transformers/models/prophetnet/modeling_prophetnet.py (+1 −1)
src/transformers/file_utils.py

@@ -194,7 +194,7 @@ if (
     and "PYTORCH_TRANSFORMERS_CACHE" not in os.environ
     and "TRANSFORMERS_CACHE" not in os.environ
 ):
-    logger.warn(
+    logger.warning(
         "In Transformers v4.0.0, the default path to cache downloaded models changed from "
         "'~/.cache/torch/transformers' to '~/.cache/huggingface/transformers'. Since you don't seem to have overridden "
         "and '~/.cache/torch/transformers' is a directory that exists, we're moving it to "
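
For orientation, the warning above sits inside a one-time cache-migration check. A sketch reconstructing that condition from the visible context lines (the two `os.path.isdir` tests and the path variables are assumptions inferred from the warning text; the hunk only shows the condition's tail):

    import os

    # Paths taken from the warning text above.
    old_cache = os.path.expanduser("~/.cache/torch/transformers")
    new_cache = os.path.expanduser("~/.cache/huggingface/transformers")

    # Migrate only if the old cache exists, the new one doesn't, and the user
    # has not overridden the location via the cache environment variables.
    should_move_cache = (
        os.path.isdir(old_cache)
        and not os.path.isdir(new_cache)
        and "PYTORCH_TRANSFORMERS_CACHE" not in os.environ
        and "TRANSFORMERS_CACHE" not in os.environ
    )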
src/transformers/integrations.py

@@ -54,7 +54,7 @@ from .trainer_utils import PREFIX_CHECKPOINT_DIR, BestRun, IntervalStrategy  # n
 def is_wandb_available():
     # any value of WANDB_DISABLED disables wandb
     if os.getenv("WANDB_DISABLED", "").upper() in ENV_VARS_TRUE_VALUES:
-        logger.warn(
+        logger.warning(
             "Using the `WAND_DISABLED` environment variable is deprecated and will be removed in v5. Use the "
             "--report_to flag to control the integrations used for logging result (for instance --report_to none)."
         )
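
The surrounding function is small enough to sketch in full. This version assumes `ENV_VARS_TRUE_VALUES` is the library's set of accepted truthy spellings (roughly {"1", "ON", "YES", "TRUE"}) and that availability otherwise reduces to an import check:

    import importlib.util
    import logging
    import os

    logger = logging.getLogger(__name__)

    # Assumed truthy spellings, mirroring the library's ENV_VARS_TRUE_VALUES.
    ENV_VARS_TRUE_VALUES = {"1", "ON", "YES", "TRUE"}

    def is_wandb_available():
        # Any truthy value of WANDB_DISABLED switches the integration off, with
        # a deprecation notice pointing users at the --report_to flag instead.
        if os.getenv("WANDB_DISABLED", "").upper() in ENV_VARS_TRUE_VALUES:
            logger.warning(
                "Using the `WANDB_DISABLED` environment variable is deprecated and "
                "will be removed in v5. Use the --report_to flag instead."
            )
            return False
        return importlib.util.find_spec("wandb") is not None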
src/transformers/modeling_tf_utils.py

@@ -290,7 +290,7 @@ def booleans_processing(config, **kwargs):
             or kwargs["output_hidden_states"] is not None
             or ("use_cache" in kwargs and kwargs["use_cache"] is not None)
         ):
-            tf_logger.warn(
+            tf_logger.warning(
                 "The parameters `output_attentions`, `output_hidden_states` and `use_cache` cannot be updated when calling a model."
                 "They have to be set to True/False in the config object (i.e.: `config=XConfig.from_pretrained('name', output_attentions=True)`)."
             )

@@ -299,7 +299,9 @@ def booleans_processing(config, **kwargs):
         final_booleans["output_hidden_states"] = config.output_hidden_states

         if kwargs["return_dict"] is not None:
-            tf_logger.warn("The parameter `return_dict` cannot be set in graph mode and will always be set to `True`.")
+            tf_logger.warning(
+                "The parameter `return_dict` cannot be set in graph mode and will always be set to `True`."
+            )
         final_booleans["return_dict"] = True

         if "use_cache" in kwargs:

@@ -398,7 +400,7 @@ def input_processing(func, config, input_ids, **kwargs):
         if isinstance(v, allowed_types) or v is None:
             output[k] = v
         elif k not in parameter_names and "args" not in parameter_names:
-            logger.warn(
+            logger.warning(
                 f"The parameter {k} does not belongs to the parameter list {parameter_names} and will be ignored."
             )
             continue
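
All three warnings in this file guard the same rule: under eager execution a per-call boolean may override the config, while in graph mode the config value always wins and a call-time value only earns a warning. A hypothetical helper condensing that rule for one key (booleans_processing itself handles the keys inline rather than through such a function):

    import logging

    tf_logger = logging.getLogger("transformers.modeling_tf_utils")

    def pick_boolean(name, config_value, call_value, eager):
        """Hypothetical distillation of booleans_processing's rule for one key."""
        if eager:
            # Eager mode: a per-call value may override the config.
            return call_value if call_value is not None else config_value
        if call_value is not None:
            # Graph mode: the call-time value cannot take effect, so only warn.
            tf_logger.warning(f"The parameter `{name}` cannot be updated when calling a model.")
        return config_value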
src/transformers/models/auto/tokenization_auto.py

@@ -409,7 +409,7 @@ class AutoTokenizer:
         # if model is an encoder decoder, the encoder tokenizer class is used by default
         if isinstance(config, EncoderDecoderConfig):
             if type(config.decoder) is not type(config.encoder):  # noqa: E721
-                logger.warn(
+                logger.warning(
                     f"The encoder model config class: {config.encoder.__class__} is different from the decoder model "
                     f"config class: {config.decoder.__class}. It is not recommended to use the "
                     "`AutoTokenizer.from_pretrained()` method in this case. Please use the encoder and decoder "
src/transformers/models/bart/modeling_bart.py

@@ -1011,7 +1011,7 @@ class BartDecoder(BartPretrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
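
From here on the commit repeats one identical change across the remaining model files: each forward pass warns when `use_cache=True` meets gradient checkpointing, because checkpointing recomputes activations during the backward pass and discards anything cached. A hypothetical standalone helper showing the shared guard (the library inlines this in each model's forward rather than sharing a function):

    import logging

    logger = logging.getLogger(__name__)

    def resolve_use_cache(gradient_checkpointing, training, use_cache):
        """Hypothetical helper; each model file inlines this guard in forward()."""
        if gradient_checkpointing and training and use_cache:
            logger.warning(
                "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. "
                "Setting `use_cache=False`..."
            )
            use_cache = False  # recomputation discards anything the cache would hold
        return use_cache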
src/transformers/models/bert/modeling_bert.py

@@ -544,7 +544,7 @@ class BertEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/bert_generation/modeling_bert_generation.py

@@ -450,7 +450,7 @@ class BertGenerationDecoder(BertGenerationPreTrainedModel):
         super().__init__(config)

         if not config.is_decoder:
-            logger.warn("If you want to use `BertGenerationDecoder` as a standalone, add `is_decoder=True.`")
+            logger.warning("If you want to use `BertGenerationDecoder` as a standalone, add `is_decoder=True.`")

         self.bert = BertGenerationEncoder(config)
         self.lm_head = BertGenerationOnlyLMHead(config)
src/transformers/models/big_bird/modeling_big_bird.py

@@ -1586,7 +1586,7 @@ class BigBirdEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/blenderbot/modeling_blenderbot.py

@@ -973,7 +973,7 @@ class BlenderbotDecoder(BlenderbotPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/blenderbot_small/modeling_blenderbot_small.py

@@ -974,7 +974,7 @@ class BlenderbotSmallDecoder(BlenderbotSmallPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/electra/modeling_electra.py

@@ -541,7 +541,7 @@ class ElectraEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/gpt2/modeling_gpt2.py

@@ -726,7 +726,7 @@ class GPT2Model(GPT2PreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/gpt_neo/modeling_gpt_neo.py

@@ -823,7 +823,7 @@ class GPTNeoModel(GPTNeoPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/layoutlm/modeling_layoutlm.py

@@ -470,7 +470,7 @@ class LayoutLMEncoder(nn.Module):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/led/modeling_led.py

@@ -2070,7 +2070,7 @@ class LEDDecoder(LEDPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/m2m_100/modeling_m2m_100.py

@@ -968,7 +968,7 @@ class M2M100Decoder(M2M100PreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/marian/modeling_marian.py

@@ -981,7 +981,7 @@ class MarianDecoder(MarianPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/mbart/modeling_mbart.py

@@ -1020,7 +1020,7 @@ class MBartDecoder(MBartPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/pegasus/modeling_pegasus.py

@@ -987,7 +987,7 @@ class PegasusDecoder(PegasusPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )
src/transformers/models/prophetnet/modeling_prophetnet.py

@@ -1475,7 +1475,7 @@ class ProphetNetDecoder(ProphetNetPreTrainedModel):
             if getattr(self.config, "gradient_checkpointing", False) and self.training:

                 if use_cache:
-                    logger.warn(
+                    logger.warning(
                         "`use_cache=True` is incompatible with `config.gradient_checkpointing=True`. Setting "
                         "`use_cache=False`..."
                     )