Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
c7f3abc2
Unverified
Commit
c7f3abc2
authored
Feb 27, 2023
by
Stas Bekman
Committed by
GitHub
Feb 27, 2023
Browse files
introduce `logger.warning_once` and use it for grad checkpointing code (#21804)
* logger.warning_once * style
parent
f95f60c8
Changes
58
Hide whitespace changes
Inline
Side-by-side
Showing
20 changed files
with
20 additions
and
20 deletions
+20
-20
src/transformers/models/git/modeling_git.py
src/transformers/models/git/modeling_git.py
+1
-1
src/transformers/models/gpt2/modeling_gpt2.py
src/transformers/models/gpt2/modeling_gpt2.py
+1
-1
src/transformers/models/gpt_neo/modeling_gpt_neo.py
src/transformers/models/gpt_neo/modeling_gpt_neo.py
+1
-1
src/transformers/models/gptj/modeling_gptj.py
src/transformers/models/gptj/modeling_gptj.py
+1
-1
src/transformers/models/imagegpt/modeling_imagegpt.py
src/transformers/models/imagegpt/modeling_imagegpt.py
+1
-1
src/transformers/models/layoutlm/modeling_layoutlm.py
src/transformers/models/layoutlm/modeling_layoutlm.py
+1
-1
src/transformers/models/led/modeling_led.py
src/transformers/models/led/modeling_led.py
+1
-1
src/transformers/models/m2m_100/modeling_m2m_100.py
src/transformers/models/m2m_100/modeling_m2m_100.py
+1
-1
src/transformers/models/marian/modeling_marian.py
src/transformers/models/marian/modeling_marian.py
+1
-1
src/transformers/models/markuplm/modeling_markuplm.py
src/transformers/models/markuplm/modeling_markuplm.py
+1
-1
src/transformers/models/mbart/modeling_mbart.py
src/transformers/models/mbart/modeling_mbart.py
+1
-1
src/transformers/models/megatron_bert/modeling_megatron_bert.py
...ansformers/models/megatron_bert/modeling_megatron_bert.py
+1
-1
src/transformers/models/mt5/modeling_mt5.py
src/transformers/models/mt5/modeling_mt5.py
+1
-1
src/transformers/models/mvp/modeling_mvp.py
src/transformers/models/mvp/modeling_mvp.py
+1
-1
src/transformers/models/nezha/modeling_nezha.py
src/transformers/models/nezha/modeling_nezha.py
+1
-1
src/transformers/models/opt/modeling_opt.py
src/transformers/models/opt/modeling_opt.py
+1
-1
src/transformers/models/pegasus/modeling_pegasus.py
src/transformers/models/pegasus/modeling_pegasus.py
+1
-1
src/transformers/models/pegasus_x/modeling_pegasus_x.py
src/transformers/models/pegasus_x/modeling_pegasus_x.py
+1
-1
src/transformers/models/plbart/modeling_plbart.py
src/transformers/models/plbart/modeling_plbart.py
+1
-1
src/transformers/models/prophetnet/modeling_prophetnet.py
src/transformers/models/prophetnet/modeling_prophetnet.py
+1
-1
No files found.
src/transformers/models/git/modeling_git.py
View file @
c7f3abc2
...
...
@@ -444,7 +444,7 @@ class GitEncoder(nn.Module):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/gpt2/modeling_gpt2.py
View file @
c7f3abc2
...
...
@@ -853,7 +853,7 @@ class GPT2Model(GPT2PreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/gpt_neo/modeling_gpt_neo.py
View file @
c7f3abc2
...
...
@@ -589,7 +589,7 @@ class GPTNeoModel(GPTNeoPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/gptj/modeling_gptj.py
View file @
c7f3abc2
...
...
@@ -653,7 +653,7 @@ class GPTJModel(GPTJPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/imagegpt/modeling_imagegpt.py
View file @
c7f3abc2
...
...
@@ -812,7 +812,7 @@ class ImageGPTModel(ImageGPTPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/layoutlm/modeling_layoutlm.py
View file @
c7f3abc2
...
...
@@ -479,7 +479,7 @@ class LayoutLMEncoder(nn.Module):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/led/modeling_led.py
View file @
c7f3abc2
...
...
@@ -2136,7 +2136,7 @@ class LEDDecoder(LEDPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/m2m_100/modeling_m2m_100.py
View file @
c7f3abc2
...
...
@@ -1055,7 +1055,7 @@ class M2M100Decoder(M2M100PreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting"
" `use_cache=False`..."
)
...
...
src/transformers/models/marian/modeling_marian.py
View file @
c7f3abc2
...
...
@@ -1020,7 +1020,7 @@ class MarianDecoder(MarianPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/markuplm/modeling_markuplm.py
View file @
c7f3abc2
...
...
@@ -641,7 +641,7 @@ class MarkupLMEncoder(nn.Module):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/mbart/modeling_mbart.py
View file @
c7f3abc2
...
...
@@ -1069,7 +1069,7 @@ class MBartDecoder(MBartPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/megatron_bert/modeling_megatron_bert.py
View file @
c7f3abc2
...
...
@@ -544,7 +544,7 @@ class MegatronBertEncoder(nn.Module):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/mt5/modeling_mt5.py
View file @
c7f3abc2
...
...
@@ -1008,7 +1008,7 @@ class MT5Stack(MT5PreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/mvp/modeling_mvp.py
View file @
c7f3abc2
...
...
@@ -1212,7 +1212,7 @@ class MvpDecoder(MvpPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/nezha/modeling_nezha.py
View file @
c7f3abc2
...
...
@@ -571,7 +571,7 @@ class NezhaEncoder(nn.Module):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/opt/modeling_opt.py
View file @
c7f3abc2
...
...
@@ -671,7 +671,7 @@ class OPTDecoder(OPTPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/pegasus/modeling_pegasus.py
View file @
c7f3abc2
...
...
@@ -1070,7 +1070,7 @@ class PegasusDecoder(PegasusPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/pegasus_x/modeling_pegasus_x.py
View file @
c7f3abc2
...
...
@@ -1311,7 +1311,7 @@ class PegasusXDecoder(PegasusXPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/plbart/modeling_plbart.py
View file @
c7f3abc2
...
...
@@ -1048,7 +1048,7 @@ class PLBartDecoder(PLBartPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
src/transformers/models/prophetnet/modeling_prophetnet.py
View file @
c7f3abc2
...
...
@@ -1572,7 +1572,7 @@ class ProphetNetDecoder(ProphetNetPreTrainedModel):
if
self
.
gradient_checkpointing
and
self
.
training
:
if
use_cache
:
logger
.
warning
(
logger
.
warning
_once
(
"`use_cache=True` is incompatible with gradient checkpointing. Setting `use_cache=False`..."
)
use_cache
=
False
...
...
Prev
1
2
3
Next
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment