chenpangpang / transformers · Commits
Commit 2bbbf1be (unverified)
[`BC`] Fix BC for other libraries (#29934)

* fix bc?
* nit

Authored Mar 28, 2024 by Arthur, committed by GitHub on Mar 28, 2024
Parent: 4df5b9b4
Showing 3 changed files with 3 additions and 3 deletions (+3 −3)
src/transformers/models/cohere/modeling_cohere.py  +1 −1
src/transformers/models/gemma/modeling_gemma.py    +1 −1
src/transformers/models/llama/modeling_llama.py    +1 −1
src/transformers/models/cohere/modeling_cohere.py

@@ -1139,7 +1139,7 @@ class CohereForCausalLM(CoherePreTrainedModel):
         # TODO joao: standardize interface for the different Cache classes and remove of this if
         has_static_cache = False
         if past_key_values is None:
-            past_key_values = getattr(self.model.layers[0].self_attn, "past_key_value", None)
+            past_key_values = getattr(getattr(self.model.layers[0], "self_attn", {}), "past_key_value", None)
             has_static_cache = past_key_values is not None
 
         past_length = 0
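The same one-line change is applied to all three models: the direct attribute access self.model.layers[0].self_attn is replaced by a chained getattr, so that models whose decoder layers have been replaced or fused by other libraries (and therefore no longer expose a self_attn attribute) do not raise an AttributeError here but simply fall through to past_key_values = None. Below is a minimal, self-contained sketch of that pattern; WrappedLayer is a hypothetical stand-in for such a third-party replacement layer and is not part of transformers.

# Minimal sketch (not from the commit) of why the chained getattr is more robust.
# WrappedLayer is a hypothetical stand-in for a decoder layer that another
# library has substituted for the original one: it exposes neither a
# `self_attn` submodule nor a `past_key_value` attribute.

class WrappedLayer:
    pass

layer = WrappedLayer()

# Old pattern: assumes `self_attn` always exists and raises otherwise.
try:
    past_key_values = getattr(layer.self_attn, "past_key_value", None)
except AttributeError as err:
    print(f"old pattern raises: {err}")

# New pattern: the inner getattr falls back to an empty dict, which has no
# `past_key_value` attribute, so the outer getattr returns the default None.
past_key_values = getattr(getattr(layer, "self_attn", {}), "past_key_value", None)
print(past_key_values)  # None

The inner default could be any object without a past_key_value attribute; an empty dict is presumably just a cheap sentinel that lets the outer getattr fall through to None.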
src/transformers/models/gemma/modeling_gemma.py

@@ -1143,7 +1143,7 @@ class GemmaForCausalLM(GemmaPreTrainedModel):
         # TODO joao: standardize interface for the different Cache classes and remove of this if
         has_static_cache = False
         if past_key_values is None:
-            past_key_values = getattr(self.model.layers[0].self_attn, "past_key_value", None)
+            past_key_values = getattr(getattr(self.model.layers[0], "self_attn", {}), "past_key_value", None)
             has_static_cache = past_key_values is not None
 
         past_length = 0
src/transformers/models/llama/modeling_llama.py

@@ -1240,7 +1240,7 @@ class LlamaForCausalLM(LlamaPreTrainedModel):
         # TODO joao: standardize interface for the different Cache classes and remove of this if
         has_static_cache = False
         if past_key_values is None:
-            past_key_values = getattr(self.model.layers[0].self_attn, "past_key_value", None)
+            past_key_values = getattr(getattr(self.model.layers[0], "self_attn", {}), "past_key_value", None)
             has_static_cache = past_key_values is not None
 
         past_length = 0