Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
ModelZoo
donut_pytorch
Commits
a0e94bf1
Unverified
Commit
a0e94bf1
authored
Apr 06, 2023
by
Geewook Kim
Committed by
GitHub
Apr 06, 2023
Browse files
Merge pull request #165 from dotneet/fix/past_key_values
supports latest transformers
parents
217cffb1
48479fe8
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
6 additions
and
3 deletions
+6
-3
donut/model.py
donut/model.py
+6
-3
No files found.
donut/model.py
View file @
a0e94bf1
...
@@ -206,7 +206,7 @@ class BARTDecoder(nn.Module):
...
@@ -206,7 +206,7 @@ class BARTDecoder(nn.Module):
if
newly_added_num
>
0
:
if
newly_added_num
>
0
:
self
.
model
.
resize_token_embeddings
(
len
(
self
.
tokenizer
))
self
.
model
.
resize_token_embeddings
(
len
(
self
.
tokenizer
))
def
prepare_inputs_for_inference
(
self
,
input_ids
:
torch
.
Tensor
,
encoder_outputs
:
torch
.
Tensor
,
past
=
None
,
use_cache
:
bool
=
None
,
attention_mask
:
torch
.
Tensor
=
None
):
def
prepare_inputs_for_inference
(
self
,
input_ids
:
torch
.
Tensor
,
encoder_outputs
:
torch
.
Tensor
,
past_key_values
=
None
,
past
=
None
,
use_cache
:
bool
=
None
,
attention_mask
:
torch
.
Tensor
=
None
):
"""
"""
Args:
Args:
input_ids: (batch_size, sequence_length)
input_ids: (batch_size, sequence_length)
...
@@ -215,13 +215,16 @@ class BARTDecoder(nn.Module):
...
@@ -215,13 +215,16 @@ class BARTDecoder(nn.Module):
attention_mask: (batch_size, sequence_length)
attention_mask: (batch_size, sequence_length)
encoder_hidden_states: (batch_size, sequence_length, embedding_dim)
encoder_hidden_states: (batch_size, sequence_length, embedding_dim)
"""
"""
attention_mask
=
input_ids
.
ne
(
self
.
tokenizer
.
pad_token_id
).
long
()
# for compatibility with transformers==4.11.x
if
past
is
not
None
:
if
past
is
not
None
:
past_key_values
=
past
attention_mask
=
input_ids
.
ne
(
self
.
tokenizer
.
pad_token_id
).
long
()
if
past_key_values
is
not
None
:
input_ids
=
input_ids
[:,
-
1
:]
input_ids
=
input_ids
[:,
-
1
:]
output
=
{
output
=
{
"input_ids"
:
input_ids
,
"input_ids"
:
input_ids
,
"attention_mask"
:
attention_mask
,
"attention_mask"
:
attention_mask
,
"past_key_values"
:
past
,
"past_key_values"
:
past
_key_values
,
"use_cache"
:
use_cache
,
"use_cache"
:
use_cache
,
"encoder_hidden_states"
:
encoder_outputs
.
last_hidden_state
,
"encoder_hidden_states"
:
encoder_outputs
.
last_hidden_state
,
}
}
...
...
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment