Skip to content
GitLab
Menu
Projects
Groups
Snippets
Loading...
Help
Help
Support
Community forum
Keyboard shortcuts
?
Submit feedback
Contribute to GitLab
Sign in / Register
Toggle navigation
Menu
Open sidebar
chenpangpang
transformers
Commits
903b97d8
Unverified
Commit
903b97d8
authored
Jun 28, 2023
by
Younes Belkada
Committed by
GitHub
Jun 28, 2023
Browse files
[`gpt2-int8`] Add gpt2-xl int8 test (#24543)
add gpt2-xl test
parent
b0651655
Changes
1
Show whitespace changes
Inline
Side-by-side
Showing
1 changed file
with
19 additions
and
3 deletions
+19
-3
tests/bnb/test_mixed_int8.py
tests/bnb/test_mixed_int8.py
+19
-3
No files found.
tests/bnb/test_mixed_int8.py
View file @
903b97d8
...
...
@@ -762,8 +762,24 @@ class MixedInt8TestTraining(BaseMixedInt8Test):
class MixedInt8GPT2Test(MixedInt8Test):
    # Reference model and expectations for the gpt2-xl int8 path.
    model_name = "gpt2-xl"
    EXPECTED_RELATIVE_DIFFERENCE = 1.8720077507258357
    EXPECTED_OUTPUT = "Hello my name is John Doe, and I'm a big fan of"

    def test_int8_from_pretrained(self):
        r"""
        Test whether loading an 8-bit quantized model from the Hub works as expected.

        Loads a pre-quantized gpt2-xl checkpoint, verifies that the linear
        weights are `Int8Params` carrying the `SCB` quantization statistics,
        then runs a short generation and checks the decoded text.
        """
        # Imported lazily: bitsandbytes is only needed when this test runs.
        from bitsandbytes.nn import Int8Params

        model_id = "ybelkada/gpt2-xl-8bit"
        model = AutoModelForCausalLM.from_pretrained(model_id)

        linear = get_some_linear_layer(model)
        # Quantized weights must be Int8Params and expose the SCB scale buffer.
        self.assertTrue(linear.weight.__class__ == Int8Params)
        self.assertTrue(hasattr(linear.weight, "SCB"))

        # generate
        encoded_input = self.tokenizer(self.input_text, return_tensors="pt")
        output_sequences = model.generate(input_ids=encoded_input["input_ids"].to(0), max_new_tokens=10)

        decoded = self.tokenizer.decode(output_sequences[0], skip_special_tokens=True)
        self.assertEqual(decoded, self.EXPECTED_OUTPUT)
Write
Preview
Markdown
is supported
0%
Try again
or
attach a new file
.
Attach a file
Cancel
You are about to add
0
people
to the discussion. Proceed with caution.
Finish editing this message first!
Cancel
Please
register
or
sign in
to comment