gaoqiong / lm-evaluation-harness · Commits

Commit 0371f45c
Authored May 18, 2023 by bzantium

fix for merge from master

Parent: baa8b0d3
Showing 1 changed file with 0 additions and 23 deletions:

lm_eval/models/gpt2.py (+0, −23)
lm_eval/models/gpt2.py

@@ -52,28 +52,6 @@ class HFLM(BaseLM):

         self.tokenizer = transformers.AutoTokenizer.from_pretrained(
             pretrained if tokenizer is None else tokenizer,
-<<<<<<< HEAD
-            revision=revision + ("/" + subfolder if subfolder is not None else ""))
-
-        # assert isinstance(self.tokenizer, (
-        #     transformers.GPT2Tokenizer, transformers.GPT2TokenizerFast,
-        #     transformers.T5Tokenizer, transformers.T5TokenizerFast,
-        # )), "this tokenizer has not been checked for compatibility yet!"
-
-        self.vocab_size = self.tokenizer.vocab_size
-
-        # if isinstance(self.tokenizer, (transformers.GPT2Tokenizer, transformers.GPT2TokenizerFast)):
-        #     assert self.tokenizer.encode('hello\n\nhello') == [31373, 198, 198, 31373], \
-        #         self.tokenizer.encode('hello\n\nhello')
-
-        # multithreading and batching
-        self.batch_size_per_gpu = batch_size  # todo: adaptive batch size
-
-        # TODO: fix multi-gpu
-        # gpus = torch.cuda.device_count()
-        # if gpus > 1:
-        #     self.gpt2 = nn.DataParallel(self.gpt2)
-=======
             revision=revision,
             trust_remote_code=trust_remote_code,
         )
@@ -95,7 +73,6 @@ class HFLM(BaseLM):
             self.batch_size_per_gpu = batch_size
         else:
             self.batch_size_per_gpu = int(batch_size)
->>>>>>> d145167959c2b1826d900524912cb99c44d5fb30

     @property
     def eot_token_id(self):
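The deletions resolve the stale conflict in favor of the branch merged from master: the tokenizer is loaded with revision and trust_remote_code, while the HEAD-side subfolder handling and the commented-out compatibility checks are dropped along with the conflict markers. Below is a minimal, standalone sketch of the call that remains after this commit; the checkpoint name and flag values are illustrative stand-ins for the HFLM constructor arguments, not part of the commit.

import transformers

# Hypothetical stand-ins for the HFLM constructor arguments seen in the diff context.
pretrained = "gpt2"        # illustrative checkpoint name
tokenizer = None           # None -> fall back to the model's own tokenizer
revision = "main"
trust_remote_code = False

# Mirrors the AutoTokenizer.from_pretrained call kept by the commit.
tok = transformers.AutoTokenizer.from_pretrained(
    pretrained if tokenizer is None else tokenizer,
    revision=revision,
    trust_remote_code=trust_remote_code,
)
print(tok.vocab_size)  # e.g. 50257 for the GPT-2 tokenizer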