gaoqiong / lm-evaluation-harness

Commit dc1c816b, authored Dec 04, 2023 by baberabb

added `_encode_pair`

parent 19f745aa
Showing 1 changed file with 16 additions and 7 deletions

lm_eval/models/vllm_causallms.py (+16, -7)
@@ -15,7 +15,6 @@ try:
 except ModuleNotFoundError:
     pass
 
 eval_logger = utils.eval_logger
@@ -162,6 +161,21 @@ please install vllm via `pip install lm-eval[vllm]` or `pip install -e .[vllm]`"
         return outputs
 
+    def _encode_pair(
+        self, context: str, continuation: str
+    ) -> Tuple[List[int], List[int]]:
+        n_spaces = len(context) - len(context.rstrip())
+        if n_spaces > 0:
+            continuation = context[-n_spaces:] + continuation
+            context = context[:-n_spaces]
+
+        whole_enc = self.tok_encode(context + continuation, add_special_tokens=False)
+        context_enc = self.tok_encode(context, add_special_tokens=False)
+
+        context_enc_len = len(context_enc)
+        continuation_enc = whole_enc[context_enc_len:]
+
+        return context_enc, continuation_enc
 
     def loglikelihood(self, requests: List[Instance]) -> List[Tuple[float, bool]]:
         new_reqs = []
         for context, continuation in [req.args for req in requests]:
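For readers unfamiliar with the trick above: `_encode_pair` shifts any trailing whitespace from the context onto the continuation, then tokenizes the concatenated string once and slices off the context tokens, so the continuation token ids are exactly what they would be inside the full string (with BPE tokenizers, a leading space typically fuses into the continuation's first token). Below is a minimal standalone sketch of the same idea; the `gpt2` tokenizer and the free-standing `encode_pair` helper are illustrative assumptions, not part of this commit.

```python
from typing import List, Tuple

from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")  # illustrative tokenizer, not the one the class loads


def encode_pair(context: str, continuation: str) -> Tuple[List[int], List[int]]:
    # Shift trailing whitespace from the context onto the continuation,
    # mirroring the commit's `_encode_pair`.
    n_spaces = len(context) - len(context.rstrip())
    if n_spaces > 0:
        continuation = context[-n_spaces:] + continuation
        context = context[:-n_spaces]

    # Encode the joined string once, then slice off the context tokens so the
    # continuation ids match how they appear inside the full string.
    whole_enc = tok.encode(context + continuation, add_special_tokens=False)
    context_enc = tok.encode(context, add_special_tokens=False)
    continuation_enc = whole_enc[len(context_enc):]
    return context_enc, continuation_enc


context, continuation = "The capital of France is ", "Paris"
print(encode_pair(context, continuation)[1])                # continuation ids sliced from the joint encoding
print(tok.encode(continuation, add_special_tokens=False))   # naive separate encoding; typically differs (no leading space)
```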
@@ -171,12 +185,7 @@ please install vllm via `pip install lm-eval[vllm]` or `pip install -e .[vllm]`"
                     continuation
                 )
             else:
-                context_enc, continuation_enc = self.tokenizer(
-                    [context, continuation],
-                    truncation="do_not_truncate",
-                    add_special_tokens=False,
-                    return_attention_mask=False,
-                ).input_ids
+                context_enc, continuation_enc = self._encode_pair(context, continuation)
 
             new_reqs.append(((context, continuation), context_enc, continuation_enc))
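For comparison (not from the commit): the removed `else:` branch encoded context and continuation as two independent strings through a batched tokenizer call, so a trailing space in the context stayed with the context and the continuation was encoded without its leading space. A small sketch of that behaviour, again assuming a `gpt2` tokenizer as a stand-in for `self.tokenizer`:

```python
from transformers import AutoTokenizer

tok = AutoTokenizer.from_pretrained("gpt2")  # stand-in for the model's tokenizer, illustrative only

context, continuation = "The capital of France is ", "Paris"

# The removed branch: both strings are tokenized independently, so the trailing
# space stays at the end of the context and the continuation loses it.
context_enc, continuation_enc = tok(
    [context, continuation],
    truncation="do_not_truncate",
    add_special_tokens=False,
    return_attention_mask=False,
).input_ids

print(context_enc[-1:])   # with GPT-2's BPE, a lone space token at the end of the context
print(continuation_enc)   # "Paris" tokenized without a leading space
```

With `_encode_pair`, the trailing space is moved onto the continuation before the joint encoding, so the continuation's first token carries its leading space and matches what the model sees when scoring the full sequence.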