gaoqiong / lm-evaluation-harness / Commits

Commit b8510001
Authored Jul 29, 2023 by baberabb
Parent: 3a3655d6

fixed error handling
Showing 1 changed file with 22 additions and 21 deletions.

lm_eval/models/anthropic_llms.py  (+22, -21)
@@ -41,12 +41,6 @@ def anthropic_completion(
             )
             time.sleep(backoff_time)
             backoff_time *= 1.5
-        except anthropic.APIConnectionError as e:
-            eval_logger.critical(f"Server unreachable: {e.__cause__}")
-            break
-        except anthropic.APIStatusError as e:
-            eval_logger.critical(f"API error {e.status_code}: {e.message}")
-            break


 @register_model("anthropic")
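Note: the hunk above only shows fragments of the retry loop in anthropic_completion that the deleted except clauses sat in. A rough sketch of that surrounding context follows; the RateLimitError branch, the client.completions.create call, the eval_logger import path, and the initial backoff value are assumptions and are not part of this diff.

import time

import anthropic

from lm_eval.utils import eval_logger  # assumed import path for the harness logger


def anthropic_completion_sketch(client, model, prompt, max_tokens_to_sample,
                                temperature, stop, **kwargs):
    # Assumed starting backoff; the hunk only shows the sleep and the 1.5x growth.
    backoff_time = 3
    while True:
        try:
            response = client.completions.create(
                model=model,
                prompt=prompt,
                max_tokens_to_sample=max_tokens_to_sample,
                temperature=temperature,
                stop_sequences=stop,
                **kwargs,
            )
            return response.completion
        except anthropic.RateLimitError:
            # Only rate limits are retried with exponential backoff here.
            # After this commit, connection/status errors are no longer caught
            # in this function and propagate to the caller.
            time.sleep(backoff_time)
            backoff_time *= 1.5

In other words, removing the two except clauses leaves only the rate-limit retry path inside anthropic_completion.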
@@ -122,21 +116,28 @@ class AnthropicLM(LM):
         res = []

         for request in tqdm(requests):
-            inp = request[0]
-            request_args = request[1]
-            until = request_args["until"]
-            response = anthropic_completion(
-                client=self.client,
-                model=self.model,
-                prompt=inp,
-                max_tokens_to_sample=self.max_tokens_to_sample,
-                temperature=self.temperature,  # TODO: implement non-greedy sampling for Anthropic
-                stop=until,
-                **self.kwargs,
-            )
-            res.append(response)
-
-            self.cache_hook.add_partial("greedy_until", request, response)
+            try:
+                inp = request[0]
+                request_args = request[1]
+                until = request_args["until"]
+                response = anthropic_completion(
+                    client=self.client,
+                    model=self.model,
+                    prompt=inp,
+                    max_tokens_to_sample=self.max_tokens_to_sample,
+                    temperature=self.temperature,  # TODO: implement non-greedy sampling for Anthropic
+                    stop=until,
+                    **self.kwargs,
+                )
+                res.append(response)
+
+                self.cache_hook.add_partial("greedy_until", request, response)
+            except anthropic.APIConnectionError as e:
+                eval_logger.critical(f"Server unreachable: {e.__cause__}")
+                break
+            except anthropic.APIStatusError as e:
+                eval_logger.critical(f"API error {e.status_code}: {e.message}")
+                break

         return res
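Taken together, the two hunks move the connection/status error handling from inside anthropic_completion up into the per-request loop of AnthropicLM.greedy_until: an anthropic.APIConnectionError or anthropic.APIStatusError is now logged with eval_logger.critical and the break exits the loop over requests, so return res hands back the responses collected so far.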