gaoqiong / lm-evaluation-harness

Commit 3a3655d6, authored Jul 28, 2023 by baberabb
passed kwargs to client
Parent: fe358061
Showing 1 changed file with 13 additions and 3 deletions:

lm_eval/models/anthropic_llms.py (+13, -3)
@@ -15,6 +15,7 @@ def anthropic_completion(
     max_tokens_to_sample: int,
     temperature: float,
     stop: List[str],
+    **kwargs: Any,
 ):
     """Query Anthropic API for completion.
@@ -31,6 +32,7 @@ def anthropic_completion(
                 stop_sequences=[anthropic.HUMAN_PROMPT] + stop,
                 max_tokens_to_sample=max_tokens_to_sample,
                 temperature=temperature,
+                **kwargs,
             )
             return response.completion
         except anthropic.RateLimitError as e:
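For context, the keyword arguments added to anthropic_completion above are forwarded straight into the Anthropic SDK request whose tail this hunk shows. A minimal sketch of that request, assuming the SDK's legacy Completions API (client.completions.create); the top_p value is illustrative and not part of this commit:

    # Sketch only: assumes the anthropic SDK's legacy Completions API, which the
    # surrounding call in this hunk appears to use; top_p is an illustrative
    # extra kwarg that now reaches the API via **kwargs.
    import anthropic

    client = anthropic.Anthropic()  # API key defaults to os.environ["ANTHROPIC_API_KEY"]

    response = client.completions.create(
        model="claude-2.0",
        prompt=f"{anthropic.HUMAN_PROMPT} What is the capital of France?{anthropic.AI_PROMPT}",
        stop_sequences=[anthropic.HUMAN_PROMPT],
        max_tokens_to_sample=256,
        temperature=1.0,
        top_p=0.95,  # hypothetical extra sampling option, forwarded by this change
    )
    print(response.completion)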
@@ -56,22 +58,29 @@ class AnthropicLM(LM):
         batch_size=None,
         model: str = "claude-2.0",
         max_tokens_to_sample: int = 256,
-        temperature: float = 0.0,
-        **kwargs: Any,  # api_key, auth_token, etc.
+        temperature: float = 1.0,  # defaults to 1
+        **kwargs: Any,  # top_p, top_k, etc.
     ):
         # TODO: remove batch_size
         """Anthropic API wrapper.

         :param model: str
             Anthropic model e.g. 'claude-instant-v1', 'claude-2'
+        :param max_tokens_to_sample: int
+            Maximum number of tokens to sample from the model
+        :param temperature: float
+            Sampling temperature
+        :param kwargs: Any
+            Additional model_args to pass to the API client
         """
         super().__init__()
         self.model = model
         # defaults to os.environ.get("ANTHROPIC_API_KEY")
-        self.client = anthropic.Anthropic(**kwargs)
+        self.client = anthropic.Anthropic()
         self.temperature = temperature
         self.max_tokens_to_sample = max_tokens_to_sample
         self.tokenizer = self.client.get_tokenizer()
+        self.kwargs = kwargs

     @property
     def eot_token_id(self):
@@ -123,6 +132,7 @@ class AnthropicLM(LM):
                 max_tokens_to_sample=self.max_tokens_to_sample,
                 temperature=self.temperature,  # TODO: implement non-greedy sampling for Anthropic
                 stop=until,
+                **self.kwargs,
             )
             res.append(response)
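Taken together, the commit threads extra sampling options from the wrapper's constructor down to every completion request: they are accepted as **kwargs in __init__, stored on self.kwargs, and splatted into each anthropic_completion call as shown above. A minimal usage sketch; the top_p and top_k values are illustrative and not taken from this commit:

    # Sketch only: top_p / top_k stand in for whatever model_args a caller wants
    # forwarded; after this commit they are stored as self.kwargs and passed to
    # each anthropic_completion(...) call via **self.kwargs.
    from lm_eval.models.anthropic_llms import AnthropicLM

    lm = AnthropicLM(
        model="claude-2.0",
        max_tokens_to_sample=256,
        temperature=1.0,
        top_p=0.95,
        top_k=40,
    )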