"vscode:/vscode.git/clone" did not exist on "952a4cadf2fc2a3720638ee61b924890426b916c"
Unverified commit 5a49b2a3 authored by Hailey Schoelkopf, committed by GitHub

Merge pull request #738 from baberabb/master_anthropic

[Main] updated to new anthropic API
parents fe803c29 d504944b
@@ -4,7 +4,9 @@ from tqdm import tqdm
 import time


-def anthropic_completion(client, model, prompt, max_tokens_to_sample, temperature, stop):
+def anthropic_completion(
+    client, model, prompt, max_tokens_to_sample, temperature, stop
+):
     """Query Anthropic API for completion.

     Retry with back-off until they respond
@@ -14,7 +16,7 @@ def anthropic_completion(client, model, prompt, max_tokens_to_sample, temperature, stop):
     backoff_time = 3
     while True:
         try:
-            response = client.completion(
+            response = client.completions.create(
                 prompt=f"{anthropic.HUMAN_PROMPT} {prompt}{anthropic.AI_PROMPT}",
                 model=model,
                 # NOTE: Claude really likes to do CoT, and overly aggressive stop sequences
@@ -24,7 +26,7 @@ def anthropic_completion(client, model, prompt, max_tokens_to_sample, temperature, stop):
                 temperature=temperature,
             )
             print(response)
-            return response["completion"]
+            return response.completion
         except RuntimeError:
             # TODO: I don't actually know what error Anthropic raises when it times out
             # So err update this error when we find out.
@@ -38,7 +40,7 @@ def anthropic_completion(client, model, prompt, max_tokens_to_sample, temperature, stop):
 class AnthropicLM(BaseLM):
     REQ_CHUNK_SIZE = 20

-    def __init__(self, model):
+    def __init__(self, model="claude-2"):
         """
         :param model: str
@@ -46,8 +48,9 @@ class AnthropicLM(BaseLM):
         """
         super().__init__()
         import anthropic

         self.model = model
-        self.client = anthropic.Client(os.environ['ANTHROPIC_API_KEY'])
+        self.client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])

     @property
     def eot_token_id(self):
...
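For reference, a minimal standalone sketch of the call pattern this PR migrates to (assuming anthropic>=0.3, where the legacy Text Completions endpoint is client.completions.create and the response exposes a .completion attribute; the model name, prompt, and stop_sequences value below are illustrative, not taken from the PR):

    import os
    import anthropic

    # New-style client: api_key is passed as a keyword argument instead of the
    # positional argument taken by the old anthropic.Client.
    client = anthropic.Anthropic(api_key=os.environ["ANTHROPIC_API_KEY"])

    # Legacy text-completions call, mirroring the updated anthropic_completion():
    # the prompt is wrapped in HUMAN_PROMPT / AI_PROMPT, and the result is an
    # object with a .completion attribute rather than a dict.
    response = client.completions.create(
        model="claude-2",
        prompt=f"{anthropic.HUMAN_PROMPT} What is the capital of France?{anthropic.AI_PROMPT}",
        max_tokens_to_sample=64,
        temperature=0.0,
        stop_sequences=[anthropic.HUMAN_PROMPT],  # illustrative stop list
    )
    print(response.completion)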