"...resnet50_tensorflow.git" did not exist on "5983e3d2233504a4b09721cf743714eaff73dde9"
Commit e3b881ae authored by Baber's avatar Baber
Browse files

add evaluator

parent 8181f43c
import re import re
from typing import Optional from typing import Optional
import requests
# from api_model import make_concurrent_requests # from api_model import make_concurrent_requests
from Levenshtein import distance from Levenshtein import distance
API_KEY = "your_openai_api_key"
API_URL = "https://api.openai.com/v1/chat/completions"
# required for external LM call # required for external LM call
DEMO_PROMPT = """ DEMO_PROMPT = """
...@@ -47,6 +53,30 @@ Extracted answer: B ...@@ -47,6 +53,30 @@ Extracted answer: B
""" """
# Function to send a single request to the OpenAI API
def send_request(prompt: str, timeout: float = 60.0) -> Optional[str]:
    """Send a single chat-completion request to the OpenAI API.

    Args:
        prompt: The user message to send as the sole conversation turn.
        timeout: Seconds to wait for the HTTP response before giving up.
            Without this, a stalled server would hang the caller forever.

    Returns:
        The assistant's reply text, or ``None`` if the request or the
        response parsing fails for any reason (best-effort contract: the
        error is printed, never raised, so batch evaluation can continue).
    """
    headers = {
        "Authorization": f"Bearer {API_KEY}",
        "Content-Type": "application/json",
    }
    data = {
        "model": "gpt-4",
        "messages": [
            {"role": "user", "content": prompt},
        ],
        "max_tokens": 1024,
    }
    # Broad catch is deliberate: it guards both network failures
    # (requests exceptions) and an unexpected response shape
    # (KeyError/IndexError from the indexing below).
    try:
        response = requests.post(API_URL, headers=headers, json=data, timeout=timeout)
        response.raise_for_status()
        result = response.json()
        return result["choices"][0]["message"]["content"]
    except Exception as e:
        print(f"An error occurred while requesting: {e}")
        return None
def create_test_prompt(demo_prompt, query, response): def create_test_prompt(demo_prompt, query, response):
demo_prompt = demo_prompt.strip() demo_prompt = demo_prompt.strip()
test_prompt = f"{query}\n\n{response}" test_prompt = f"{query}\n\n{response}"
...@@ -152,8 +182,8 @@ def extract_answer(response: str, problem: dict, quick_extract=True) -> str: ...@@ -152,8 +182,8 @@ def extract_answer(response: str, problem: dict, quick_extract=True) -> str:
question_type = problem["question_type"] question_type = problem["question_type"]
answer_type = problem["answer_type"] answer_type = problem["answer_type"]
choices = problem["choices"] choices = problem["choices"]
# query = problem["query"] query = problem["query"]
# pid = problem["pid"] pid = problem["pid"]
if response == "": if response == "":
return "" return ""
...@@ -187,16 +217,14 @@ def extract_answer(response: str, problem: dict, quick_extract=True) -> str: ...@@ -187,16 +217,14 @@ def extract_answer(response: str, problem: dict, quick_extract=True) -> str:
pass pass
# general extraction # general extraction
# try: try:
# full_prompt = create_test_prompt(DEMO_PROMPT, query, response) full_prompt = create_test_prompt(DEMO_PROMPT, query, response)
# extraction = make_concurrent_requests(full_prompt) extraction = send_request(full_prompt)
# return extraction return extraction
# except Exception: except Exception:
# print( print(
# f"Error in extracting answer for problem: {pid} with response: {response}" f"Error in extracting answer for problem: {pid} with response: {response}"
# ) )
# # logging.info(f"Error in extracting answer for problem: {pid} with response: {response}")
# # logging.info(e)
return "" return ""
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment