Commit 53adc6a0 authored by Timothy J. Baek

fix: rag issue

parent b88c64f8
@@ -222,7 +222,7 @@ async def pull_model(
         r = requests.request(
             method="POST",
             url=f"{url}/api/pull",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
             stream=True,
         )
@@ -294,7 +294,7 @@ async def push_model(
         r = requests.request(
             method="POST",
             url=f"{url}/api/push",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         r.raise_for_status()
@@ -356,7 +356,7 @@ async def create_model(
         r = requests.request(
             method="POST",
             url=f"{url}/api/create",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
             stream=True,
         )
@@ -419,7 +419,7 @@ async def copy_model(
         r = requests.request(
             method="POST",
             url=f"{url}/api/copy",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         r.raise_for_status()
@@ -466,7 +466,7 @@ async def delete_model(
         r = requests.request(
             method="DELETE",
             url=f"{url}/api/delete",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         r.raise_for_status()
@@ -506,7 +506,7 @@ async def show_model_info(form_data: ModelNameForm, user=Depends(get_current_use
         r = requests.request(
             method="POST",
             url=f"{url}/api/show",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         r.raise_for_status()
@@ -558,7 +558,7 @@ async def generate_embeddings(
         r = requests.request(
             method="POST",
             url=f"{url}/api/embeddings",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
         )
         r.raise_for_status()
@@ -644,7 +644,7 @@ async def generate_completion(
         r = requests.request(
             method="POST",
             url=f"{url}/api/generate",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
             stream=True,
         )
@@ -714,7 +714,7 @@ async def generate_chat_completion(
     r = None

-    print(form_data.model_dump_json(exclude_none=True))
+    print(form_data.model_dump_json(exclude_none=True).encode())

     def get_request():
         nonlocal form_data
@@ -744,7 +744,7 @@ async def generate_chat_completion(
             r = requests.request(
                 method="POST",
                 url=f"{url}/api/chat",
-                data=form_data.model_dump_json(exclude_none=True),
+                data=form_data.model_dump_json(exclude_none=True).encode(),
                 stream=True,
             )
@@ -756,6 +756,7 @@ async def generate_chat_completion(
                 headers=dict(r.headers),
             )
         except Exception as e:
+            print(e)
             raise e

     try:
@@ -843,7 +844,7 @@ async def generate_openai_chat_completion(
         r = requests.request(
             method="POST",
             url=f"{url}/v1/chat/completions",
-            data=form_data.model_dump_json(exclude_none=True),
+            data=form_data.model_dump_json(exclude_none=True).encode(),
             stream=True,
         )
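The change itself is mechanical: every JSON payload produced by form_data.model_dump_json(exclude_none=True) is now encoded to UTF-8 bytes before being handed to requests. The likely reason this surfaces as a RAG issue is that prompts with retrieved document text often contain non-ASCII characters, and when requests is given a str body it computes Content-Length from the character count and leaves the encoding to the underlying HTTP stack, which (depending on the urllib3/http.client version in use) either rejects non-Latin-1 text with a UnicodeEncodeError or sends more bytes than the declared length, truncating the JSON on the Ollama side. Encoding up front sends exactly the bytes that were measured. Below is a minimal sketch of the difference, using an illustrative form model and endpoint URL that are not taken from this file:

import requests
from pydantic import BaseModel


# Hypothetical stand-in for the real form models; only model/prompt are needed here.
class GenerateForm(BaseModel):
    model: str
    prompt: str


form_data = GenerateForm(model="llama2", prompt="Summarise: Révision générale du système …")
body_str = form_data.model_dump_json(exclude_none=True)

# Prepare both variants without touching the network to compare what would be sent.
url = "http://localhost:11434/api/generate"  # assumed local Ollama endpoint
as_str = requests.Request("POST", url, data=body_str).prepare()
as_bytes = requests.Request("POST", url, data=body_str.encode()).prepare()

# With a str body, Content-Length is len(body_str) (characters); with UTF-8 bytes
# it is len(body_str.encode()) (bytes). The two differ here because the prompt
# contains non-ASCII text, which is exactly the case once RAG context is injected.
print(as_str.headers["Content-Length"], len(body_str))
print(as_bytes.headers["Content-Length"], len(body_str.encode()))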