Commit 76bc4d04 authored by Matt Williams
Browse files

Cleanup as per Bruce


Signed-off-by: Matt Williams <m@technovangelist.com>
parent aec742b6
...@@ -15,6 +15,8 @@ def chat(messages): ...@@ -15,6 +15,8 @@ def chat(messages):
for line in r.iter_lines(): for line in r.iter_lines():
body = json.loads(line) body = json.loads(line)
if "error" in body:
raise Exception(body["error"])
if body.get("done") is False: if body.get("done") is False:
message = body.get("message", "") message = body.get("message", "")
content = message.get("content", "") content = message.get("content", "")
...@@ -22,8 +24,6 @@ def chat(messages): ...@@ -22,8 +24,6 @@ def chat(messages):
# the response streams one token at a time, print that as we receive it # the response streams one token at a time, print that as we receive it
print(content, end="", flush=True) print(content, end="", flush=True)
if "error" in body:
raise Exception(body["error"])
if body.get("done", False): if body.get("done", False):
message["content"] = output message["content"] = output
...@@ -32,7 +32,7 @@ def chat(messages): ...@@ -32,7 +32,7 @@ def chat(messages):
def main(): def main():
messages = [] messages = []
) # the context stores a conversation history, you can use this to make the model more context aware
while True: while True:
user_input = input("Enter a prompt: ") user_input = input("Enter a prompt: ")
print() print()
......
Markdown is supported
0% or .
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment