"vscode:/vscode.git/clone" did not exist on "cc05e9d47b851d2b853ce8bd56231f27a584bdab"
cli_demo.py 1.02 KB
Newer Older
import argparse
from fastllm_pytools import llm
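
# Example usage (the model path below is illustrative; pass any model file that
# has been converted to fastllm's format):
#     python cli_demo.py -p /path/to/model.flm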

def args_parser():
    # Parse command-line arguments for the chat demo
    parser = argparse.ArgumentParser(description='fastllm_chat_demo')
    parser.add_argument('-p', '--path', type=str, required=True, help='path to the model file')
    args = parser.parse_args()
    return args

if __name__ == "__main__":
    args = args_parser()
    model = llm.model(args.path)

    history = []
    print("输入内容即可进行对话,clear 清空对话历史,stop 终止程序")
    while True:
        query = input("\n用户:")
        if query.strip() == "stop":
            break
        if query.strip() == "clear":
            history = []
            print("输入内容即可进行对话,clear 清空对话历史,stop 终止程序")
            continue

        print("AI: ", end="")
        curResponse = ""
        # Stream the reply chunk by chunk, printing each piece as it arrives
        for response in model.stream_response(query, history=history):
            curResponse += response
            print(response, flush=True, end="")
        # Record the finished turn so later queries keep the conversation context
        history.append((query, curResponse))