"""HTTP client for the knowledge-base service.

Usage:
    Public  KB search:  python client.py --action query --query 'question'
    Private KB search:  python client.py --action query --query 'question' --user_id 'user_id'
"""
import argparse
import json
import re

import requests

# Endpoint template; '%s' is filled with the route name ('work' or 'stream').
base_url = 'http://127.0.0.1:8000/%s'


def query(query, user_id=None):
    """POST *query* to the /work endpoint and return (reply, references).

    Args:
        query:   The question text to send.
        user_id: Optional user id; when truthy it is added to the payload
                 to select the user's private knowledge base.

    Returns:
        (reply, references) on success; ('', []) on any error so the
        caller's two-value unpacking never raises.
    """
    url = base_url % 'work'
    try:
        header = {'Content-Type': 'application/json'}
        data = {
            'query': query,
            'history': []
        }
        if user_id:
            data['user_id'] = user_id
        resp = requests.post(url, headers=header, data=json.dumps(data), timeout=300)
        if resp.status_code != 200:
            raise Exception(str((resp.status_code, resp.reason)))
        # Bug fix: parse the JSON body once instead of calling resp.json() twice.
        body = resp.json()
        return body['reply'], body['references']
    except Exception as e:
        print(str(e))
        # Bug fix: the original returned a bare '' here, which broke the
        # caller's `reply, ref = query(...)` unpacking on every error.
        return '', []


def get_streaming_response(response: requests.Response):
    """Consume an SSE-style streaming response and print decoded characters.

    The server emits NUL-delimited chunks of the form `data: "\\uXXXX"`;
    each 4-hex-digit escape is decoded to one character and printed
    immediately (flushed) to give a typewriter effect.
    """
    for chunk in response.iter_lines(chunk_size=1024, decode_unicode=False, delimiter=b"\0"):
        if chunk:
            pattern = re.compile(rb'data: "(\\u[0-9a-fA-F]{4})"')
            matches = pattern.findall(chunk)
            decoded_data = []
            for match in matches:
                hex_value = match[2:].decode('ascii')  # strip the leading rb'\u'
                char = chr(int(hex_value, 16))
                decoded_data.append(char)
                print(char, end="", flush=True)


def stream_query(query, user_id=None):
    """Send *query* to the /stream endpoint and print the streamed reply.

    Args:
        query:   The question text to send.
        user_id: Optional user id for private knowledge-base search.
    """
    url = base_url % 'stream'
    try:
        headers = {
            "Content-Type": "text/event-stream",
            "Cache-Control": "no-cache",
            "Connection": "keep-alive"
        }
        data = {
            'query': query,
            'history': []
        }
        # Bug fix: user_id was accepted but silently dropped, so private-KB
        # streaming never worked; include it just like query() does.
        if user_id:
            data['user_id'] = user_id
        # NOTE(review): a GET with a JSON body is unusual but matches the
        # server this client talks to — confirm before changing the verb.
        resp = requests.get(url, headers=headers, data=json.dumps(data),
                            timeout=300, verify=False, stream=True)
        get_streaming_response(resp)
    except Exception as e:
        print(str(e))


def parse_args(argv=None):
    """Parse command-line options.

    Args:
        argv: Optional argument list; defaults to sys.argv[1:] so existing
              callers are unaffected (generalized for testability).

    Returns:
        argparse.Namespace with query, user_id, stream, and action.
    """
    parser = argparse.ArgumentParser(description='.')
    parser.add_argument('--query', default='your query', help='')
    parser.add_argument('--user_id', default='')
    parser.add_argument('--stream', action='store_true')
    # Fix: the documented usage passes `--action query`, but no such option
    # existed, so the documented command line crashed argparse.
    parser.add_argument('--action', default='query')
    return parser.parse_args(argv)


if __name__ == '__main__':
    args = parse_args()
    if args.stream:
        stream_query(args.query, args.user_id)
    else:
        reply, ref = query(args.query, args.user_id)
        print('reply: {} \nref: {} '.format(reply, ref))