diff --git a/cli_demo.py b/cli_demo.py
index 4aa35d3..d7bb6ea 100644
--- a/cli_demo.py
+++ b/cli_demo.py
@@ -1,8 +1,7 @@
+# coding=gbk
 import os
 import platform
-import signal
 from transformers import AutoTokenizer, AutoModel
-import readline
 
 tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True)
 model = AutoModel.from_pretrained("THUDM/chatglm2-6b", trust_remote_code=True).cuda()
@@ -15,20 +14,6 @@
 clear_command = 'cls' if os_name == 'Windows' else 'clear'
 stop_stream = False
 
-
-def build_prompt(history):
-    prompt = "欢迎使用 ChatGLM2-6B 模型,输入内容即可进行对话,clear 清空对话历史,stop 终止程序"
-    for query, response in history:
-        prompt += f"\n\n用户:{query}"
-        prompt += f"\n\nChatGLM2-6B:{response}"
-    return prompt
-
-
-def signal_handler(signal, frame):
-    global stop_stream
-    stop_stream = True
-
-
 def main():
     past_key_values, history = None, []
     global stop_stream