Update app.py
app.py CHANGED
@@ -37,7 +37,17 @@ def chat_with_model(user_input, history):
     command = f"./llama.cpp/build/bin/main -m models/ZhongJing1_5-1_8b-fp16.gguf -n 256 --repeat_penalty 1.0 --color -i -r \"User:\" -f llama.cpp/prompts/TcmChat.txt"
     result = subprocess.run(command, shell=True, capture_output=True, text=True)
 
-
+    # 解析输出 (parse the output)
+    output_lines = result.stdout.split("\n")
+    response = ""
+    capture = False
+    for line in output_lines:
+        if "User:" in line:
+            capture = True
+        if capture:
+            response += line + "\n"
+
+    response = response.strip()
     history.append((user_input, response))
     return history, history
 
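For context, the added block captures everything in the subprocess's stdout from the first line containing "User:" onward, including that marker line itself. Below is a minimal standalone sketch of that parsing logic; the sample transcript is hypothetical and stands in for real llama.cpp output.

# Standalone sketch of the parsing logic added in this commit.
# sample_stdout is a made-up transcript, not actual llama.cpp output.
sample_stdout = (
    "main: interactive mode banner\n"
    "User: What is ginseng used for?\n"
    "Assistant: Ginseng is traditionally used to replenish qi.\n"
)

output_lines = sample_stdout.split("\n")
response = ""
capture = False
for line in output_lines:
    if "User:" in line:
        capture = True           # start capturing at the first "User:" marker
    if capture:
        response += line + "\n"  # keeps the marker line and everything after it

response = response.strip()
print(response)  # prints the "User:" line and everything that follows it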