CMLL committed (verified)
Commit a9638ab · 1 Parent(s): c1e4174

Update app.py

Files changed (1)
  1. app.py +25 -5
app.py CHANGED
@@ -2,11 +2,11 @@ import os
 import subprocess
 
 def run_command(command):
-    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
+    process = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE, stderr=subprocess.PIPE, text=True)
     stdout, stderr = process.communicate()
     if process.returncode != 0:
-        raise Exception(f"Command failed with error: {stderr.decode('utf-8')}")
-    return stdout.decode('utf-8')
+        raise Exception(f"Command failed with error: {stderr}")
+    return stdout
 
 def main():
     # Clone the repository
@@ -30,14 +30,34 @@ def main():
 Transcript of a dialog, where the User interacts with an Assistant named Bob. Bob is a helpful TCM medical assistant, also named 仲景中医大语言模型, and never fails to answer the User's requests immediately and with precision.
 
 User: Hello, Bob.
-Bob: Hello. How may I help you today?
+Bob: Hello. How may I help you today?不断地从书中获取知识,以获得更全面的知识结构。
 """
     with open("prompts/chat-with-bob.txt", "w") as f:
         f.write(dialog_content)
 
     # Run the llama-cli command
     print("Running the llama-cli command...")
-    run_command("./llama-cli -m ZhongJing1_5-1_8b-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r 'User:' -f prompts/chat-with-bob.txt")
+    process = subprocess.Popen(
+        ['./llama-cli', '-m', 'ZhongJing1_5-1_8b-q4_0.gguf', '-n', '256', '--repeat_penalty', '1.0', '--color', '-i', '-r', 'User:', '-f', 'prompts/chat-with-bob.txt'],
+        stdout=subprocess.PIPE,
+        stderr=subprocess.PIPE,
+        text=True
+    )
+
+    # Capture the output and filter it
+    while True:
+        output = process.stdout.readline()
+        if 'Transcript of a dialog' in output:
+            print(output, end='')
+            break
+
+    while True:
+        output = process.stdout.readline()
+        if output.startswith('User:') or output.startswith('Bob:'):
+            print(output, end='')
+        elif process.poll() is not None:
+            break
 
 if __name__ == "__main__":
     main()
+
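
For readers who want to try the readline()-based filtering this commit introduces without building llama-cli or downloading the GGUF model, here is a minimal standalone sketch. The dummy python3 -c subprocess and its canned output are illustrative stand-ins and not part of the commit; only the two-loop filtering pattern mirrors the committed code.

# Standalone sketch of the output-filtering pattern added in this commit.
# A dummy subprocess stands in for ./llama-cli; the fake output lines are
# invented for illustration.
import subprocess

fake_output = (
    "loader banner and other noise to hide\n"
    "Transcript of a dialog, where the User interacts with an Assistant named Bob.\n"
    "User: Hello, Bob.\n"
    "Bob: Hello. How may I help you today?\n"
)

process = subprocess.Popen(
    ["python3", "-c", f"print({fake_output!r}, end='')"],
    stdout=subprocess.PIPE,
    stderr=subprocess.PIPE,
    text=True,
)

# 1) Skip everything until the transcript header appears, then echo it once.
while True:
    line = process.stdout.readline()
    if line == "":  # EOF before the header: dummy output ended early
        break
    if "Transcript of a dialog" in line:
        print(line, end="")
        break

# 2) Echo only dialog lines. The committed script additionally checks
#    process.poll(), since llama-cli keeps running interactively (-i);
#    the dummy process here simply ends, so EOF is the stop condition.
while True:
    line = process.stdout.readline()
    if line == "":  # EOF: the dummy process has finished
        break
    if line.startswith("User:") or line.startswith("Bob:"):
        print(line, end="")

process.wait()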