CMLL committed on
Commit
643c943
1 Parent(s): e75e07f

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +33 -65
app.py CHANGED
@@ -1,75 +1,43 @@
1
- import gradio as gr
2
- from huggingface_hub import hf_hub_download
3
  import os
4
  import subprocess
5
 
6
- # 模型下载链接
7
- model_url = "https://huggingface.co/CMLL/ZhongJing-2-1_8b-GGUF/resolve/main/ZhongJing1_5-1_8b-q4_0.gguf"
8
-
9
- # 下载模型函数
10
- def download_model(url, model_dir="models"):
11
- os.makedirs(model_dir, exist_ok=True)
12
- model_path = hf_hub_download(repo_id="CMLL/ZhongJing-2-1_8b-GGUF", filename="ZhongJing1_5-1_8b-q4_0.gguf", local_dir=model_dir)
13
- return model_path
14
-
15
- # 安装环境函数
16
- def setup_environment():
17
- if not os.path.exists("llama.cpp"):
18
- os.system("git clone https://github.com/ggerganov/llama.cpp.git")
19
- os.system("cd llama.cpp && mkdir build && cd build && cmake .. && make")
20
-
21
- model_path = download_model(model_url)
22
 
23
- prompts_dir = "llama.cpp/prompts"
24
- os.makedirs(prompts_dir, exist_ok=True)
25
- with open(os.path.join(prompts_dir, "TcmChat.txt"), "w") as f:
26
- f.write("You are a helpful TCM medical assistant named 仲景中医大语言模型.\n")
27
 
28
- return "Environment setup complete. Model downloaded and prompts file created."
29
-
30
- # 聊天函数
31
- def chat_with_model(user_input, history):
32
- prompt = f"User: {user_input}\nAssistant:"
33
- with open(os.path.join("llama.cpp/prompts", "TcmChat.txt"), "a") as f:
34
- f.write(prompt + "\n")
35
-
36
- # 执行命令并捕获输出
37
- command = f"./llama.cpp/build/bin/main -m models/ZhongJing1_5-1_8b-q4_0.gguf -n 256 --repeat_penalty 1.0 --color -i -r \"User:\" -f llama.cpp/prompts/chat-with-bob.txt"
38
- result = subprocess.run(command, shell=True, capture_output=True, text=True)
39
 
40
- # 解析输出
41
- output_lines = result.stdout.split("\n")
42
- response = ""
43
- capture = False
44
- for line in output_lines:
45
- if "User:" in line:
46
- capture = True
47
- if capture:
48
- response += line + "\n"
49
 
50
- response = response.strip()
51
- history.append((user_input, response))
52
- return history, history
53
-
54
- # 创建 Gradio 界面
55
- with gr.Blocks() as demo:
56
- chatbot = gr.Chatbot()
57
- state = gr.State([])
58
-
59
- with gr.Row():
60
- with gr.Column():
61
- setup_btn = gr.Button("Setup Environment")
62
- setup_output = gr.Textbox(label="Setup Output")
63
-
64
- with gr.Column():
65
- user_input = gr.Textbox(show_label=False, placeholder="Enter your message...")
66
- submit_btn = gr.Button("Submit")
67
 
68
- setup_btn.click(setup_environment, outputs=setup_output)
69
- submit_btn.click(chat_with_model, [user_input, state], [chatbot, state])
 
70
 
71
  if __name__ == "__main__":
72
- demo.launch()
73
-
74
-
75
-
 
 
 
1
  import os
2
  import subprocess
3
 
4
def run_command(command):
    """Run *command* in a shell and return its captured stdout.

    Args:
        command: Full shell command line to execute. shell=True is required
            because callers pass complete command strings; only hard-coded,
            trusted commands should ever be passed here.

    Returns:
        The command's stdout, decoded as UTF-8.

    Raises:
        RuntimeError: If the command exits with a non-zero status. The
            message carries the command's stderr. (RuntimeError subclasses
            Exception, so existing `except Exception` callers still work.)
    """
    # subprocess.run replaces the manual Popen/communicate dance and
    # guarantees the process is reaped even on error.
    result = subprocess.run(command, shell=True, capture_output=True)
    if result.returncode != 0:
        raise RuntimeError(
            f"Command failed with error: {result.stderr.decode('utf-8')}"
        )
    return result.stdout.decode('utf-8')
10
+
11
def main():
    """Set up and launch the ZhongJing llama.cpp chat demo.

    Clones llama.cpp, downloads the GGUF model, builds the project, writes
    the seed prompt, and starts the interactive CLI. Every step shells out
    through run_command. Paths are hard-coded for a Colab-style /content
    layout — TODO confirm this matches the deployment environment.
    """
    repo_dir = "/content/llama.cpp"
    model_name = "ZhongJing1_5-1_8b-q4_0.gguf"
    # The model is downloaded into the repo directory (cwd after chdir), so
    # it must also be *run* from there. The original command pointed at
    # /content/ZhongJing1_5-1_8b-q4_0.gguf, which is never created.
    model_path = os.path.join(repo_dir, model_name)

    # Clone the repository — skipped on reruns, since `git clone` fails
    # outright when the target directory already exists.
    if not os.path.isdir(repo_dir):
        print("Cloning the repository...")
        run_command(
            f"git clone https://github.com/ggerganov/llama.cpp.git {repo_dir}"
        )

    # Change directory to the cloned repository.
    os.chdir(repo_dir)

    # Download the GGUF file (skipped if already present). The URL is quoted
    # because the bare `?` and `=` in `?download=true` would otherwise be
    # subject to shell globbing/word splitting.
    if not os.path.exists(model_path):
        print("Downloading the GGUF file...")
        run_command(
            f'wget -O {model_name} '
            '"https://huggingface.co/CMLL/ZhongJing-2-1_8b-GGUF/resolve/main/'
            'ZhongJing1_5-1_8b-q4_0.gguf?download=true"'
        )

    # Compile the project.
    print("Compiling the project...")
    run_command("make")

    # Write the chat prompt: system persona plus a short seed dialog.
    print("Modifying the chat-with-bob.txt file...")
    dialog_content = """
Transcript of a dialog, where the User interacts with an Assistant named Bob. Bob is a helpful TCM medical assistant, also named 仲景中医大语言模型, and never fails to answer the User's requests immediately and with precision.

User: Hello, Bob.
Bob: Hello. How may I help you today?
"""
    with open("prompts/chat-with-bob.txt", "w") as f:
        f.write(dialog_content)

    # Run the llama-cli command.
    # NOTE(review): `-i` starts an interactive session, but run_command
    # captures stdout/stderr through pipes and supplies no stdin, so this
    # likely blocks with no visible prompt — confirm the intended usage.
    print("Running the llama-cli command...")
    run_command(
        f"./llama-cli -m {model_path} -n 256 --repeat_penalty 1.0 --color -i "
        "-r 'User:' -f prompts/chat-with-bob.txt"
    )


if __name__ == "__main__":
    main()