import os
import uuid
import gradio as gr
import requests
from datetime import datetime

# Configuration
LLAMA_SERVER_URL = "http://localhost:3000/completion"  # llama-server listens on port 3000
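# A minimal sketch of how the backend might be started (the model file name below is an
# assumption; llama.cpp's llama-server takes -m for the GGUF model and --port for the port):
#   llama-server -m qwen2.5-0.5b-instruct-q4_k_m.gguf --port 3000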

# Call the llama-server completion API
def call_llama_server(messages, max_length=50):
    payload = {
        "prompt": messages[-1]["content"],  # send only the latest user prompt
        "n_predict": max_length
    }
    try:
        response = requests.post(LLAMA_SERVER_URL, json=payload, timeout=10)
        response.raise_for_status()
        return response.json().get("content", "No response")
    except requests.RequestException as e:
        return f"Error: {str(e)}"

# Format the stored history for the UI / API call
def format_history(history):
    return [{"role": "user", "content": item["content"]} if item["role"] == "user"
            else {"role": "assistant", "content": item["content"]}
            for item in history if item["role"] != "placeholder"]

# Main interface
with gr.Blocks(
    title="LLaMA Chat",
    theme=gr.themes.Soft(primary_hue="purple", secondary_hue="gray"),
    css="""
    .chatbot { height: calc(100vh - 100px); overflow-y: auto; }
    .message-user { background-color: #e6f3ff; padding: 10px; border-radius: 5px; margin: 5px 0; }
    .message-assistant { background-color: #f0f0f0; padding: 10px; border-radius: 5px; margin: 5px 0; }
    """
) as demo:
    # Session state holding all conversations
    state = gr.State({
        "conversations_history": {},
        "conversations": [],
        "conversation_id": ""
    })
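    # "conversations_history" maps a conversation id to its list of message dicts,
    # "conversations" holds the dropdown entries ({"label", "value"}), and
    # "conversation_id" is the id of the currently active conversation.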
gr.Markdown("# LLaMA Chat\nChat với mô hình Qwen2.5-0.5B - Powered by llama.cpp")
with gr.Row():
# Cột trái: Danh sách hội thoại
with gr.Column(scale=1, min_width=200):
gr.Markdown("### Hội thoại")
conversations = gr.Dropdown(label="Chọn hội thoại", choices=[], value="")
new_chat_btn = gr.Button("Tạo hội thoại mới", variant="primary")
clear_btn = gr.Button("Xóa lịch sử", variant="secondary")
# Cột phải: Chatbot
with gr.Column(scale=3):
chatbot = gr.Chatbot(label="Cuộc trò chuyện", elem_classes="chatbot")
with gr.Row():
prompt_input = gr.Textbox(
label="Nhập tin nhắn",
placeholder="Gõ tin nhắn hoặc '/' để xem gợi ý...",
show_label=False,
container=False
)
submit_btn = gr.Button("Gửi", variant="primary")

    # Event handlers
    def submit_prompt(prompt, state):
        if not prompt.strip():
            return state, [], ""
        # Create a new conversation if none is active
        if not state["conversation_id"]:
            convo_id = str(uuid.uuid4())
            state["conversation_id"] = convo_id
            state["conversations_history"][convo_id] = []
            state["conversations"].append({"label": prompt[:20] + "...", "value": convo_id})
        history = state["conversations_history"][state["conversation_id"]]
        history.append({"role": "user", "content": prompt, "key": str(uuid.uuid4())})
        # Ask llama-server for a completion
        response = call_llama_server(format_history(history))
        history.append({"role": "assistant", "content": response, "key": str(uuid.uuid4())})
        return (
            state,
            [(item["content"], None) if item["role"] == "user" else (None, item["content"])
             for item in history],
            ""
        )

    def new_chat(state):
        state["conversation_id"] = ""
        return state, [], gr.update(choices=[(c["label"], c["value"]) for c in state["conversations"]])

    def select_conversation(state, convo_id):
        if convo_id and convo_id in state["conversations_history"]:
            state["conversation_id"] = convo_id
            history = state["conversations_history"][convo_id]
            return (
                state,
                [(item["content"], None) if item["role"] == "user" else (None, item["content"])
                 for item in history]
            )
        return state, []

    def clear_history(state):
        if state["conversation_id"]:
            state["conversations_history"][state["conversation_id"]] = []
        return state, []

    # Event wiring
    submit_btn.click(
        fn=submit_prompt,
        inputs=[prompt_input, state],
        outputs=[state, chatbot, prompt_input]
    )
    new_chat_btn.click(
        fn=new_chat,
        inputs=[state],
        outputs=[state, chatbot, conversations]
    )
    conversations.change(
        fn=select_conversation,
        inputs=[state, conversations],
        outputs=[state, chatbot]
    )
    clear_btn.click(
        fn=clear_history,
        inputs=[state],
        outputs=[state, chatbot]
    )
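
    # Wiring summary: "Send" submits the prompt and clears the textbox,
    # "New conversation" resets the active id and refreshes the dropdown,
    # picking a dropdown entry reloads that conversation, and "Clear history" empties it.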

# Port 7860 is used here so the Gradio UI does not collide with llama-server,
# which already occupies port 3000 in this configuration.
demo.launch(server_name="0.0.0.0", server_port=7860)