Update app.py
app.py CHANGED
@@ -5,19 +5,23 @@ import json
 from transformers import AutoTokenizer, AutoModelForCausalLM
 import gradio as gr
 import modelscope_studio.components.antd as antd
+import modelscope_studio.components.antdx as antdx
 import modelscope_studio.components.base as ms
 import modelscope_studio.components.pro as pro
 
+# Define model paths
 MODEL_PATHS = {
     "LeCarnet-3M": "MaxLSB/LeCarnet-3M",
     "LeCarnet-8M": "MaxLSB/LeCarnet-8M",
     "LeCarnet-21M": "MaxLSB/LeCarnet-21M",
 }
 
+# Set HF token
 hf_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")
 if not hf_token:
     raise ValueError("HUGGINGFACEHUB_API_TOKEN environment variable not set.")
 
+# Load tokenizer and model globally
 tokenizer = None
 model = None
 
@@ -38,6 +42,21 @@ def generate_response(prompt, max_new_tokens=200):
     response = tokenizer.decode(outputs[0], skip_special_tokens=True)
     return response[len(prompt):].strip()
 
+# CSS for styling chatbot header with avatar
+css = """
+.chatbot-chat-messages .ant-pro-chat-message .ant-pro-chat-message-header {
+    display: flex;
+    align-items: center;
+}
+.chatbot-chat-messages .ant-pro-chat-message .ant-pro-chat-message-header img {
+    width: 20px;
+    height: 20px;
+    margin-right: 8px;
+    vertical-align: middle;
+}
+"""
+
+# Default settings
 DEFAULT_SETTINGS = {
     "model": "LeCarnet-3M",
     "sys_prompt": "",
@@ -54,17 +73,28 @@ state = gr.State({
     },
 })
 
+# Welcome message (optional)
+def welcome_config():
+    return {
+        "title": "LeCarnet Chatbot",
+        "description": "Start chatting below!",
+        "promptSuggestions": ["Hello", "Tell me a story", "How are you?"]
+    }
+
 with gr.Blocks(css=css) as demo:
     with ms.Application(), antd.Row(gutter=[20, 20], wrap=False, elem_id="chatbot"):
         # Right Column - Chat Interface
         with antd.Col(flex=1, elem_style=dict(height="100%")):
             with antd.Flex(vertical=True, gap="small", elem_classes="chatbot-chat"):
-                chatbot = pro.Chatbot(
+                chatbot = pro.Chatbot(
+                    elem_classes="chatbot-chat-messages",
+                    height=0,
+                    welcome_config=welcome_config
+                )
                 with antdx.Suggestion(items=["Hello", "How are you?", "Tell me something"]) as suggestion:
                     with ms.Slot("children"):
                         input = antdx.Sender(placeholder="Type your message here...")
 
-        # Internal State
        current_state = state
 
    def add_message(user_input, state_value):
@@ -77,7 +107,13 @@ with gr.Blocks(css=css) as demo:
         yield {"chatbot": gr.update(value=history)}
 
         # Start assistant response
-        history.append({
+        history.append({
+            "role": "assistant",
+            "content": [],
+            "key": str(uuid.uuid4()),
+            "header": f'<img src="/file=media/le-carnet.png" style="width:20px;height:20px;margin-right:8px;"> <span>{selected_model}</span>',
+            "loading": True
+        })
         yield {"chatbot": gr.update(value=history)}
 
         try:
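
For context, a minimal sketch of the pattern this commit relies on: the handler is a generator that appends a "loading" assistant message to the chat history, yields an update so the placeholder renders immediately, then fills the message in once generation finishes. The helper names (load_model, chat_fn) and the plain-history yields are assumptions for illustration; the Space itself yields gr.update(...) dicts keyed by component and renders the history with modelscope_studio's pro.Chatbot.

# Minimal sketch (assumptions, not the Space's actual code): lazily load one of
# the LeCarnet checkpoints and fill in a reply while a "loading" placeholder
# sits in the chat history. `load_model` and `chat_fn` are hypothetical names;
# the message fields mirror the ones added in this commit
# ("role", "content", "key", "loading").
import os
import uuid

from transformers import AutoModelForCausalLM, AutoTokenizer

MODEL_PATHS = {
    "LeCarnet-3M": "MaxLSB/LeCarnet-3M",
    "LeCarnet-8M": "MaxLSB/LeCarnet-8M",
    "LeCarnet-21M": "MaxLSB/LeCarnet-21M",
}
hf_token = os.environ.get("HUGGINGFACEHUB_API_TOKEN")

_cache = {}


def load_model(name):
    # Load and cache the tokenizer/model for the selected checkpoint.
    if name not in _cache:
        path = MODEL_PATHS[name]
        tokenizer = AutoTokenizer.from_pretrained(path, token=hf_token)
        model = AutoModelForCausalLM.from_pretrained(path, token=hf_token)
        _cache[name] = (tokenizer, model)
    return _cache[name]


def chat_fn(user_input, history, selected_model="LeCarnet-3M"):
    # Generator: append the user turn, show a loading assistant bubble,
    # then replace it with the generated continuation.
    history = history + [{"role": "user", "content": user_input}]
    yield history

    # Placeholder assistant message, as in the commit above.
    history.append({
        "role": "assistant",
        "content": "",
        "key": str(uuid.uuid4()),
        "loading": True,
    })
    yield history

    tokenizer, model = load_model(selected_model)
    inputs = tokenizer(user_input, return_tensors="pt")
    outputs = model.generate(**inputs, max_new_tokens=200)
    text = tokenizer.decode(outputs[0], skip_special_tokens=True)
    history[-1]["content"] = text[len(user_input):].strip()
    history[-1]["loading"] = False
    yield history

In the real app each yield is wrapped as yield {"chatbot": gr.update(value=history)} so that only the pro.Chatbot component is updated.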