Update app.py
app.py
CHANGED
@@ -2,7 +2,7 @@ import gradio as gr
 from huggingface_hub import InferenceClient
 import os
 import pandas as pd
-from typing import List,
+from typing import List, Dict

 # Inference API client setup
 hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))
@@ -37,7 +37,7 @@ test_parquet_content = load_parquet('test.parquet')

 def respond(
     message,
-    history: List[
+    history: List[Dict[str, str]],
     system_message="",  # default value added
     max_tokens=4000,  # default value changed
     temperature=0.7,  # default value kept
@@ -61,7 +61,10 @@ def respond(
     parquet_content = ""
     for item in history:
         if item['role'] == 'assistant' and 'test.parquet file contents' in item['content']:
-            parquet_content = item['content'].split("```markdown\n")[1].split("\n```")[0]
+            try:
+                parquet_content = item['content'].split("```markdown\n")[1].split("\n```")[0]
+            except IndexError:
+                parquet_content = ""
             break
     system_message += f"\n\ntest.parquet file contents:\n```markdown\n{parquet_content}\n```"
     message = "I have learned the contents of the test.parquet file and am ready to explain it and answer questions about it. Feel free to ask anything you are curious about."
@@ -153,7 +156,7 @@ def text_to_parquet(text):
         with open(parquet_filename, "rb") as f:
             data = f.read()

-        return f"{parquet_filename} file was successfully converted.", parquet_content, data
+        return f"{parquet_filename} file was successfully converted.", parquet_content, (parquet_filename, data)
     except Exception as e:
         return f"An error occurred while converting the text: {str(e)}", "", None

@@ -161,18 +164,11 @@ css = """
 footer {
     visibility: hidden;
 }
-#chatbot-container {
+#chatbot-container, #chatbot-data-upload {
     height: 600px;
     overflow-y: scroll;
 }
-#chatbot-container .message {
-    font-size: 14px;
-}
-#chatbot-data-upload {
-    height: 600px;
-    overflow-y: scroll;
-}
-#chatbot-data-upload .message {
+#chatbot-container .message, #chatbot-data-upload .message {
     font-size: 14px;
 }
 """
@@ -183,7 +179,7 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:

     with gr.Tab("Chatbot"):
         gr.Markdown("### Chat with the LLM")
-        chatbot = gr.Chatbot(label="Chatbot", elem_id="chatbot-container")
+        chatbot = gr.Chatbot(label="Chatbot", type="messages", elem_id="chatbot-container")
         msg = gr.Textbox(label="Message input", placeholder="Enter your message here...")
         send = gr.Button("Send")

@@ -251,7 +247,7 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
         # Read as binary data so the file can be downloaded
         with open(parquet_filename, "rb") as f:
             data = f.read()
-        return message, load_parquet(parquet_filename), data
+        return message, load_parquet(parquet_filename), (parquet_filename, data)
     else:
         return message, "", None

@@ -289,13 +285,13 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
         )

         gr.Markdown("### Chat with the LLM")
-        chatbot_data_upload = gr.Chatbot(label="Chatbot data upload", elem_id="chatbot-data-upload")
+        chatbot_data_upload = gr.Chatbot(label="Chatbot data upload", type="messages", elem_id="chatbot-data-upload")
         msg_data_upload = gr.Textbox(label="Message input", placeholder="Enter your message here...")
         send_data_upload = gr.Button("Send")

         # Chatbot message handler (data upload version)
         def handle_message_data_upload(message, history, system_message, max_tokens, temperature, top_p, parquet_data):
-            # Parquet
+            # Add logic that makes use of the Parquet data (e.g., data analysis, Q&A)
             # For now, just handle the message as is
             history = history or []
             history.append({"role": "user", "content": message})
@@ -353,5 +349,4 @@ with gr.Blocks(theme="Nymbo/Nymbo_Theme", css=css) as demo:
     gr.Markdown("### Interact with the LLM model using the Gradio interface!")

 if __name__ == "__main__":
-    demo.launch()
-
+    demo.launch(share=True)
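The body of respond() is not shown in these hunks, so the sketch below is only an illustration, not code from this commit, of how the messages-format history (now typed List[Dict[str, str]] and produced by gr.Chatbot(type="messages")) might be combined with the system prompt and streamed through the InferenceClient. build_messages and stream_reply are hypothetical names; the max_tokens and temperature defaults mirror the diff, while top_p is left without a default because the commit does not show one.

import os
from typing import Dict, List
from huggingface_hub import InferenceClient

hf_client = InferenceClient("CohereForAI/c4ai-command-r-plus-08-2024", token=os.getenv("HF_TOKEN"))

def build_messages(history: List[Dict[str, str]], system_message: str, user_message: str) -> List[Dict[str, str]]:
    # With type="messages", each history entry is already a {"role": ..., "content": ...} dict,
    # so it can be passed through unchanged between the system prompt and the new user turn.
    return [{"role": "system", "content": system_message}, *history, {"role": "user", "content": user_message}]

def stream_reply(messages: List[Dict[str, str]], max_tokens: int = 4000, temperature: float = 0.7, top_p: float = 0.9):
    # Stream partial completions so the chatbot UI can update incrementally.
    partial = ""
    for chunk in hf_client.chat_completion(messages, max_tokens=max_tokens, temperature=temperature, top_p=top_p, stream=True):
        partial += chunk.choices[0].delta.content or ""
        yield partial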
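load_parquet() itself lies outside the hunks shown above; the following is a minimal sketch of what it presumably does, assuming it renders the parquet file as a markdown table with pandas (DataFrame.to_markdown requires the tabulate package, and pd.read_parquet needs pyarrow or fastparquet installed).

import pandas as pd

def load_parquet(filename: str) -> str:
    # Hypothetical reconstruction, not code from this commit:
    # read the parquet file and return a markdown preview of the first rows.
    try:
        df = pd.read_parquet(filename)
        return df.head(10).to_markdown(index=False)
    except Exception as e:
        return f"Failed to load the parquet file: {str(e)}"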