Spaces:
Sleeping
Sleeping
changes in app
Browse files
app.py
CHANGED
@@ -1,6 +1,8 @@
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
|
|
|
|
|
4 |
def format_prompt(message, history):
|
5 |
prompt = "<s>"
|
6 |
for user_prompt, bot_response in history:
|
@@ -33,7 +35,8 @@ def inference(message, history, model="mistralai/Mixtral-8x7B-Instruct-v0.1", Te
|
|
33 |
yield partial_message
|
34 |
|
35 |
|
36 |
-
chatbot = gr.Chatbot(
|
|
|
37 |
|
38 |
|
39 |
UI= gr.ChatInterface(
|
@@ -41,7 +44,7 @@ UI= gr.ChatInterface(
|
|
41 |
chatbot=chatbot,
|
42 |
description="The Rapid TGI (Text Generation Inference) has developed by learning purpose",
|
43 |
title="Rapid TGI",
|
44 |
-
additional_inputs_accordion="Additional Configuration to get better response",
|
45 |
retry_btn=None,
|
46 |
undo_btn=None,
|
47 |
clear_btn="Clear",
|
@@ -57,6 +60,6 @@ UI= gr.ChatInterface(
|
|
57 |
gr.Slider(value=0.95, maximum=1.0,label="Top P"),
|
58 |
gr.Slider(value=0.93, maximum=1.0,label="Repetition Penalty"),
|
59 |
],
|
60 |
-
examples=[["Hello"], ["
|
61 |
)
|
62 |
UI.queue().launch(debug=True)
|
|
|
1 |
import gradio as gr
|
2 |
from huggingface_hub import InferenceClient
|
3 |
|
4 |
+
client = InferenceClient("mistralai/Mixtral-8x7B-Instruct-v0.1")
|
5 |
+
|
6 |
def format_prompt(message, history):
|
7 |
prompt = "<s>"
|
8 |
for user_prompt, bot_response in history:
|
|
|
35 |
yield partial_message
|
36 |
|
37 |
|
38 |
+
# Chat display widget: content-width bubbles, custom user/bot avatars,
# a copy button on responses, and like/dislike feedback enabled.
chatbot = gr.Chatbot(
    show_label=False,
    show_copy_button=True,
    likeable=True,
    bubble_full_width=False,
    avatar_images=["/content/icons/user.png", "/content/icons/chat_bot.png"],
)
|
40 |
|
41 |
|
42 |
UI= gr.ChatInterface(
|
|
|
44 |
chatbot=chatbot,
|
45 |
description="The Rapid TGI (Text Generation Inference) has been developed for learning purposes",
|
46 |
title="Rapid TGI",
|
47 |
+
additional_inputs_accordion=gr.Accordion(label="Additional Configuration to get better response",open=False),
|
48 |
retry_btn=None,
|
49 |
undo_btn=None,
|
50 |
clear_btn="Clear",
|
|
|
60 |
gr.Slider(value=0.95, maximum=1.0,label="Top P"),
|
61 |
gr.Slider(value=0.93, maximum=1.0,label="Repetition Penalty"),
|
62 |
],
|
63 |
+
examples=[["Hello"], ["can i know about generative ai ?"], ["how can i deploy a LLM in huggingface inference endpoint ?"]],
|
64 |
)
|
65 |
# Turn on request queuing, then start the app with verbose error output.
app = UI.queue()
app.launch(debug=True)
|