Update app.py
app.py (CHANGED)
@@ -4,7 +4,7 @@ from huggingface_hub import InferenceClient
 """
 For more information on `huggingface_hub` Inference API support, please check the docs: https://huggingface.co/docs/huggingface_hub/v0.22.2/en/guides/inference
 """
-client = InferenceClient("
+client = InferenceClient("ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1")
 
 
 def respond(
@@ -27,15 +27,15 @@ def respond(
 
     response = ""
 
-    for message in client.
-
-
+    for message in client.text_generation(
+        prompt=message,
+        max_new_tokens=max_tokens,
         stream=True,
         temperature=temperature,
         top_p=top_p,
+        do_sample=True,
     ):
-        token = message.
-
+        token = message.token.text
         response += token
         yield response
 
@@ -46,7 +46,10 @@ For information on how to customize the ChatInterface, peruse the gradio docs: h
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
-        gr.Textbox(
+        gr.Textbox(
+            value="Bạn là bác sĩ đa khoa với 20 năm kinh nghiệm. Hãy trả lời ngắn gọn, dễ hiểu và chuyên nghiệp. Khi không chắc chắn, hãy đề xuất thăm khám trực tiếp. Giới hạn câu trả lời trong 400 ký tự.",
+            label="System message"
+        ),
         gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
         gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
         gr.Slider(
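
The new default system message (in Vietnamese) roughly translates to: "You are a general practitioner with 20 years of experience. Answer briefly, clearly, and professionally. When unsure, suggest an in-person examination. Limit answers to 400 characters."

As context for the streaming loop above, here is a minimal sketch of how a streamed `text_generation` call is typically consumed with `huggingface_hub`. The model id is taken from the diff; the function name, default values, and the `details=True` flag are assumptions (without `details=True` the streaming iterator yields plain strings rather than objects exposing `.token.text`):

    from huggingface_hub import InferenceClient

    # Model id taken from the diff above.
    client = InferenceClient("ContactDoctor/Bio-Medical-MultiModal-Llama-3-8B-V1")


    def stream_reply(prompt, max_tokens=512, temperature=0.7, top_p=0.95):
        """Yield the reply accumulated so far as tokens stream in (illustrative sketch)."""
        response = ""
        # With stream=True and details=True each item is a TextGenerationStreamOutput
        # whose .token.text holds the newly generated fragment; without details=True
        # the iterator yields plain strings instead.
        for chunk in client.text_generation(
            prompt=prompt,
            max_new_tokens=max_tokens,
            stream=True,
            details=True,
            temperature=temperature,
            top_p=top_p,
            do_sample=True,
        ):
            response += chunk.token.text
            yield response

Using a separate loop variable (`chunk` here) also keeps the incoming prompt from being rebound while the stream is consumed.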
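
For the `gr.ChatInterface` hunk, a short, hypothetical sketch of how the widgets reach the handler: Gradio passes the current values of `additional_inputs` to the callback as extra positional arguments after the user message and the chat history. The echo handler, the English placeholder system message, and the final Top-p slider below are illustrative assumptions, not part of the commit:

    import gradio as gr


    def respond(message, history, system_message, max_tokens, temperature, top_p):
        # Placeholder handler: a real app would call the inference client here,
        # using system_message, max_tokens, temperature and top_p as shown above.
        yield f"[system: {system_message[:30]}...] you said: {message}"


    demo = gr.ChatInterface(
        respond,
        additional_inputs=[
            gr.Textbox(value="You are a helpful assistant.", label="System message"),
            gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
            gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
            gr.Slider(minimum=0.05, maximum=1.0, value=0.95, step=0.05, label="Top-p (assumed)"),
        ],
    )

    if __name__ == "__main__":
        demo.launch()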
|