Commit · caceecd
1 Parent(s): 6ea52a1
fix
app.py CHANGED
@@ -1,5 +1,6 @@
-import
+import gradio as gr
 from huggingface_hub import InferenceClient
+import os
 
 HF_API_KEY = os.getenv("HF_API_KEY")  # Retrieve API key from environment variable
 
@@ -7,7 +8,6 @@ models = ["HuggingFaceH4/zephyr-7b-beta", "microsoft/Phi-4-mini-instruct"]
 client = InferenceClient(model=models[1], token=HF_API_KEY)  # Pass API key to client
 
 
-
 def respond(
     message,
     history: list[tuple[str, str]],
@@ -16,7 +16,7 @@ def respond(
     temperature,
     top_p,
 ):
-
+    messages = [{"role": "system", "content": system_message}]
 
     for val in history:
         if val[0]:
@@ -60,5 +60,6 @@ demo = gr.ChatInterface(
     ],
 )
 
+
 if __name__ == "__main__":
     demo.launch()
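For context, here is a minimal sketch of how the pieces visible in this diff fit together, modeled on the standard Gradio ChatInterface chat template that this app.py appears to follow. Everything not shown in the hunks above (the system_message and max_tokens parameters, the chat_completion streaming loop, and the exact additional_inputs widgets) is an assumption for illustration, not necessarily the Space's actual code.

import os

import gradio as gr
from huggingface_hub import InferenceClient

HF_API_KEY = os.getenv("HF_API_KEY")  # Retrieve API key from environment variable

models = ["HuggingFaceH4/zephyr-7b-beta", "microsoft/Phi-4-mini-instruct"]
client = InferenceClient(model=models[1], token=HF_API_KEY)  # Pass API key to client


def respond(
    message,
    history: list[tuple[str, str]],
    system_message,      # assumed parameter, fed by the Textbox below
    max_tokens,          # assumed parameter, fed by the first Slider below
    temperature,
    top_p,
):
    # Build the chat transcript: system prompt, prior turns, then the new user message.
    messages = [{"role": "system", "content": system_message}]
    for val in history:
        if val[0]:
            messages.append({"role": "user", "content": val[0]})
        if val[1]:
            messages.append({"role": "assistant", "content": val[1]})
    messages.append({"role": "user", "content": message})

    # Stream the reply back to the UI chunk by chunk.
    response = ""
    for chunk in client.chat_completion(
        messages,
        max_tokens=max_tokens,
        stream=True,
        temperature=temperature,
        top_p=top_p,
    ):
        response += chunk.choices[0].delta.content or ""
        yield response


demo = gr.ChatInterface(
    respond,
    additional_inputs=[
        gr.Textbox(value="You are a friendly chatbot.", label="System message"),
        gr.Slider(minimum=1, maximum=2048, value=512, step=1, label="Max new tokens"),
        gr.Slider(minimum=0.1, maximum=4.0, value=0.7, step=0.1, label="Temperature"),
        gr.Slider(minimum=0.1, maximum=1.0, value=0.95, step=0.05, label="Top-p (nucleus sampling)"),
    ],
)

if __name__ == "__main__":
    demo.launch()

Reading HF_API_KEY with os.getenv keeps the token out of the repository; on a Space it would typically be set as a secret in the Space settings so the client can authenticate against the Inference API without hard-coding credentials.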