Update app.py
app.py CHANGED
@@ -44,7 +44,7 @@ terminators = [
 from huggingface_hub import InferenceClient
 model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
 client = InferenceClient(model=model_id, token="HF_TOKEN")
-
+print("Client object created!")
 embeddings_model = HuggingFaceEmbeddings(model_name="sentence-transformers/all-mpnet-base-v2")

 ASR_MODEL_NAME = "openai/whisper-large-v3"

@@ -79,16 +79,20 @@ def respond(
     top_p,
 ):

-    messages = [{"role": "system", "content": system_message}]
+    # messages = [{"role": "system", "content": system_message}]

-    for val in history:
-        if val[0]:
-            messages.append({"role": "user", "content": val[0]})
-        if val[1]:
-            messages.append({"role": "assistant", "content": val[1]})
+    # for val in history:
+    #     if val[0]:
+    #         messages.append({"role": "user", "content": val[0]})
+    #     if val[1]:
+    #         messages.append({"role": "assistant", "content": val[1]})

-    messages.append({"role": "user", "content": message})
+    # messages.append({"role": "user", "content": message})

+    messages = [
+        { role: "user", content: "What is Python Programming?" },
+    ]
+
     response = ""

     for message in client.chat_completion(
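As committed, the new hard-coded payload uses bare role and content identifiers, which are undefined names in Python and would raise a NameError at call time; they need to be the string keys "role" and "content". Below is a minimal runnable sketch of the same hard-coded request, assuming the rest of respond() keeps the standard huggingface_hub streaming pattern visible at the end of the hunk, that the real access token is read from the HF_TOKEN environment variable rather than passed as the literal string "HF_TOKEN", and that the sampling values are placeholders rather than the Space's actual settings.

# Minimal sketch of the hard-coded chat_completion call (assumptions noted above).
import os

from huggingface_hub import InferenceClient

model_id = "meta-llama/Meta-Llama-3-8B-Instruct"
# Passing the literal string "HF_TOKEN" (as in the diff) sends that text as the
# token; reading the environment variable is the usual intent on a Space.
client = InferenceClient(model=model_id, token=os.environ.get("HF_TOKEN"))

# The hard-coded payload with proper string keys instead of bare identifiers.
messages = [
    {"role": "user", "content": "What is Python Programming?"},
]

response = ""
# With stream=True, chat_completion yields chunks whose delta carries new tokens.
for chunk in client.chat_completion(
    messages,
    max_tokens=512,      # placeholder value
    stream=True,
    temperature=0.7,     # placeholder value
    top_p=0.95,          # placeholder value
):
    token = chunk.choices[0].delta.content
    if token:
        response += token

print(response)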