Update app.py
app.py CHANGED
@@ -250,7 +250,7 @@ def respond(message, history, model, temperature, num_calls, use_web_search):
 
 logging.basicConfig(level=logging.DEBUG)
 
-def get_response_from_cloudflare(prompt, num_calls=3, temperature=0.2):
+def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2):
     headers = {
         "Authorization": f"Bearer {API_TOKEN}",
         "Content-Type": "application/json"
@@ -259,7 +259,7 @@ def get_response_from_cloudflare(prompt, num_calls=3, temperature=0.2):
 
     inputs = [
         {"role": "system", "content": "You are a friendly assistant that helps answer questions based on provided context."},
-        {"role": "user", "content":
+        {"role": "user", "content": f"Context: {context}\n\nQuestion: {query}"}
     ]
 
     payload = {
@@ -343,7 +343,7 @@ Write a detailed and complete response that answers the following user question:
 
     if model == "@cf/meta/llama-3.1-8b-instruct":
         # Use Cloudflare API with the retrieved context
-        for response in get_response_from_cloudflare(prompt, num_calls, temperature):
+        for response in get_response_from_cloudflare(prompt, context_str, query, num_calls, temperature):
             yield response
     else:
         # Use Hugging Face API
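In short, this commit changes get_response_from_cloudflare to accept the retrieved context and the user query as separate arguments, builds the user message from them instead of only the raw prompt, and updates the call site in respond to pass context_str and query through. Below is a minimal sketch of how the updated function might look end to end, assuming a non-streaming Cloudflare Workers AI REST call; the endpoint URL, account and model configuration, response handling, and the num_calls accumulation loop are not shown in this diff and are illustrative only.

import os
import requests

# Illustrative configuration: the diff only shows API_TOKEN being used; the
# account ID, endpoint URL, and model name here are assumptions, not app.py code.
API_TOKEN = os.environ["CLOUDFLARE_API_TOKEN"]
ACCOUNT_ID = os.environ["CLOUDFLARE_ACCOUNT_ID"]
MODEL = "@cf/meta/llama-3.1-8b-instruct"
API_URL = f"https://api.cloudflare.com/client/v4/accounts/{ACCOUNT_ID}/ai/run/{MODEL}"

def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2):
    # prompt stays in the signature for compatibility with the call site, but the
    # new user message is built from context and query, as in the committed hunk.
    headers = {
        "Authorization": f"Bearer {API_TOKEN}",
        "Content-Type": "application/json"
    }
    inputs = [
        {"role": "system", "content": "You are a friendly assistant that helps answer questions based on provided context."},
        {"role": "user", "content": f"Context: {context}\n\nQuestion: {query}"}
    ]
    full_response = ""
    for _ in range(num_calls):
        payload = {"messages": inputs, "temperature": temperature}
        resp = requests.post(API_URL, headers=headers, json=payload, timeout=60)
        resp.raise_for_status()
        # Workers AI text-generation responses arrive as {"result": {"response": "..."}}
        full_response += resp.json()["result"]["response"]
        yield full_response

# Call site inside respond(), mirroring the updated hunk at line 346; context_str
# is the retrieved document text assembled earlier in respond() (outside this diff):
# for response in get_response_from_cloudflare(prompt, context_str, query, num_calls, temperature):
#     yield response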