Update app.py
app.py
CHANGED
@@ -280,42 +280,6 @@ def duckduckgo_search(query):
         results = ddgs.text(query, max_results=5)
     return results
 
-def chat(self, keywords: str, model: str = "gpt-4o-mini", timeout: int = 30) -> str:
-    """Initiates a chat session with DuckDuckGo AI.
-
-    Args:
-        keywords (str): The initial message or question to send to the AI.
-        model (str): The model to use: "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b". Defaults to "gpt-4o-mini".
-        timeout (int): Timeout value for the HTTP client. Defaults to 30.
-
-    Returns:
-        str: The response from the AI.
-    """
-    if model == "gpt-4o-mini":
-        url = "https://api.duckduckgo.com/api/d2a/v1/chat/gpt-4o-mini"
-    elif model == "claude-3-haiku":
-        url = "https://api.duckduckgo.com/api/d2a/v1/chat/claude-3-haiku"
-    elif model == "llama-3.1-70b":
-        url = "https://api.duckduckgo.com/api/d2a/v1/chat/llama-3.1-70b"
-    elif model == "mixtral-8x7b":
-        url = "https://api.duckduckgo.com/api/d2a/v1/chat/mixtral-8x7b"
-    else:
-        raise ValueError(f"Invalid model: {model}")
-
-    payload = {"keywords": keywords}
-    headers = {
-        "Content-Type": "application/json",
-        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
-    }
-
-    try:
-        response = requests.post(url, json=payload, headers=headers, timeout=timeout)
-        response.raise_for_status()
-        return response.json()["response"]
-    except requests.exceptions.RequestException as e:
-        logging.error(f"Error in DuckDuckGo chat: {str(e)}")
-        return "Error in DuckDuckGo chat. Please try again later."
-
 class CitingSources(BaseModel):
     sources: List[str] = Field(
         ...,
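The removed helper sits inside duckduckgo_search, whose surviving context line calls ddgs.text(query, max_results=5). A minimal sketch of what that helper presumably looks like, assuming the duckduckgo_search package's DDGS client is used as a context manager (the import and the with-block are assumptions; only the two context lines above appear in the diff):

    # Sketch only: DDGS import and with-block are inferred, not shown in app.py's diff.
    from duckduckgo_search import DDGS

    def duckduckgo_search(query):
        with DDGS() as ddgs:
            results = ddgs.text(query, max_results=5)  # list of dicts: title, href, body
        return results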
@@ -406,6 +370,42 @@ def respond(message, history, model, temperature, num_calls, use_web_search, sel
     else:
         yield f"An error occurred with the {model} model: {str(e)}. Please try again or select a different model."
 
+def chat(self, keywords: str, model: str = "gpt-4o-mini", timeout: int = 30) -> str:
+    """Initiates a chat session with DuckDuckGo AI.
+
+    Args:
+        keywords (str): The initial message or question to send to the AI.
+        model (str): The model to use: "gpt-4o-mini", "claude-3-haiku", "llama-3.1-70b", "mixtral-8x7b". Defaults to "gpt-4o-mini".
+        timeout (int): Timeout value for the HTTP client. Defaults to 30.
+
+    Returns:
+        str: The response from the AI.
+    """
+    if model == "gpt-4o-mini":
+        url = "https://api.duckduckgo.com/api/d2a/v1/chat/gpt-4o-mini"
+    elif model == "claude-3-haiku":
+        url = "https://api.duckduckgo.com/api/d2a/v1/chat/claude-3-haiku"
+    elif model == "llama-3.1-70b":
+        url = "https://api.duckduckgo.com/api/d2a/v1/chat/llama-3.1-70b"
+    elif model == "mixtral-8x7b":
+        url = "https://api.duckduckgo.com/api/d2a/v1/chat/mixtral-8x7b"
+    else:
+        raise ValueError(f"Invalid model: {model}")
+
+    payload = {"keywords": keywords}
+    headers = {
+        "Content-Type": "application/json",
+        "User-Agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/58.0.3029.110 Safari/537.3"
+    }
+
+    try:
+        response = requests.post(url, json=payload, headers=headers, timeout=timeout)
+        response.raise_for_status()
+        return response.json()["response"]
+    except requests.exceptions.RequestException as e:
+        logging.error(f"Error in DuckDuckGo chat: {str(e)}")
+        return "Error in DuckDuckGo chat. Please try again later."
+
 logging.basicConfig(level=logging.DEBUG)
 
 def get_response_from_cloudflare(prompt, context, query, num_calls=3, temperature=0.2, search_type="pdf"):
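The relocated chat method keeps its self parameter, so it presumably hangs off whatever object also exposes the DuckDuckGo search helpers; the wrapper name below is a hypothetical stand-in, not a class defined in app.py. A minimal usage sketch:

    # Hypothetical usage; DDGChat is an assumed wrapper exposing the chat() method
    # shown in the diff. On an HTTP failure the method logs the exception and
    # returns the fallback error string instead of raising.
    client = DDGChat()
    answer = client.chat("What is retrieval-augmented generation?", model="claude-3-haiku", timeout=30)
    print(answer)

    # An unrecognized model name raises ValueError before any request is sent:
    # client.chat("hello", model="gpt-4")  -> ValueError: Invalid model: gpt-4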