Update app.py
app.py CHANGED
@@ -37,7 +37,11 @@ MODELS = [
     "mistralai/Mistral-7B-Instruct-v0.3",
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
     "@cf/meta/llama-3.1-8b-instruct",
-    "mistralai/Mistral-Nemo-Instruct-2407"
+    "mistralai/Mistral-Nemo-Instruct-2407",
+    "duckduckgo/gpt-4o-mini",
+    "duckduckgo/claude-3-haiku",
+    "duckduckgo/llama-3.1-70b",
+    "duckduckgo/mixtral-8x7b"
 ]
 
 # Initialize LlamaParse
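
The new `duckduckgo/...` entries follow a provider-prefix convention: IDs starting with `duckduckgo/` are routed to DuckDuckGo chat (see the `respond` hunk below), and the prefix is stripped to recover the short model name the client expects. A minimal sketch of that convention, using one of the IDs added here:

    model_id = "duckduckgo/claude-3-haiku"

    if model_id.startswith("duckduckgo/"):
        # Strip the provider prefix to get the name DDGS().chat() expects.
        ddg_model = model_id.split('/')[-1]
        print(ddg_model)  # -> claude-3-haiku
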
@@ -297,41 +301,60 @@ def retry_last_response(history, model, temperature, num_calls):
 
     return chatbot_interface(last_user_msg, history, model, temperature, num_calls)
 
+def get_response_from_duckduckgo(query, model, num_calls=1, temperature=0.2):
+    logging.info(f"Using DuckDuckGo chat with model: {model}")
+    ddg_model = model.split('/')[-1]  # Extract the model name from the full string
+
+    full_response = ""
+    for _ in range(num_calls):
+        try:
+            results = DDGS().chat(query, model=ddg_model)
+            full_response += results + "\n"
+        except Exception as e:
+            logging.error(f"Error in generating response from DuckDuckGo: {str(e)}")
+            yield f"An error occurred with the {model} model: {str(e)}. Please try again."
+            return
+
+    yield full_response.strip()
+
 def respond(message, history, model, temperature, num_calls, selected_docs):
     logging.info(f"User Query: {message}")
     logging.info(f"Model Used: {model}")
     logging.info(f"Selected Documents: {selected_docs}")
 
     try:
-        embed = get_embeddings()
-        if os.path.exists("faiss_database"):
-            database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
-            retriever = database.as_retriever(search_kwargs={"k": 20})
-
-            # Filter relevant documents based on user selection
-            all_relevant_docs = retriever.get_relevant_documents(message)
-            relevant_docs = [doc for doc in all_relevant_docs if doc.metadata["source"] in selected_docs]
-
-            if not relevant_docs:
-                yield "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
-                return
-
-            context_str = "\n".join([doc.page_content for doc in relevant_docs])
-        else:
-            context_str = "No documents available."
-            yield "No documents available. Please upload PDF documents to answer questions."
-            return
-
-        if model == "@cf/meta/llama-3.1-8b-instruct":
-            # Use Cloudflare API
-            for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
-                first_line = partial_response.split('\n')[0] if partial_response else ''
+        if model.startswith("duckduckgo/"):
+            # Use DuckDuckGo chat for the new models
+            for partial_response in get_response_from_duckduckgo(message, model, num_calls, temperature):
                 yield partial_response
         else:
-            # Use Hugging Face API
-            for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
-                yield partial_response
-
+            # Existing logic for PDF-based queries and other models
+            embed = get_embeddings()
+            if os.path.exists("faiss_database"):
+                database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
+                retriever = database.as_retriever(search_kwargs={"k": 20})
+
+                all_relevant_docs = retriever.get_relevant_documents(message)
+                relevant_docs = [doc for doc in all_relevant_docs if doc.metadata["source"] in selected_docs]
+
+                if not relevant_docs:
+                    yield "No relevant information found in the selected documents. Please try selecting different documents or rephrasing your query."
+                    return
+
+                context_str = "\n".join([doc.page_content for doc in relevant_docs])
+            else:
+                context_str = "No documents available."
+                yield "No documents available. Please upload PDF documents to answer questions."
+                return
+
+            if model == "@cf/meta/llama-3.1-8b-instruct":
+                # Use Cloudflare API
+                for partial_response in get_response_from_cloudflare(prompt="", context=context_str, query=message, num_calls=num_calls, temperature=temperature, search_type="pdf"):
+                    yield partial_response
+            else:
+                # Use Hugging Face API
+                for partial_response in get_response_from_pdf(message, model, selected_docs, num_calls=num_calls, temperature=temperature):
+                    yield partial_response
     except Exception as e:
         logging.error(f"Error with {model}: {str(e)}")
         if "microsoft/Phi-3-mini-4k-instruct" in model:
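
The `DDGS` client used above comes from the `duckduckgo_search` package; its `chat()` method takes the prompt plus a short model name and returns the whole reply as a single string. It exposes no temperature control, which is why the `temperature` parameter of `get_response_from_duckduckgo` is accepted but never forwarded, and since the function only yields after the loop finishes, these models answer in one piece rather than streaming. A minimal standalone call, assuming `duckduckgo_search` is installed:

    from duckduckgo_search import DDGS

    # Blocking, non-streaming call; the reply comes back as one string.
    reply = DDGS().chat(
        "What's the latest news about artificial intelligence?",
        model="claude-3-haiku",
    )
    print(reply)
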
@@ -528,6 +551,7 @@ use_web_search = gr.Checkbox(label="Use Web Search", value=True)
 
 custom_placeholder = "Ask a question (Note: You can toggle between Web Search and PDF Chat in Additional Inputs below)"
 
+# Update the demo interface
 demo = gr.ChatInterface(
     respond,
     additional_inputs=[
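
For context, `gr.ChatInterface` streams automatically whenever the wired handler is a generator, which is why `respond` yields partial responses instead of returning one string. A stripped-down sketch of the same wiring (the handler body is illustrative only):

    import gradio as gr

    def respond(message, history):
        # Each yield replaces the in-progress assistant reply in the UI.
        partial = ""
        for chunk in ["Hello", ", ", "world", "!"]:
            partial += chunk
            yield partial

    gr.ChatInterface(respond).launch()
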
@@ -536,8 +560,8 @@ demo = gr.ChatInterface(
         gr.Slider(minimum=1, maximum=5, value=1, step=1, label="Number of API Calls"),
         document_selector
     ],
-    title="AI-powered PDF Chat Assistant",
-    description="Chat with your PDFs to answer questions.",
+    title="AI-powered PDF Chat and DuckDuckGo Chat Assistant",
+    description="Chat with your PDFs or use DuckDuckGo chat models to answer questions.",
     theme=gr.themes.Soft(
         primary_hue="orange",
         secondary_hue="amber",

@@ -560,18 +584,19 @@ demo = gr.ChatInterface(
     examples=[
         ["Tell me about the contents of the uploaded PDFs."],
         ["What are the main topics discussed in the documents?"],
-        ["Can you summarize the key points from the PDFs?"]
+        ["Can you summarize the key points from the PDFs?"],
+        ["What's the latest news about artificial intelligence?"]
     ],
     cache_examples=False,
     analytics_enabled=False,
-    textbox=gr.Textbox(placeholder="Ask a question about the uploaded PDFs", container=False, scale=7),
+    textbox=gr.Textbox(placeholder="Ask a question about the uploaded PDFs or any topic", container=False, scale=7),
     chatbot = gr.Chatbot(
-
-
-
-
-
-    )
+        show_copy_button=True,
+        likeable=True,
+        layout="bubble",
+        height=400,
+        value=initial_conversation()
+    )
 )
 
 # Add file upload functionality
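
`initial_conversation()` is defined elsewhere in app.py and does not appear in this diff; for a tuple-format `gr.Chatbot`, a seed helper would plausibly look like the sketch below (the greeting text is an assumption, not the Space's actual message):

    def initial_conversation():
        # Hypothetical sketch: a list of (user, assistant) pairs; None in the
        # user slot renders an assistant-only opening message.
        return [(None, "Welcome! Upload PDFs and ask about them, or chat with the DuckDuckGo models.")]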