File size: 3,932 Bytes
c2e85ee 7e6b008 c2e85ee f716fe7 7873eac e1bfa66 d81fcc1 31d485f 7873eac c2e85ee 7873eac c2e85ee d81fcc1 c2e85ee 7873eac 31d485f 7873eac c2e85ee 7873eac c2e85ee 31d485f c2e85ee 31d485f c2e85ee 31d485f 7873eac a3d4f4f 7873eac 7b8bd19 7873eac a3d4f4f 7873eac 7b8bd19 7873eac a3d4f4f f29f4f1 a3d4f4f c4c3f4e 0235a21 31d485f 0235a21 7873eac 0235a21 a3d4f4f 7873eac 0235a21 f29f4f1 7873eac 0235a21 e1bfa66 a3d4f4f 0825d4d 7873eac |
1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 42 43 44 45 46 47 48 49 50 51 52 53 54 55 56 57 58 59 60 61 62 63 64 65 66 67 68 69 70 71 72 73 74 75 76 77 78 79 80 81 82 83 84 85 86 87 88 89 90 |
import os
import time
from datetime import datetime

import gradio as gr
import httpx
from llama_index.core import VectorStoreIndex, SimpleDirectoryReader
from llama_index.embeddings.huggingface import HuggingFaceEmbedding
from llama_index.llms.huggingface_api import HuggingFaceInferenceAPI
from llama_parse import LlamaParse
# --- LLM and parser initialization -------------------------------------------
llm = HuggingFaceInferenceAPI(model_name="tiiuae/falcon-7b-instruct")

# SECURITY: the LlamaParse API key was hard-coded in source. Prefer the
# LLAMA_CLOUD_API_KEY environment variable; the literal is kept only as a
# backward-compatible fallback and should be rotated and removed.
parser = LlamaParse(
    api_key=os.getenv(
        "LLAMA_CLOUD_API_KEY",
        "llx-zKtsC5UBLs8DOApOsLluXMBdQhC75ea0Vs80SmPSjsmDzuhh",
    ),
    result_type="markdown",
)

# Parse every PDF under data/ with LlamaParse and load the documents.
file_extractor = {".pdf": parser}
documents = SimpleDirectoryReader("data/", file_extractor=file_extractor).load_data()

# Embed the documents, build a vector index, and expose it as a query engine.
embed_model = HuggingFaceEmbedding(model_name="BAAI/bge-small-en-v1.5")
vector_index = VectorStoreIndex.from_documents(documents, embed_model=embed_model)
query_engine = vector_index.as_query_engine(llm=llm)

# System prompt describing the assistant's role.
# NOTE(review): this prompt is never passed to the LLM or the query engine
# anywhere in this file, so the model never sees it — wire it in (e.g. via a
# prompt template or the LLM's system-prompt argument) or remove it.
system_prompt = """
You are an AI assistant designed to answer questions about the Hund Ecosystem based on the uploaded PDF document.
Your primary responsibility is to provide detailed, accurate, and clear answers to user queries related to the content of the document.
For any question that is not related to the content of the document, kindly ask the user to refer to the Hund Ecosystem.
Please ensure to be polite and professional in your responses. If the question cannot be answered based on the document, kindly guide the user accordingly.
"""
# Query Retry Logic
def query_with_retry(query, max_retries=3, wait_time=5):
    """Run *query* against the module-level ``query_engine``, retrying timeouts.

    Args:
        query: Natural-language question to send to the query engine.
        max_retries: Maximum number of attempts before giving up.
        wait_time: Seconds to sleep between attempts after a timeout.

    Returns:
        The engine's response object, or ``None`` if a non-timeout error
        occurred on any attempt.

    Raises:
        httpx.ReadTimeout: If every attempt times out.
    """
    for attempt in range(max_retries):
        try:
            start_time = datetime.now()
            response = query_engine.query(query)
            end_time = datetime.now()
            duration = (end_time - start_time).total_seconds()
            print(f"Query completed in {duration:.2f} seconds.\n {response}")
            return response
        except httpx.ReadTimeout:
            # Transient network timeout: back off and retry; re-raise on the
            # final attempt so callers can see a persistent timeout.
            if attempt < max_retries - 1:
                print(f"Timeout occurred. Retrying in {wait_time} seconds...")
                time.sleep(wait_time)
            else:
                raise
        except Exception as e:
            # Non-timeout errors are not retried; report and give up.
            # (Explicit return instead of the original bare `break`, which
            # fell off the end of the function and returned None implicitly.)
            print(f"An error occurred: {e}")
            return None
    return None
# Manage user messages and bot responses
def respond(message, history):
try:
# Run the query engine with the user message
bot_message = query_engine.query(message)
print(f"\n{datetime.now()}:{llm.model_name}:: {message} --> {str(bot_message)}\n")
# Add user's message and bot's response to history
history.append((message, str(bot_message)))
# Clear the input field after sending the message
return history, "" # Clear the input field for the next question
except Exception as e:
if str(e) == "'NoneType' object has no attribute 'as_query_engine'":
return "Please upload a file.", history
return f"An error occurred: {e}", history
# UI Setup
# --- UI setup ----------------------------------------------------------------
with gr.Blocks(theme=gr.themes.Soft(font=[gr.themes.GoogleFont("Roboto Mono")])) as demo:
    gr.Markdown("# HundAI Chatbot🤖")
    with gr.Row():
        with gr.Column(scale=3):
            chatbot = gr.Chatbot(height=500)  # conversation history display
            user_message = gr.Textbox(
                placeholder="Ask me questions about the Hund Ecosystem!",
                container=False,
            )
            submit_btn = gr.Button("Send")
            clear_btn = gr.Button("Clear Chat")

    # Send on button click and also on pressing Enter in the textbox.
    submit_btn.click(fn=respond, inputs=[user_message, chatbot], outputs=[chatbot, user_message])
    user_message.submit(fn=respond, inputs=[user_message, chatbot], outputs=[chatbot, user_message])

    # BUG FIX: the clear handler previously listed `chatbot` twice in
    # outputs and returned [None, []]; one output component receiving an
    # empty history is the correct wiring.
    clear_btn.click(lambda: [], outputs=[chatbot])

# Launch the demo only when run as a script.
if __name__ == "__main__":
    demo.launch()
|