import os
import gradio as gr
from langchain_redis import RedisConfig, RedisVectorStore
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq
from langchain_community.embeddings import OpenAIEmbeddings
# Read API keys from the environment
openai_api_key = os.environ["OPENAI_API_KEY"]
groq_api_key = os.environ["GROQ_API_KEY"]
# Redis connection: use REDIS_URL from the environment if set, otherwise fall back to the bundled Redis Cloud instance
REDIS_URL = os.environ.get(
    "REDIS_URL",
    "redis://:KWq0uAoBYjBGErKvyMvexMqB9ep7v2Ct@redis-11044.c266.us-east-1-3.ec2.redns.redis-cloud.com:11044",
)
config = RedisConfig(
index_name="radar_data_index",
redis_url=REDIS_URL,
metadata_schema=[
{"name": "category", "type": "tag"},
{"name": "name", "type": "text"},
{"name": "address", "type": "text"},
{"name": "phone", "type": "text"},
],
)
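# The metadata schema declares the document fields stored alongside each embedding
# (category, name, address, phone); it should mirror the schema used when the index was populated.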
# Initialize OpenAI embeddings
embeddings = OpenAIEmbeddings(openai_api_key=openai_api_key)
# Initialize the Redis vector store with the OpenAI embeddings
vector_store = RedisVectorStore(embeddings, config=config)
retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 2})
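# The retriever runs a vector similarity search and returns the top 2 matching documents per query.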
# Define the language model (Groq-hosted Llama)
llm = ChatGroq(model="llama-3.2-1b-preview", api_key=groq_api_key)
# Define prompt
prompt = ChatPromptTemplate.from_messages(
[
(
"human",
"""You’re Annie, a charming media personality created by Amit Lamba, guiding people around Birmingham, Alabama. Like Rick Steves or Anthony Bourdain, you’re here to give friendly, concise, and accurate advice with a dash of humor.
Keep answers relevant, upbeat, and engaging, spotlighting partner businesses whenever possible. Be conversational, like chatting with a clever friend, and encourage follow-up questions to create a smooth flow. Make users smile and deliver a delightful, informative experience—like a perfect dessert after a great meal.
Question: {question}
Context: {context}
Answer:""",
),
]
)
def format_docs(docs):
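    """Join the retrieved documents into a single context string separated by blank lines."""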
return "\n\n".join(doc.page_content for doc in docs)
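
# LCEL pipeline: retrieve and format context, fill the prompt, call the LLM, and parse the output to plain text.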
rag_chain = (
{"context": retriever | format_docs, "question": RunnablePassthrough()}
| prompt
| llm
| StrOutputParser()
)
# Function to handle chatbot interaction
def rag_chain_response(messages, user_message):
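    """Generate an answer with the RAG chain and append the (user, bot) turn to the chat history."""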
# Generate a response using the RAG chain
response = rag_chain.invoke(user_message)
# Append the user's message and the response to the chat
messages.append((user_message, response))
# Return the updated chat and clear the input box
return messages, ""
# Define the Gradio app
with gr.Blocks(theme="rawrsor1/Everforest") as app:
chatbot = gr.Chatbot([], elem_id="RADAR", bubble_full_width=False)
question_input = gr.Textbox(label="Ask a Question", placeholder="Type your question here...")
submit_btn = gr.Button("Submit")
# Set up interaction for both Enter key and Submit button
question_input.submit(
rag_chain_response, # Function to handle input and generate response
inputs=[chatbot, question_input], # Pass current conversation state and user input
outputs=[chatbot, question_input], # Update conversation state and clear the input
api_name="api_get_response_on_enter"
)
submit_btn.click(
rag_chain_response, # Function to handle input and generate response
inputs=[chatbot, question_input], # Pass current conversation state and user input
outputs=[chatbot, question_input], # Update conversation state and clear the input
api_name="api_get_response_on_submit_button"
)
# Launch the Gradio app
app.launch(show_error=True)