import os
import gradio as gr
from langchain_redis import RedisConfig, RedisVectorStore
from langchain_core.output_parsers import StrOutputParser
from langchain_core.prompts import ChatPromptTemplate
from langchain_core.runnables import RunnablePassthrough
from langchain_groq import ChatGroq
from langchain_openai import OpenAIEmbeddings
import logging
from huggingface_hub import login
hf_token = os.getenv("HF_TOKEN")
if hf_token is None:
    print("Please set your Hugging Face token in the environment variables.")
else:
    login(token=hf_token)
logging.basicConfig(level=logging.DEBUG)
# Read API keys from the environment
openai_api_key = os.environ["OPENAI_API_KEY"]
groq_api_key = os.environ["GROQ_API_KEY"]
# Define Redis configuration
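# Connection string format: redis://:<password>@<host>:<port>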
REDIS_URL = "redis://:KWq0uAoBYjBGErKvyMvexMqB9ep7v2Ct@redis-11044.c266.us-east-1-3.ec2.redns.redis-cloud.com:11044"
config = RedisConfig(
    index_name="radar_data_index",
    redis_url=REDIS_URL,
    metadata_schema=[
        {"name": "category", "type": "tag"},
        {"name": "name", "type": "text"},
        {"name": "address", "type": "text"},
        {"name": "phone", "type": "text"},
    ],
)
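# The metadata fields above should match the attributes stored on the indexed documents;
# "tag" fields are filtered by exact match, "text" fields are full-text searchable.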
# Initialize OpenAI embeddings
embeddings = OpenAIEmbeddings(api_key=os.environ["OPENAI_API_KEY"])
# Initialize the Redis vector store with the OpenAI embeddings
vector_store = RedisVectorStore(embeddings, config=config)
retriever = vector_store.as_retriever(search_type="similarity", search_kwargs={"k": 5})
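# The retriever runs a vector similarity search against the Redis index and returns
# the 5 most similar document chunks for each query.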
# Define the language model
llm = ChatGroq(model="llama-3.2-1b-preview")
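# ChatGroq reads GROQ_API_KEY from the environment; the model is Groq's hosted
# Llama 3.2 1B preview.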
# Define prompt
prompt = ChatPromptTemplate.from_messages(
    [
        (
            "human",
            """You’re Annie, a country music voicebot and media personality created by Amit Lamba, guiding folks around Birmingham, Alabama.
Provide complete, accurate, and relevant information, ensuring no key details are missed. Keep responses concise and engaging, with a touch of Southern charm and humor, strictly avoid irrelevant content, and give answers that encourage follow-up questions.
Question: {question}
Context: {context}
Answer:""",
        ),
    ]
)
def format_docs(docs):
    # Join retrieved document chunks into a single context string
    return "\n\n".join(doc.page_content for doc in docs)
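# LCEL pipeline: the retriever fetches and formats the context, the raw question is
# passed through unchanged, then the prompt, LLM, and output parser run in sequence.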
rag_chain = (
    {"context": retriever | format_docs, "question": RunnablePassthrough()}
    | prompt
    | llm
    | StrOutputParser()
)
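# Example usage (hypothetical question):
#   answer = rag_chain.invoke("Where can I hear live country music in Birmingham?")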
# Handle a chatbot turn: run the RAG chain and update the conversation history
def rag_chain_response(messages, user_message):
    # Generate a response using the RAG chain
    response = rag_chain.invoke(user_message)
    # Append the user's message and the response to the chat history
    messages.append((user_message, response))
    # Return the updated chat history and clear the input box
    return messages, ""
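# Note: rag_chain_response returns history as (user, assistant) tuples, which is
# the format gr.Chatbot expects by default.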
# Define the Gradio app
with gr.Blocks(theme="rawrsor1/Everforest") as app:
    chatbot = gr.Chatbot([], elem_id="RADAR", bubble_full_width=False)
    question_input = gr.Textbox(label="Ask a Question", placeholder="Type your question here...")
    submit_btn = gr.Button("Submit")
    # Set up the interaction for both the Enter key and the Submit button
    question_input.submit(
        rag_chain_response,  # Function that handles input and generates the response
        inputs=[chatbot, question_input],  # Pass the current conversation state and user input
        outputs=[chatbot, question_input],  # Update the conversation state and clear the input
        api_name="api_get_response_on_enter"
    )
    submit_btn.click(
        rag_chain_response,  # Function that handles input and generates the response
        inputs=[chatbot, question_input],  # Pass the current conversation state and user input
        outputs=[chatbot, question_input],  # Update the conversation state and clear the input
        api_name="api_get_response_on_submit_button"
    )
# Launch the Gradio app
app.launch(show_error=True)