"""
chatbot.py

Module to create a chatbot using RetrievalQA and the ChromaDB embeddings.
"""
from langchain_openai import OpenAI
from langchain.chains import RetrievalQA
def create_chatbot(vector_store):
    """Create a retrieval-based question-answering chatbot.

    Args:
        vector_store (Chroma): Vector store with document embeddings.

    Returns:
        RetrievalQA: A retrieval-based QA system whose responses include
        the source documents used to produce each answer.
    """
    llm = OpenAI(temperature=0.5)
    # BUG FIX: per-query result count must be passed via search_kwargs;
    # a bare `k=3` kwarg is not part of the retriever's accepted fields
    # and is rejected or ignored, so the intended top-3 limit never applied.
    retriever = vector_store.as_retriever(
        search_type="mmr",
        search_kwargs={"k": 3},
    )
    qa = RetrievalQA.from_chain_type(
        llm=llm,
        chain_type="stuff",  # "stuff" concatenates retrieved docs into one prompt
        retriever=retriever,
        return_source_documents=True,
    )
    return qa
def ask_question(qa, query):
    """Query the chatbot and return the formatted answer.

    Args:
        qa (RetrievalQA): The QA system (must expose ``invoke``).
        query (str): The user query.

    Returns:
        str: The answer, followed by source information when the QA
        system returned source documents; an ``Error: ...`` string if
        the query raised.
    """
    try:
        response = qa.invoke({"query": query})
        answer = response.get('result', 'No answer found.')
        sources = response.get('source_documents', [])
        result = f"Answer: {answer}\n"
        # BUG FIX: `sources` was collected but never reported, despite the
        # docstring promising source information. Append them when present.
        if sources:
            listing = "\n".join(
                # Each source document carries a `metadata` dict; fall back
                # to "unknown" when no 'source' key is set.
                f"- {doc.metadata.get('source', 'unknown')}" for doc in sources
            )
            result += f"Sources:\n{listing}\n"
        return result
    except Exception as e:
        # Best-effort boundary: report the failure to the caller as text
        # instead of propagating, so a chat loop keeps running.
        print(f"Error processing query '{query}': {e}")
        return f"Error: {e}"