from langchain.chains import ConversationalRetrievalChain
from langchain.chat_models import ChatOpenAI
from langchain.vectorstores import Chroma
from langchain.embeddings.openai import OpenAIEmbeddings
from langchain.memory import ConversationBufferMemory
import pinecone
import os
from dotenv import load_dotenv

load_dotenv()
openai_api_key = os.getenv('OPENAI_API_KEY')
def create_conversation(query: str, chat_history: list, collection_name: str) -> tuple:
    """Answer `query` against the named Chroma collection and append the turn to `chat_history`."""
    try:
        embeddings = OpenAIEmbeddings(
            openai_api_key=openai_api_key
        )

        # Open the persisted Chroma collection that holds the document embeddings.
        persist_directory = './db_metadata'
        db = Chroma(
            collection_name=collection_name,
            persist_directory=persist_directory,
            embedding_function=embeddings
        )

        # The chain reads its running history from this memory object under the
        # 'chat_history' key; the `chat_history` list below is kept for the UI.
        memory = ConversationBufferMemory(
            memory_key='chat_history',
            return_messages=False
        )

        cqa = ConversationalRetrievalChain.from_llm(
            llm=ChatOpenAI(temperature=0.0, openai_api_key=openai_api_key),
            chain_type='stuff',
            retriever=db.as_retriever(),
            memory=memory,
            get_chat_history=lambda h: h,
            verbose=True,
            return_source_documents=False,
        )

        result = cqa({'question': query, 'chat_history': chat_history})
        chat_history.append((query, result['answer']))
        return '', chat_history
    # Surface errors in the chat window instead of crashing the app.
    except pinecone.exceptions.PineconeException as pe:
        # Only reached if a Pinecone-backed retriever is wired in instead of Chroma.
        chat_history.append((query, f"Pinecone Error: {pe}"))
        return '', chat_history
    except Exception as e:
        chat_history.append((query, f"Unexpected Error: {e}"))
        return '', chat_history