bentobytes committed on
Commit
21da56d
·
1 Parent(s): 71c240e

Update conversation.py

Browse files
Files changed (1) hide show
  1. conversation.py +25 -9
conversation.py CHANGED
@@ -4,21 +4,28 @@ from langchain.vectorstores import Pinecone
4
  from langchain.embeddings.openai import OpenAIEmbeddings
5
  from langchain.memory import ConversationBufferMemory
6
  import pinecone
 
 
 
 
7
 
 
8
 
9
- def create_conversation(query: str, chat_history: list, pinecone_api_key: str, pinecone_environment: str, pinecone_index_name: str, openai_api_key: str) -> tuple:
10
  try:
11
- pinecone.init(
12
- api_key=pinecone_api_key,
13
- environment=pinecone_environment,
14
- )
15
  embeddings = OpenAIEmbeddings(
16
  openai_api_key=openai_api_key
17
  )
18
- db = Pinecone.from_existing_index(
19
- index_name=pinecone_index_name,
20
- embedding=embeddings
 
 
 
 
21
  )
 
22
  memory = ConversationBufferMemory(
23
  memory_key='chat_history',
24
  return_messages=False
@@ -26,13 +33,22 @@ def create_conversation(query: str, chat_history: list, pinecone_api_key: str, p
26
  cqa = ConversationalRetrievalChain.from_llm(
27
  llm=ChatOpenAI(temperature=0.0,
28
  openai_api_key=openai_api_key),
 
29
  retriever=db.as_retriever(),
30
  memory=memory,
31
  get_chat_history=lambda h: h,
 
 
32
  )
33
  result = cqa({'question': query, 'chat_history': chat_history})
34
  chat_history.append((query, result['answer']))
35
  return '', chat_history
 
 
 
 
 
 
36
  except Exception as e:
37
- chat_history.append((query, e))
38
  return '', chat_history
 
4
  from langchain.embeddings.openai import OpenAIEmbeddings
5
  from langchain.memory import ConversationBufferMemory
6
  import pinecone
7
+ import os
8
+ from langchain.vectorstores import Chroma
9
+ from dotenv import load_dotenv
10
+ load_dotenv()
11
 
12
+ openai_api_key=os.getenv('OPENAI_API_KEY')
13
 
14
+ def create_conversation(query: str, chat_history: list, collection_name: str) -> tuple:
15
  try:
16
+
 
 
 
17
  embeddings = OpenAIEmbeddings(
18
  openai_api_key=openai_api_key
19
  )
20
+
21
+ persist_directory = './db_metadata'
22
+
23
+ db = Chroma(
24
+ collection_name=collection_name,
25
+ persist_directory=persist_directory,
26
+ embedding_function=embeddings
27
  )
28
+
29
  memory = ConversationBufferMemory(
30
  memory_key='chat_history',
31
  return_messages=False
 
33
  cqa = ConversationalRetrievalChain.from_llm(
34
  llm=ChatOpenAI(temperature=0.0,
35
  openai_api_key=openai_api_key),
36
+ chain_type='stuff',
37
  retriever=db.as_retriever(),
38
  memory=memory,
39
  get_chat_history=lambda h: h,
40
+ verbose=True,
41
+ return_source_documents=True,
42
  )
43
  result = cqa({'question': query, 'chat_history': chat_history})
44
  chat_history.append((query, result['answer']))
45
  return '', chat_history
46
+ # except Exception as e:
47
+ # chat_history.append((query, e))
48
+ # return '', chat_history
49
+ except pinecone.exceptions.PineconeException as pe:
50
+ chat_history.append((query, f"Pinecone Error: {pe}"))
51
+ return '', chat_history
52
  except Exception as e:
53
+ chat_history.append((query, f"Unexpected Error: {e}"))
54
  return '', chat_history