Commit: Update conversation.py
Browse files — conversation.py (+5 lines, −6 lines)
conversation.py
CHANGED
@@ -1,10 +1,12 @@
|
|
1 |
import os
|
2 |
|
3 |
-
from langchain.embeddings import OpenAIEmbeddings
|
4 |
from langchain.vectorstores import Chroma
|
5 |
from langchain.chat_models import ChatOpenAI
|
6 |
from langchain.chains import ConversationalRetrievalChain
|
7 |
from langchain.memory import ConversationBufferMemory
|
|
|
|
|
8 |
from dotenv import load_dotenv
|
9 |
load_dotenv()
|
10 |
|
@@ -12,10 +14,7 @@ def create_conversation() -> ConversationalRetrievalChain:
|
|
12 |
|
13 |
persist_directory = 'db'
|
14 |
|
15 |
-
embeddings =
|
16 |
-
openai_api_key=os.getenv('OPENAI_API_KEY')
|
17 |
-
)
|
18 |
-
|
19 |
db = Chroma(
|
20 |
persist_directory=persist_directory,
|
21 |
embedding_function=embeddings
|
@@ -27,7 +26,7 @@ def create_conversation() -> ConversationalRetrievalChain:
|
|
27 |
)
|
28 |
|
29 |
qa = ConversationalRetrievalChain.from_llm(
|
30 |
-
llm=
|
31 |
chain_type='stuff',
|
32 |
retriever=db.as_retriever(),
|
33 |
memory=memory,
|
|
|
1 |
import os
|
2 |
|
3 |
+
from langchain.embeddings import OpenAIEmbeddings, HuggingFaceInstructEmbeddings
|
4 |
from langchain.vectorstores import Chroma
|
5 |
from langchain.chat_models import ChatOpenAI
|
6 |
from langchain.chains import ConversationalRetrievalChain
|
7 |
from langchain.memory import ConversationBufferMemory
|
8 |
+
from langchain.llms import HuggingFaceHub
|
9 |
+
|
10 |
from dotenv import load_dotenv
|
11 |
load_dotenv()
|
12 |
|
|
|
14 |
|
15 |
persist_directory = 'db'
|
16 |
|
17 |
+
embeddings = HuggingFaceInstructEmbeddings(model_name="hkunlp/instructor-xl")
|
|
|
|
|
|
|
18 |
db = Chroma(
|
19 |
persist_directory=persist_directory,
|
20 |
embedding_function=embeddings
|
|
|
26 |
)
|
27 |
|
28 |
qa = ConversationalRetrievalChain.from_llm(
|
29 |
+
llm=HuggingFaceHub(repo_id="google/flan-t5-xxl", model_kwargs={"temperature":0.5, "max_length":512}),
|
30 |
chain_type='stuff',
|
31 |
retriever=db.as_retriever(),
|
32 |
memory=memory,
|