Spaces:
Sleeping
Sleeping
Create functions.py
Browse files- functions.py +42 -0
functions.py
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
def create_retriever_from_chroma(vectorstore_path="./docs/chroma/", search_type='mmr', k=7, chunk_size=300, chunk_overlap=30, lambda_mult=0.7):
    """Load (or build) a persisted Chroma vector store and return a retriever over it.

    If *vectorstore_path* exists and is non-empty, the persisted store is
    reopened. Otherwise the store is built from the ``./data/*.txt`` files,
    split into token-sized chunks, and persisted to *vectorstore_path*.

    Args:
        vectorstore_path: Directory where the Chroma store is persisted.
        search_type: Retrieval strategy passed to ``as_retriever``
            (e.g. ``'mmr'`` or ``'similarity'``).
        k: Number of documents the retriever returns per query.
        chunk_size: Token chunk size used only when the store is (re)built.
        chunk_overlap: Token overlap between chunks, build-time only.
        lambda_mult: Relevance/diversity trade-off for MMR search
            (1 = pure relevance, 0 = maximum diversity); only forwarded
            when ``search_type == 'mmr'``.

    Returns:
        A LangChain retriever backed by the Chroma vector store.
    """
    model_name = "Alibaba-NLP/gte-large-en-v1.5"
    # BUG FIX: the original passed the *string* 'False' here, which is truthy,
    # so remote code was effectively trusted anyway — gte-large-en-v1.5 ships
    # custom modeling code and cannot load without it. Use a real boolean so
    # the setting says what it does.
    model_kwargs = {'device': 'cpu',
                    'trust_remote_code': True}
    encode_kwargs = {'normalize_embeddings': True}
    embeddings = HuggingFaceEmbeddings(
        model_name=model_name,
        model_kwargs=model_kwargs,
        encode_kwargs=encode_kwargs,
    )

    # Reuse the persisted store only when the directory exists AND is non-empty;
    # an empty directory means a previous build never completed.
    if os.path.exists(vectorstore_path) and os.listdir(vectorstore_path):
        vectorstore = Chroma(persist_directory=vectorstore_path,
                             embedding_function=embeddings)
    else:
        st.write("Vector store doesn't exist and will be created now")
        loader = DirectoryLoader('./data/', glob="./*.txt", loader_cls=TextLoader)
        docs = loader.load()

        # Token-based splitting; separators are regexes (note the Jupyter
        # "In [n]" prompt pattern) so exported notebooks split cleanly too.
        text_splitter = RecursiveCharacterTextSplitter.from_tiktoken_encoder(
            chunk_size=chunk_size,
            chunk_overlap=chunk_overlap,
            separators=["\n\n \n\n", "\n\n\n", "\n\n", r"In \[[0-9]+\]", r"\n+", r"\s+"],
            is_separator_regex=True,
        )
        split_docs = text_splitter.split_documents(docs)

        vectorstore = Chroma.from_documents(
            documents=split_docs,
            embedding=embeddings,
            persist_directory=vectorstore_path,
        )

    # BUG FIX: lambda_mult was accepted but never used. Forward it for MMR
    # searches so the parameter actually influences retrieval; other search
    # types do not accept it, so only add it conditionally.
    search_kwargs = {"k": k}
    if search_type == 'mmr':
        search_kwargs["lambda_mult"] = lambda_mult
    retriever = vectorstore.as_retriever(search_type=search_type,
                                         search_kwargs=search_kwargs)
    return retriever
|