Orami01 committed on
Commit 0766a90 · 1 Parent(s): 1afbdbe

Create app.py

Files changed (1)
  1. app.py +53 -0
app.py ADDED
@@ -0,0 +1,53 @@
+ from langchain.document_loaders.csv_loader import CSVLoader
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
+ from langchain.embeddings import HuggingFaceEmbeddings
+ from langchain.vectorstores import FAISS
+ from langchain.llms import CTransformers
+ from langchain.memory import ConversationBufferMemory
+ from langchain.chains import ConversationalRetrievalChain
+ import sys
+
+ DB_FAISS_PATH = "vectorstore/db_faiss"
+
+ # Load the CSV file, producing one document per row
+ loader = CSVLoader(file_path="data/2019.csv", encoding="utf-8", csv_args={'delimiter': ','})
+ data = loader.load()
+ print(data)
+
+ # Split the text into chunks
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=500, chunk_overlap=20)
+ text_chunks = text_splitter.split_documents(data)
+ print(len(text_chunks))
+
+ # Download the Sentence Transformers embedding model from Hugging Face
+ embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
+
+ # Convert the text chunks into embeddings and save them to a FAISS knowledge base
+ docsearch = FAISS.from_documents(text_chunks, embeddings)
+ docsearch.save_local(DB_FAISS_PATH)
+
+ #query = "What is the value of GDP per capita of Finland provided in the data?"
+ #docs = docsearch.similarity_search(query, k=3)
+ #print("Result", docs)
+
+ # Load the quantized Llama 2 chat model via ctransformers
+ llm = CTransformers(model="models/llama-2-7b-chat.ggmlv3.q4_0.bin",
+                     model_type="llama",
+                     max_new_tokens=512,
+                     temperature=0.1)
+
+ qa = ConversationalRetrievalChain.from_llm(llm, retriever=docsearch.as_retriever())
+
+ # Interactive query loop; chat history accumulates so follow-up questions keep their context
+ chat_history = []
+ while True:
+     #query = "What is the value of GDP per capita of Finland provided in the data?"
+     query = input("Input Prompt: ")
+     if query == 'exit':
+         print('Exiting')
+         sys.exit()
+     if query == '':
+         continue
+     result = qa({"question": query, "chat_history": chat_history})
+     print("Response: ", result['answer'])
+     chat_history.append((query, result['answer']))
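The script persists the index with save_local but still rebuilds it from the CSV on every run. A minimal sketch (not part of this commit) of how the saved index at DB_FAISS_PATH could be reloaded in a later session, assuming the same langchain version and embedding model as above; the example query string is only illustrative:

# Sketch: reuse the persisted FAISS index instead of re-embedding the CSV
from langchain.embeddings import HuggingFaceEmbeddings
from langchain.vectorstores import FAISS

embeddings = HuggingFaceEmbeddings(model_name='sentence-transformers/all-MiniLM-L6-v2')
docsearch = FAISS.load_local("vectorstore/db_faiss", embeddings)

# Quick check that the reloaded index answers similarity queries
print(docsearch.similarity_search("GDP per capita of Finland", k=3))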