Draken007 committed on
Commit
481ebeb
Β·
verified Β·
1 Parent(s): 4b501fd

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +75 -75
app.py CHANGED
@@ -1,76 +1,76 @@
1
- import streamlit as st
2
- from streamlit_chat import message
3
- from langchain.chains import ConversationalRetrievalChain
4
- from langchain_community.document_loaders import PyPDFLoader, DirectoryLoader
5
- from langchain_community.embeddings import HuggingFaceEmbeddings
6
- from langchain_community.llms import CTransformers
7
- from langchain.text_splitter import RecursiveCharacterTextSplitter
8
- from langchain_community.vectorstores import FAISS
9
- from langchain.memory import ConversationBufferMemory
10
-
11
- #load the pdf files from the path
12
- loader = DirectoryLoader('data/',glob="*.pdf",loader_cls=PyPDFLoader)
13
- documents = loader.load()
14
-
15
- #split text into chunks
16
- text_splitter = RecursiveCharacterTextSplitter(chunk_size=500,chunk_overlap=50)
17
- text_chunks = text_splitter.split_documents(documents)
18
-
19
- #create embeddings
20
- embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
21
- model_kwargs={'device':"cpu"})
22
-
23
- # #vectorstore
24
- vector_store = FAISS.from_documents(text_chunks,embeddings)
25
-
26
- # #create llm
27
- llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_0.bin",model_type="llama",
28
- config={'max_new_tokens':128,'temperature':0.01})
29
-
30
- memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
31
-
32
- chain = ConversationalRetrievalChain.from_llm(llm=llm,chain_type='stuff',
33
- retriever=vector_store.as_retriever(search_kwargs={"k":2}),
34
- memory=memory)
35
-
36
- st.title("Geo ChatBot ")
37
- def conversation_chat(query):
38
- result = chain({"question": query, "chat_history": st.session_state['history']})
39
- st.session_state['history'].append((query, result["answer"]))
40
- return result["answer"]
41
-
42
- def initialize_session_state():
43
- if 'history' not in st.session_state:
44
- st.session_state['history'] = []
45
-
46
- if 'generated' not in st.session_state:
47
- st.session_state['generated'] = ["Hello! Ask me anything about πŸ€—"]
48
-
49
- if 'past' not in st.session_state:
50
- st.session_state['past'] = ["Hey! πŸ‘‹"]
51
-
52
- def display_chat_history():
53
- reply_container = st.container()
54
- container = st.container()
55
-
56
- with container:
57
- with st.form(key='my_form', clear_on_submit=True):
58
- user_input = st.text_input("Question:", placeholder="Ask about geology", key='input')
59
- submit_button = st.form_submit_button(label='Send')
60
-
61
- if submit_button and user_input:
62
- output = conversation_chat(user_input)
63
-
64
- st.session_state['past'].append(user_input)
65
- st.session_state['generated'].append(output)
66
-
67
- if st.session_state['generated']:
68
- with reply_container:
69
- for i in range(len(st.session_state['generated'])):
70
- message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="thumbs")
71
- message(st.session_state["generated"][i], key=str(i), avatar_style="fun-emoji")
72
-
73
- # Initialize session state
74
- initialize_session_state()
75
- # Display chat history
76
  display_chat_history()
 
1
+ import streamlit as st
2
+ # from streamlit_chat import message
3
+ from langchain.chains import ConversationalRetrievalChain
4
+ from langchain_community.document_loaders import PyPDFLoader, DirectoryLoader
5
+ from langchain_community.embeddings import HuggingFaceEmbeddings
6
+ from langchain_community.llms import CTransformers
7
+ from langchain.text_splitter import RecursiveCharacterTextSplitter
8
+ from langchain_community.vectorstores import FAISS
9
+ from langchain.memory import ConversationBufferMemory
10
+
11
+ #load the pdf files from the path
12
+ loader = DirectoryLoader('data/',glob="*.pdf",loader_cls=PyPDFLoader)
13
+ documents = loader.load()
14
+
15
+ #split text into chunks
16
+ text_splitter = RecursiveCharacterTextSplitter(chunk_size=500,chunk_overlap=50)
17
+ text_chunks = text_splitter.split_documents(documents)
18
+
19
+ #create embeddings
20
+ embeddings = HuggingFaceEmbeddings(model_name="sentence-transformers/all-MiniLM-L6-v2",
21
+ model_kwargs={'device':"cpu"})
22
+
23
+ # #vectorstore
24
+ vector_store = FAISS.from_documents(text_chunks,embeddings)
25
+
26
+ # #create llm
27
+ llm = CTransformers(model="llama-2-7b-chat.ggmlv3.q4_0.bin",model_type="llama",
28
+ config={'max_new_tokens':128,'temperature':0.01})
29
+
30
+ memory = ConversationBufferMemory(memory_key="chat_history", return_messages=True)
31
+
32
+ chain = ConversationalRetrievalChain.from_llm(llm=llm,chain_type='stuff',
33
+ retriever=vector_store.as_retriever(search_kwargs={"k":2}),
34
+ memory=memory)
35
+
36
+ st.title("Geo ChatBot ")
37
def conversation_chat(query):
    """Run one retrieval-augmented QA turn.

    Sends *query* plus the accumulated (question, answer) history to the
    chain, appends the new turn to ``st.session_state['history']``, and
    returns the answer text.
    """
    history = st.session_state['history']
    result = chain({"question": query, "chat_history": history})
    answer = result["answer"]
    history.append((query, answer))
    return answer
def initialize_session_state():
    """Seed the Streamlit session keys used by the chat UI on first run.

    Keys that already exist are left untouched, so reruns of the script
    keep the accumulated conversation.
    """
    defaults = {
        'history': [],                                    # (question, answer) tuples
        'generated': ["Hello! Ask me anything about 🤗"],  # bot messages
        'past': ["Hey! 👋"],                              # user messages
    }
    for key, value in defaults.items():
        if key not in st.session_state:
            st.session_state[key] = value
def display_chat_history():
    """Render the chat UI: an input form plus the accumulated transcript.

    Bug fix: this commit commented out the top-level
    ``from streamlit_chat import message`` while the ``message(...)`` calls
    below remained, so displaying any transcript raised ``NameError``.
    The import is restored here at function scope.
    """
    # Restores the dependency removed from the top of the file.
    from streamlit_chat import message

    reply_container = st.container()  # transcript is drawn above the form
    container = st.container()

    with container:
        # clear_on_submit resets the text box after each question.
        with st.form(key='my_form', clear_on_submit=True):
            user_input = st.text_input("Question:", placeholder="Ask about geology", key='input')
            submit_button = st.form_submit_button(label='Send')

        if submit_button and user_input:
            output = conversation_chat(user_input)

            st.session_state['past'].append(user_input)
            st.session_state['generated'].append(output)

    if st.session_state['generated']:
        with reply_container:
            # 'past' and 'generated' are appended in lockstep, so index i
            # pairs each user message with its answer.
            for i in range(len(st.session_state['generated'])):
                message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="thumbs")
                message(st.session_state["generated"][i], key=str(i), avatar_style="fun-emoji")
# Seed the session keys before rendering anything that reads them.
initialize_session_state()
# Draw the input form and the conversation transcript.
display_chat_history()