Orami01 commited on
Commit
1ad8ba9
·
1 Parent(s): 42039d6

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +22 -2
app.py CHANGED
@@ -1,5 +1,6 @@
1
  import streamlit as st
2
  from streamlit_chat import message
 
3
  from langchain.document_loaders.csv_loader import CSVLoader
4
  from langchain.text_splitter import RecursiveCharacterTextSplitter
5
  from langchain.embeddings import HuggingFaceEmbeddings
@@ -9,18 +10,23 @@ from langchain.memory import ConversationBufferMemory
9
  from langchain.chains import ConversationalRetrievalChain
10
  import sys
11
 
 
12
  st.title("Chat with CSV using open source LLM Inference Point 🦙🦜")
 
13
  st.markdown("<h3 style='text-align: center; color: white;'>Built by <a href='https://github.com/AIAnytime'>AI Anytime with ❤️ </a></h3>", unsafe_allow_html=True)
14
 
 
15
  uploaded_file = st.sidebar.file_uploader("Upload your Data", type="csv")
16
 
17
  if uploaded_file :
 
18
  #use tempfile because CSVLoader only accepts a file_path
19
  with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
20
  tmp_file.write(uploaded_file.getvalue())
21
  tmp_file_path = tmp_file.name
22
 
23
  db = DB_FAISS_PATH = "vectorstore/db_faiss"
 
24
  loader = CSVLoader(file_path="data/2019.csv", encoding="utf-8", csv_args={'delimiter': ','})
25
  data = loader.load()
26
  db.save_local(DB_FAISS_PATH)
@@ -29,17 +35,25 @@ if uploaded_file :
29
  chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())
30
 
31
  def conversational_chat(query):
 
32
  result = chain({"question": query, "chat_history": st.session_state['history']})
 
33
  st.session_state['history'].append((query, result["answer"]))
34
  return result["answer"]
35
 
 
36
  if 'history' not in st.session_state:
 
37
  st.session_state['history'] = []
38
 
 
39
  if 'generated' not in st.session_state:
 
40
  st.session_state['generated'] = ["Hello ! Ask me anything about " + uploaded_file.name + " 🤗"]
41
 
 
42
  if 'past' not in st.session_state:
 
43
  st.session_state['past'] = ["Hey ! 👋"]
44
 
45
  #container for the chat history
@@ -56,13 +70,19 @@ if uploaded_file :
56
  if submit_button and user_input:
57
  output = conversational_chat(user_input)
58
 
 
59
  st.session_state['past'].append(user_input)
 
60
  st.session_state['generated'].append(output)
61
 
 
62
  if st.session_state['generated']:
63
  with response_container:
 
64
  for i in range(len(st.session_state['generated'])):
 
65
  message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
 
66
  message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")
67
 
68
  # Split the text into Chunks
@@ -95,8 +115,8 @@ qa = ConversationalRetrievalChain.from_llm(llm, retriever=docsearch.as_retriever
95
 
96
  # Insert a chat message container.
97
  with st.chat_message("user"):
98
- st.write("Hello 👋")
99
- st.line_chart(np.random.randn(30, 3))
100
 
101
  # Display a chat input widget.
102
  st.chat_input("Say something")
 
1
  import streamlit as st
2
  from streamlit_chat import message
3
+ # Initialize the CSVLoader to load the uploaded CSV file
4
  from langchain.document_loaders.csv_loader import CSVLoader
5
  from langchain.text_splitter import RecursiveCharacterTextSplitter
6
  from langchain.embeddings import HuggingFaceEmbeddings
 
10
  from langchain.chains import ConversationalRetrievalChain
11
  import sys
12
 
13
+ # Display the title of the web page
14
  st.title("Chat with CSV using open source LLM Inference Point 🦙🦜")
15
+ # Display a markdown message with additional information
16
  st.markdown("<h3 style='text-align: center; color: white;'>Built by <a href='https://github.com/AIAnytime'>AI Anytime with ❤️ </a></h3>", unsafe_allow_html=True)
17
 
18
+ # Allow users to upload a CSV file
19
  uploaded_file = st.sidebar.file_uploader("Upload your Data", type="csv")
20
 
21
  if uploaded_file :
22
+ # Initialize the CSVLoader to load the uploaded CSV file
23
  #use tempfile because CSVLoader only accepts a file_path
24
  with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
25
  tmp_file.write(uploaded_file.getvalue())
26
  tmp_file_path = tmp_file.name
27
 
28
  db = DB_FAISS_PATH = "vectorstore/db_faiss"
29
+ # Initialize the CSVLoader to load the uploaded CSV file
30
  loader = CSVLoader(file_path="data/2019.csv", encoding="utf-8", csv_args={'delimiter': ','})
31
  data = loader.load()
32
  db.save_local(DB_FAISS_PATH)
 
35
  chain = ConversationalRetrievalChain.from_llm(llm=llm, retriever=db.as_retriever())
36
 
37
  def conversational_chat(query):
38
+ # Maintain and display the chat history
39
  result = chain({"question": query, "chat_history": st.session_state['history']})
40
+ # Maintain and display the chat history
41
  st.session_state['history'].append((query, result["answer"]))
42
  return result["answer"]
43
 
44
+ # Maintain and display the chat history
45
  if 'history' not in st.session_state:
46
+ # Maintain and display the chat history
47
  st.session_state['history'] = []
48
 
49
+ # Maintain and display the chat history
50
  if 'generated' not in st.session_state:
51
+ # Maintain and display the chat history
52
  st.session_state['generated'] = ["Hello ! Ask me anything about " + uploaded_file.name + " 🤗"]
53
 
54
+ # Maintain and display the chat history
55
  if 'past' not in st.session_state:
56
+ # Maintain and display the chat history
57
  st.session_state['past'] = ["Hey ! 👋"]
58
 
59
  #container for the chat history
 
70
  if submit_button and user_input:
71
  output = conversational_chat(user_input)
72
 
73
+ # Maintain and display the chat history
74
  st.session_state['past'].append(user_input)
75
+ # Maintain and display the chat history
76
  st.session_state['generated'].append(output)
77
 
78
+ # Maintain and display the chat history
79
  if st.session_state['generated']:
80
  with response_container:
81
+ # Maintain and display the chat history
82
  for i in range(len(st.session_state['generated'])):
83
+ # Maintain and display the chat history
84
  message(st.session_state["past"][i], is_user=True, key=str(i) + '_user', avatar_style="big-smile")
85
+ # Maintain and display the chat history
86
  message(st.session_state["generated"][i], key=str(i), avatar_style="thumbs")
87
 
88
  # Split the text into Chunks
 
115
 
116
  # Insert a chat message container.
117
  with st.chat_message("user"):
118
+ st.write("Hello 👋")
119
+ st.line_chart(np.random.randn(30, 3))
120
 
121
  # Display a chat input widget.
122
  st.chat_input("Say something")