Orami01 committed
Commit dc79961 · 1 Parent(s): ddda3d2

Update app.py

Files changed (1)
  1. app.py +14 -5
app.py CHANGED
@@ -7,19 +7,30 @@ from langchain.llms import CTransformers
 from langchain.memory import ConversationBufferMemory
 from langchain.chains import ConversationalRetrievalChain
 import sys
+import tempfile
 # Initialize the CSVLoader to load the uploaded CSV file
 from langchain.document_loaders.csv_loader import CSVLoader
-# Allow users to upload a CSV file
-uploaded_file = st.sidebar.file_uploader("Upload your Data", type="csv")
+
+DB_FAISS_PATH = 'vectorstore/db_faiss'
+
+from transformers import pipeline
+
+pipe = pipeline("text-generation",model="mistralai/Mistral-7B-v0.1",model_type="llama",max_new_tokens=512,temperature=0.1 )
+llm=pipe
+
+
 
 # Display the title of the web page
 st.title("Chat with CSV using open source LLM Inference Point 🦙🦜")
 # Display a markdown message with additional information
 st.markdown("<h3 style='text-align: center; color: white;'>Built by <a href='https://github.com/AIAnytime'>AI Anytime with ❤️ </a></h3>", unsafe_allow_html=True)
 
+# Allow users to upload a CSV file
+uploaded_file = st.sidebar.file_uploader("Upload your Data", type="csv")
+
 if uploaded_file:
     # Initialize the CSVLoader to load the uploaded CSV file
-    import tempfile
+
     with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
         tmp_file.write(uploaded_file.getvalue())
         tmp_file_path = tmp_file.name
@@ -103,9 +114,7 @@ docsearch.save_local(DB_FAISS_PATH)
 
 #print("Result", docs)
 
-from transformers import pipeline
 
-pipe = pipeline("text-generation",model="mistralai/Mistral-7B-v0.1",model_type="llama",max_new_tokens=512,temperature=0.1 )
 
 qa = ConversationalRetrievalChain.from_llm(llm, retriever=docsearch.as_retriever())
 
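
Note (not part of the commit): after this change, llm is the raw transformers pipeline object (llm=pipe), and that object is passed straight to ConversationalRetrievalChain.from_llm, which normally expects a LangChain LLM wrapper; model_type="llama" also looks like a CTransformers option rather than a transformers.pipeline argument. Below is a minimal sketch of the more conventional wiring, assuming LangChain's HuggingFacePipeline adapter is available; the helper name build_chain is made up for illustration.

# Sketch only, not part of this commit: wrap the transformers pipeline in
# LangChain's HuggingFacePipeline adapter so the chain receives a LangChain LLM.
from transformers import pipeline
from langchain.llms import HuggingFacePipeline
from langchain.chains import ConversationalRetrievalChain

# Same model and generation settings as app.py; model_type is omitted here
# because it is a CTransformers option, not a transformers.pipeline argument.
pipe = pipeline(
    "text-generation",
    model="mistralai/Mistral-7B-v0.1",
    max_new_tokens=512,
    temperature=0.1,
)
llm = HuggingFacePipeline(pipeline=pipe)

def build_chain(docsearch):
    # docsearch is the FAISS vector store built earlier in app.py
    return ConversationalRetrievalChain.from_llm(llm, retriever=docsearch.as_retriever())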