Update app.py
app.py
CHANGED
@@ -1,6 +1,5 @@
 import streamlit as st
 import pandas as pd
-import io
 import os
 from dotenv import load_dotenv
 from llama_index.core import Settings, VectorStoreIndex, SimpleDirectoryReader
@@ -16,9 +15,9 @@ from langchain.chains.combine_documents import create_stuff_documents_chain
 from langchain_core.prompts import ChatPromptTemplate
 from langchain_openai import OpenAIEmbeddings, ChatOpenAI
 import faiss
-import tempfile
 
 # Load environment variables
+load_dotenv()
 os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
 
 # Global settings for LlamaIndex
@@ -45,14 +44,8 @@ if uploaded_file:
         with tab1:
             st.subheader("LangChain Query")
             try:
-                #
-
-                # Write the DataFrame to the temp file
-                data.to_csv(temp_file.name, index=False)
-                temp_file_path = temp_file.name
-
-                # Use CSVLoader with the temporary file path
-                loader = CSVLoader(file_path=temp_file_path)
+                # Use CSVLoader with the uploaded file
+                loader = CSVLoader(file_path=uploaded_file)
                 docs = loader.load_and_split()
 
                 # Preview the first document
@@ -91,24 +84,15 @@ if uploaded_file:
 
             except Exception as e:
                 st.error(f"Error processing with LangChain: {e}")
-            finally:
-                # Clean up the temporary file
-                if 'temp_file_path' in locals() and os.path.exists(temp_file_path):
-                    os.remove(temp_file_path)
 
         # LlamaIndex Tab
         with tab2:
             st.subheader("LlamaIndex Query")
            try:
-                # Save uploaded file content to a temporary CSV file for LlamaIndex
-                with tempfile.NamedTemporaryFile(delete=False, suffix=".csv", mode="w") as temp_file:
-                    data.to_csv(temp_file.name, index=False)
-                    temp_file_path = temp_file.name
-
                # Use PagedCSVReader for LlamaIndex
                csv_reader = PagedCSVReader()
                reader = SimpleDirectoryReader(
-                    input_files=[
+                    input_files=[uploaded_file.name],
                    file_extractor={".csv": csv_reader},
                )
                docs = reader.load_data()
@@ -137,9 +121,5 @@ if uploaded_file:
                 st.write(f"Answer: {response.response}")
             except Exception as e:
                 st.error(f"Error processing with LlamaIndex: {e}")
-            finally:
-                # Clean up the temporary file
-                if 'temp_file_path' in locals() and os.path.exists(temp_file_path):
-                    os.remove(temp_file_path)
     except Exception as e:
         st.error(f"Error reading uploaded file: {e}")
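For reference, a minimal sketch of the environment setup the added `load_dotenv()` call expects, assuming a `.env` file next to `app.py` that defines `OPENAI_API_KEY` (the key value below is a placeholder):

```python
import os
from dotenv import load_dotenv

# Assumes a .env file in the working directory containing a line such as:
# OPENAI_API_KEY=sk-...
load_dotenv()
os.environ["OPENAI_API_KEY"] = os.getenv("OPENAI_API_KEY")
```

Note that `os.getenv` returns `None` when the variable is absent, so the assignment assumes the key is present either in the environment or in the `.env` file.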
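On the LangChain side, a minimal sketch of the loading path the updated tab relies on, assuming `CSVLoader` from `langchain_community` and a CSV reachable on disk; `"data.csv"` is a placeholder path, and whether the loader also accepts Streamlit's in-memory `UploadedFile` directly (as `CSVLoader(file_path=uploaded_file)` does in the change) depends on the installed LangChain version:

```python
from langchain_community.document_loaders import CSVLoader

# "data.csv" stands in for the uploaded CSV saved to a local path.
loader = CSVLoader(file_path="data.csv")
docs = loader.load_and_split()

# Preview the first document, as the app does in tab1.
print(docs[0].page_content)
```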
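Likewise for the LlamaIndex tab, a minimal sketch of the reader setup, assuming `PagedCSVReader` from the `llama-index-readers-file` package and a CSV that actually exists on disk; Streamlit's `uploaded_file.name` is only the original filename, not a saved path, so passing it to `input_files` assumes a file by that name is present in the working directory:

```python
from llama_index.core import SimpleDirectoryReader
from llama_index.readers.file import PagedCSVReader

# "data.csv" is a placeholder path; PagedCSVReader yields one Document per CSV row.
csv_reader = PagedCSVReader()
reader = SimpleDirectoryReader(
    input_files=["data.csv"],
    file_extractor={".csv": csv_reader},
)
docs = reader.load_data()
print(f"Loaded {len(docs)} documents")
```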