Commit · d1e599e
1 Parent(s): 6131df7
better caching
app.py CHANGED

@@ -95,6 +95,7 @@ logging.getLogger().addHandler(logging.StreamHandler(stream=sys.stdout))
 llama_debug = LlamaDebugHandler(print_trace_on_end=True)
 callback_manager = CallbackManager([llama_debug])
 
+@st.cache_resource
 #One doc embedding
 def load_emb_uploaded_document(filename):
     # You may want to add a check to prevent execution during initialization.
@@ -164,7 +165,6 @@ if 'emb_model' not in st.session_state:
 # Use the models from session state
 query_engine = st.session_state.emb_model
 
-
 # ------------------------------------layout----------------------------------------
 
 with st.sidebar:
@@ -235,8 +235,10 @@ with tab2:
 with tab3:
     st.title("π One single document Q&A with Llama Index using local open llms")
     if st.button('Reinitialize Query Engine', key='reinit_engine'):
-
+        del st.session_state["emb_model_upload_doc"]
+        st.session_state.emb_model_upload_doc = ""
         st.write("Query engine reinitialized.")
+
     uploaded_file = st.file_uploader("Upload an File", type=("txt", "csv", "md","pdf"))
     question = st.text_input(
         "Ask something about the files",
@@ -251,20 +253,23 @@ with tab3:
         if not os.path.exists("draft_docs"):
             st.error("draft_docs directory does not exist. Please download and copy paste a model in folder models.")
             os.makedirs("draft_docs")
-
         with open("draft_docs/"+uploaded_file.name, "wb") as f:
             text = uploaded_file.read()
             f.write(text)
         text = uploaded_file.read()
+        # Embedding Model Loading
+        if 'emb_model_upload_doc' not in st.session_state:
+            st.session_state.emb_model_upload_doc = load_emb_uploaded_document("draft_docs/"+uploaded_file.name)
+        # Use the models from session state
+        query_engine_upload_doc = st.session_state.emb_model_upload_doc
         # if load_emb_uploaded_document:
        #     load_emb_uploaded_document.clear()
        #load_emb_uploaded_document.clear()
-        query_engine = load_emb_uploaded_document("draft_docs/"+uploaded_file.name)
         st.write("File ",uploaded_file.name, "was loaded successfully")
 
     if uploaded_file and question and api_server_info:
         contextual_prompt = st.session_state.memory + "\n" + question
-        response =
+        response = query_engine_upload_doc.query(contextual_prompt)
         text_response = response.response
         st.write("### Answer")
         st.markdown(text_response)
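
Note on the caching pattern in this commit: it layers Streamlit's two mechanisms. @st.cache_resource memoizes load_emb_uploaded_document so a query engine is built at most once per uploaded file path, while st.session_state keeps a per-session handle (emb_model_upload_doc) that survives reruns and can be discarded from the "Reinitialize Query Engine" button. Below is a minimal, self-contained sketch of that pattern; the stubbed loader and the example file path are placeholders for illustration only, standing in for the real LlamaIndex setup in app.py.

import streamlit as st

@st.cache_resource
def load_emb_uploaded_document(filename):
    # Stub for the sketch: app.py builds a LlamaIndex query engine here.
    # Whatever is returned is cached once per distinct filename, across reruns.
    return f"query engine for {filename}"

if st.button('Reinitialize Query Engine', key='reinit_engine'):
    # Drop the per-session handle so the block below re-fetches it on the next run.
    if 'emb_model_upload_doc' in st.session_state:
        del st.session_state['emb_model_upload_doc']
    st.write("Query engine reinitialized.")

filename = "draft_docs/example.txt"  # hypothetical path, for illustration only
if 'emb_model_upload_doc' not in st.session_state:
    st.session_state.emb_model_upload_doc = load_emb_uploaded_document(filename)

query_engine_upload_doc = st.session_state.emb_model_upload_doc
st.write(query_engine_upload_doc)

One consequence worth noting: because @st.cache_resource keeps its own copy, dropping the session key alone does not force a rebuild for the same file; load_emb_uploaded_document.clear() (the call left commented out in the diff) is what would invalidate the cached engine itself.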