Commit: 38ae2b6
Parent(s): cdca970
frontend with chatbot completion
frontend/components/file_streaming.py
CHANGED
@@ -7,11 +7,11 @@ from langchain.callbacks.base import BaseCallbackHandler
 
 
 @st.cache_resource(ttl="1h")
-def upload_data(uploaded_files):
+def upload_data(uploaded_files, BASE_URL):
     files = {"file": uploaded_files}
     with st.spinner("Uploading PDF..."):
         response = requests.post(
-            "
+            f"{BASE_URL}/api/upload", files=files
         )
 
     if response.status_code == 200:
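For readers following along, the change above threads the backend address into upload_data instead of relying on a hard-coded URL. Below is a minimal standalone sketch of the same request outside Streamlit, assuming the Space exposes the multipart /api/upload endpoint shown in the diff; the local PDF name is purely illustrative.

import requests

BASE_URL = "https://hemanthsai7-studybotapi.hf.space"  # backend Space URL from this commit

# Hypothetical local PDF, used only to illustrate the multipart upload.
with open("notes.pdf", "rb") as pdf:
    files = {"file": pdf}
    response = requests.post(f"{BASE_URL}/api/upload", files=files)

# The Streamlit helper treats HTTP 200 as a successful ingestion.
if response.status_code == 200:
    print("Upload accepted")
else:
    print("Upload failed with status", response.status_code)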
frontend/pages/2_🤖_bot.py
CHANGED
@@ -3,7 +3,6 @@ import requests
 import streamlit as st
 
 from layouts.mainlayout import mainlayout
-from langchain.memory.chat_message_histories import StreamlitChatMessageHistory
 
 from components.file_streaming import *
 from components.display import *
@@ -32,13 +31,13 @@ def display():
 
 display()
 
-BASE_URL = "
+BASE_URL = "https://hemanthsai7-studybotapi.hf.space"
 uploaded_files = st.sidebar.file_uploader(label="Upload PDF files", type=["pdf"])
 
 if not uploaded_files:
     st.info("Please upload PDF documents to continue.")
     st.stop()
-upload_data(uploaded_files)
+upload_data(uploaded_files, BASE_URL)
 
 
 if "messages" not in st.session_state.keys():
@@ -83,18 +82,14 @@ def generate_mistral_response(question: str):
 
         answer = response["result"]["answer"]
 
+        with st.expander("Source documents 🧐", expanded=True):
+            source_documents = response["result"]["source_documents"]
+            display_source_document(source_documents)
+
     except Exception as e:
         if response.json()=='exception.ModelDeployingException()':
            st.error("Model is deploying in the backend servers. Please try again after some time")
            st.stop()
-
-
-    with st.expander("Source documents 🧐", expanded=True):
-        source_documents = requests.post(
-            f"{BASE_URL}/api/inference",
-            json={"promptMessage": question}).json()["result"]["source_documents"]
-        display_source_document(source_documents)
-
 
     return answer
 
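To summarize the new response handling, the page now reads both the answer and the cited source documents from a single /api/inference call instead of issuing a second request for the sources. Here is a minimal client-side sketch of that round trip, assuming the response shape used in the diff (result.answer and result.source_documents); the sample question is hypothetical.

import requests

BASE_URL = "https://hemanthsai7-studybotapi.hf.space"

def ask(question: str) -> dict:
    # One round trip; the answer and its source documents come from the same payload.
    payload = requests.post(
        f"{BASE_URL}/api/inference",
        json={"promptMessage": question},
    ).json()
    return {
        "answer": payload["result"]["answer"],
        "source_documents": payload["result"]["source_documents"],
    }

if __name__ == "__main__":
    result = ask("Summarize the uploaded PDF.")  # hypothetical question
    print(result["answer"])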