Spaces:
Runtime error
Runtime error
Srinivasulu kethanaboina
committed on
Update app.py
Browse files
app.py
CHANGED
@@ -1,5 +1,6 @@
|
|
1 |
import gradio as gr
|
2 |
import os
|
|
|
3 |
from dotenv import load_dotenv
|
4 |
from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
|
5 |
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
|
@@ -31,18 +32,19 @@ PDF_DIRECTORY = 'data'
|
|
31 |
os.makedirs(PDF_DIRECTORY, exist_ok=True)
|
32 |
os.makedirs(PERSIST_DIR, exist_ok=True)
|
33 |
|
34 |
-
#
|
35 |
-
|
36 |
-
|
37 |
-
|
38 |
-
|
39 |
-
|
40 |
-
|
41 |
-
|
42 |
-
|
43 |
-
|
44 |
-
|
45 |
-
|
|
|
46 |
chat_text_qa_msgs = [
|
47 |
(
|
48 |
"user",
|
@@ -62,7 +64,7 @@ def handle_query(query):
|
|
62 |
|
63 |
# Use chat history to enhance response
|
64 |
context_str = ""
|
65 |
-
for past_query, response in reversed(
|
66 |
if past_query.strip():
|
67 |
context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
|
68 |
|
@@ -78,15 +80,15 @@ def handle_query(query):
|
|
78 |
|
79 |
# Update current chat history dictionary (use unique ID as key)
|
80 |
chat_id = str(datetime.datetime.now().timestamp())
|
81 |
-
|
82 |
|
83 |
return response
|
84 |
|
85 |
# Define your Gradio chat interface function
|
86 |
-
def chat_interface(message, history):
|
87 |
try:
|
88 |
# Process the user message and generate a response
|
89 |
-
response = handle_query(message)
|
90 |
|
91 |
# Return the bot response
|
92 |
return response
|
@@ -124,19 +126,7 @@ div.svelte-1rjryqp{display: none;}
|
|
124 |
div.progress-text.svelte-z7cif2.meta-text {display: none;}
|
125 |
'''
|
126 |
|
127 |
-
# JavaScript to handle chat history saving
|
128 |
-
js_code = '''
|
129 |
-
<script>
|
130 |
-
function saveHistory(message, response) {
|
131 |
-
// Store message and response in session storage
|
132 |
-
let history = JSON.parse(sessionStorage.getItem('chatHistory') || '[]');
|
133 |
-
history.push({message, response});
|
134 |
-
sessionStorage.setItem('chatHistory', JSON.stringify(history));
|
135 |
-
}
|
136 |
-
</script>
|
137 |
-
'''
|
138 |
-
|
139 |
# Use Gradio Blocks to wrap components
|
140 |
-
chat = gr.ChatInterface(chat_interface, css=css, clear_btn=None, undo_btn=None, retry_btn=None
|
141 |
|
142 |
# Launch the Gradio interface
|
|
|
1 |
import gradio as gr
|
2 |
import os
|
3 |
+
from http.cookies import SimpleCookie
|
4 |
from dotenv import load_dotenv
|
5 |
from llama_index.core import StorageContext, load_index_from_storage, VectorStoreIndex, SimpleDirectoryReader, ChatPromptTemplate, Settings
|
6 |
from llama_index.llms.huggingface import HuggingFaceInferenceAPI
|
|
|
32 |
os.makedirs(PDF_DIRECTORY, exist_ok=True)
|
33 |
os.makedirs(PERSIST_DIR, exist_ok=True)
|
34 |
|
35 |
+
# Function to save chat history to cookies
|
36 |
+
def save_chat_history_to_cookies(chat_id, query, response, cookies):
|
37 |
+
history = cookies.get('chat_history', '[]')
|
38 |
+
history_list = eval(history)
|
39 |
+
history_list.append({
|
40 |
+
"chat_id": chat_id,
|
41 |
+
"query": query,
|
42 |
+
"response": response,
|
43 |
+
"timestamp": str(datetime.datetime.now())
|
44 |
+
})
|
45 |
+
cookies['chat_history'] = str(history_list)
|
46 |
+
|
47 |
+
def handle_query(query, cookies):
|
48 |
chat_text_qa_msgs = [
|
49 |
(
|
50 |
"user",
|
|
|
64 |
|
65 |
# Use chat history to enhance response
|
66 |
context_str = ""
|
67 |
+
for past_query, response in reversed(cookies.get('chat_history', [])):
|
68 |
if past_query.strip():
|
69 |
context_str += f"User asked: '{past_query}'\nBot answered: '{response}'\n"
|
70 |
|
|
|
80 |
|
81 |
# Update current chat history dictionary (use unique ID as key)
|
82 |
chat_id = str(datetime.datetime.now().timestamp())
|
83 |
+
save_chat_history_to_cookies(chat_id, query, response, cookies)
|
84 |
|
85 |
return response
|
86 |
|
87 |
# Define your Gradio chat interface function
|
88 |
+
def chat_interface(message, history, cookies):
|
89 |
try:
|
90 |
# Process the user message and generate a response
|
91 |
+
response = handle_query(message, cookies)
|
92 |
|
93 |
# Return the bot response
|
94 |
return response
|
|
|
126 |
div.progress-text.svelte-z7cif2.meta-text {display: none;}
|
127 |
'''
|
128 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
129 |
# Use Gradio Blocks to wrap components
|
130 |
+
chat = gr.ChatInterface(chat_interface, css=css, clear_btn=None, undo_btn=None, retry_btn=None).launch()
|
131 |
|
132 |
# Launch the Gradio interface
|