Update app.py
app.py CHANGED
@@ -3,7 +3,6 @@ import os
 from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
 from llama_index.embeddings.huggingface import HuggingFaceEmbedding
 
-
 # Access OpenAI API Key
 openai_api_key = os.environ.get("OPENAI_API_KEY")
 if not openai_api_key:
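For context, the surrounding key handling reads OPENAI_API_KEY from the environment (on Spaces, a repository secret) and re-exports it for LlamaIndex's default OpenAI LLM. The body of the `if not openai_api_key:` branch sits outside this hunk, so the sketch below is a minimal standalone version of the guard; the error message is an assumption, not taken from the Space.

import os

# Read the OpenAI key from the environment (a Space secret named OPENAI_API_KEY).
openai_api_key = os.environ.get("OPENAI_API_KEY")
if not openai_api_key:
    # Assumption: fail fast; the actual behaviour is not shown in the diff.
    raise ValueError("OPENAI_API_KEY is not set. Add it as a secret in the Space settings.")
os.environ["OPENAI_API_KEY"] = openai_api_key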
@@ -12,19 +11,22 @@ os.environ["OPENAI_API_KEY"] = openai_api_key
 
 # Set Hugging Face Embedding globally via Settings
 embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-MiniLM-L6-v2")
-Settings.embed_model = embed_model
+Settings.embed_model = embed_model
 
-#
+# Load and filter documents
 def load_filtered_docs(folder):
-
-
+    try:
+        docs = SimpleDirectoryReader(folder).load_data()
+        return [doc for doc in docs if doc.text and doc.text.strip()]
+    except Exception as e:
+        print(f"Error loading docs from {folder}: {e}")
+        return []
 
-# Load Paul Graham documents
+# Load Paul Graham and Insurance documents
 pg_docs = load_filtered_docs("data/paul")
 pg_index = VectorStoreIndex.from_documents(pg_docs)
 pg_engine = pg_index.as_query_engine()
 
-# Load Insurance documents
 ins_docs = load_filtered_docs("data/insurance")
 ins_index = VectorStoreIndex.from_documents(ins_docs)
 ins_engine = ins_index.as_query_engine()
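Taken together, the additions in this hunk give the Space one global HuggingFace embedding model via Settings, a loader that drops empty documents and returns an empty list on read errors, and a vector index plus query engine per folder. A self-contained sketch of that pipeline; the data/sample folder, the emptiness guard, and the sample question are illustrative, not part of the Space:

from llama_index.core import Settings, SimpleDirectoryReader, VectorStoreIndex
from llama_index.embeddings.huggingface import HuggingFaceEmbedding

# Embed locally with MiniLM; answer synthesis still uses the default OpenAI LLM,
# so OPENAI_API_KEY must be set in the environment.
Settings.embed_model = HuggingFaceEmbedding(
    model_name="sentence-transformers/all-MiniLM-L6-v2"
)

def load_filtered_docs(folder):
    """Load documents from a folder, dropping empty ones; return [] on any failure."""
    try:
        docs = SimpleDirectoryReader(folder).load_data()
        return [doc for doc in docs if doc.text and doc.text.strip()]
    except Exception as e:
        print(f"Error loading docs from {folder}: {e}")
        return []

docs = load_filtered_docs("data/sample")  # hypothetical folder
if docs:  # guard against an empty folder so we don't query an index with nothing in it
    engine = VectorStoreIndex.from_documents(docs).as_query_engine()
    print(engine.query("What are these documents about?"))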
@@ -62,6 +64,8 @@ insurance_questions = [
     "What is the difference between premium and coverage?",
     "What should you check before buying insurance?"
 ]
+
+# Gradio interface
 def launch_interface():
     with gr.Blocks(
         title="RAG App",
@@ -80,7 +84,6 @@ def launch_interface():
 
         with gr.Tabs():
             with gr.Tab("Paul Graham"):
-                # Logo moved above heading
                 if os.path.exists("data/logo.png"):
                     gr.Image("data/logo.png", show_label=False, container=False, height=120)
 
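The second hunk above only relocates the logo, but the structure it lives in (Blocks, then Tabs, then a Tab per corpus, with the image rendered only when data/logo.png exists) is the skeleton of the whole UI. A minimal sketch of that skeleton, assuming a current Gradio 4.x API; the Markdown copy is placeholder text:

import os
import gradio as gr

with gr.Blocks(title="RAG App") as demo:
    with gr.Tabs():
        with gr.Tab("Paul Graham"):
            if os.path.exists("data/logo.png"):  # only shown if the file is in the repo
                gr.Image("data/logo.png", show_label=False, container=False, height=120)
            gr.Markdown("Ask about the Paul Graham essays.")  # placeholder copy
        with gr.Tab("Insurance"):
            gr.Markdown("Ask about the insurance documents.")  # placeholder copy

demo.launch()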
@@ -103,13 +106,12 @@ def launch_interface():
                         return "Please select or enter a question."
                     return query_pg(final_query)
 
-
-
-                def handle_pg_clear():
-                    return "", "", ""
-
-                clear_pg.click(fn=handle_pg_clear, outputs=[dropdown_pg, textbox_pg, output_pg])
+                def handle_pg_clear():
+                    return "", "", ""
 
+                dropdown_pg.change(query_pg, inputs=dropdown_pg, outputs=output_pg)
+                submit_pg.click(handle_pg_submit, inputs=[dropdown_pg, textbox_pg], outputs=output_pg)
+                clear_pg.click(fn=handle_pg_clear, outputs=[dropdown_pg, textbox_pg, output_pg])
 
             with gr.Tab("Insurance"):
                 gr.Markdown("""
@@ -131,16 +133,14 @@ def launch_interface():
                         return "Please select or enter a question."
                     return query_ins(final_query)
 
-                dropdown_ins.change(fn=query_ins, inputs=dropdown_ins, outputs=output_ins)
-                submit_ins.click(handle_ins_submit, inputs=[dropdown_ins, textbox_ins], outputs=output_ins)
                 def handle_ins_clear():
-
+                    return "", "", ""
 
+                dropdown_ins.change(query_ins, inputs=dropdown_ins, outputs=output_ins)
+                submit_ins.click(handle_ins_submit, inputs=[dropdown_ins, textbox_ins], outputs=output_ins)
                 clear_ins.click(fn=handle_ins_clear, outputs=[dropdown_ins, textbox_ins, output_ins])
 
-
     demo.launch()
 
-
-
-launch_interface()
+# Launch app (for Spaces)
+launch_interface()
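The last two hunks are mostly wiring: each tab ends up with a change handler on the dropdown, a click handler on the submit button, and a clear handler that resets dropdown, textbox, and output by returning one value per entry in outputs. A stripped-down version of that wiring; the query stub stands in for the real query engines, and the final_query fallback logic is an assumption since the handler bodies are only partly visible in the diff:

import gradio as gr

def query_stub(question):
    # Stand-in for pg_engine.query(...) / ins_engine.query(...).
    return f"(answer for: {question})"

def handle_submit(selected, typed):
    final_query = (typed or selected or "").strip()  # assumption: typed text wins
    if not final_query:
        return "Please select or enter a question."
    return query_stub(final_query)

def handle_clear():
    return "", "", ""  # one value per output component, in order

with gr.Blocks() as demo:
    dropdown = gr.Dropdown(["What did the author work on?"], label="Pick a question")
    textbox = gr.Textbox(label="Or type your own")
    submit = gr.Button("Submit")
    clear = gr.Button("Clear")
    output = gr.Textbox(label="Answer")

    dropdown.change(query_stub, inputs=dropdown, outputs=output)
    submit.click(handle_submit, inputs=[dropdown, textbox], outputs=output)
    clear.click(fn=handle_clear, outputs=[dropdown, textbox, output])

demo.launch()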