Poojashetty357 committed on
Commit
c94cd9f
·
verified ·
1 Parent(s): c8564b5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +12 -15
app.py CHANGED
@@ -1,33 +1,32 @@
1
  #pip install llama-index-embeddings-huggingface
2
  import gradio as gr
3
  import os
4
- from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, ServiceContext
5
- from llama_index.embeddings.huggingface import HuggingFaceEmbedding
6
- # Access from Hugging Face secret
7
- openai_api_key = os.environ.get("OPENAI_API_KEY")
8
 
9
- if openai_api_key:
10
- os.environ["OPENAI_API_KEY"] = openai_api_key # optional if your library reads it directly
11
- else:
12
  raise ValueError("❌ OPENAI_API_KEY not found. Add it in Space settings > Secrets.")
 
13
 
14
- # βœ… Use Hugging Face embedding model
15
  embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-MiniLM-L6-v2")
16
- service_context = ServiceContext.from_defaults(embed_model=embed_model)
17
 
18
  # βœ… Helper to load and filter documents
19
  def load_filtered_docs(folder):
20
  docs = SimpleDirectoryReader(folder).load_data()
21
  return [doc for doc in docs if doc.text and doc.text.strip()]
22
 
23
- # βœ… Load and index Paul Graham documents
24
  pg_docs = load_filtered_docs("data/paul")
25
- pg_index = VectorStoreIndex.from_documents(pg_docs, service_context=service_context)
26
  pg_engine = pg_index.as_query_engine()
27
 
28
- # βœ… Load and index Insurance documents (PDF included)
29
  ins_docs = load_filtered_docs("data/insurance")
30
- ins_index = VectorStoreIndex.from_documents(ins_docs, service_context=service_context)
31
  ins_engine = ins_index.as_query_engine()
32
 
33
  # βœ… Query functions
@@ -86,7 +85,6 @@ def launch_interface():
86
  """)
87
 
88
  with gr.Tabs():
89
- # Paul Graham Tab
90
  with gr.Tab("Paul Graham"):
91
  if os.path.exists("data/logo.png"):
92
  gr.Image("data/logo.png", show_label=False, container=False, height=120)
@@ -109,7 +107,6 @@ def launch_interface():
109
  submit_pg.click(query_pg, inputs=textbox_pg, outputs=output_pg)
110
  clear_pg.click(lambda: ("", "", ""), outputs=[dropdown_pg, textbox_pg, output_pg])
111
 
112
- # Insurance Tab
113
  with gr.Tab("Insurance"):
114
  gr.Markdown("""
115
  <div id='header-text'>
 
1
  #pip install llama-index-embeddings-huggingface
2
  import gradio as gr
3
  import os
4
+ from llama_index.core import VectorStoreIndex, SimpleDirectoryReader, Settings
5
+ from llama_index.embeddings import HuggingFaceEmbedding
 
 
6
 
7
+ # ✅ Access OpenAI API Key
8
+ openai_api_key = os.environ.get("OPENAI_API_KEY")
9
+ if not openai_api_key:
10
  raise ValueError("❌ OPENAI_API_KEY not found. Add it in Space settings > Secrets.")
11
+ os.environ["OPENAI_API_KEY"] = openai_api_key
12
 
13
+ # ✅ Set Hugging Face Embedding globally via Settings
14
  embed_model = HuggingFaceEmbedding(model_name="sentence-transformers/all-MiniLM-L6-v2")
15
+ Settings.embed_model = embed_model # Replaces deprecated ServiceContext
16
 
17
  # βœ… Helper to load and filter documents
18
def load_filtered_docs(folder):
    """Load every document under *folder* and keep only those with non-blank text.

    Args:
        folder: Path of the directory handed to SimpleDirectoryReader.

    Returns:
        A list of the loaded documents whose ``text`` attribute is neither
        None/empty nor whitespace-only.
    """
    def _has_text(document):
        # Drop documents whose text is None, "", or whitespace-only.
        return bool(document.text and document.text.strip())

    loaded = SimpleDirectoryReader(folder).load_data()
    return list(filter(_has_text, loaded))
21
 
22
+ # ✅ Load Paul Graham documents
23
  pg_docs = load_filtered_docs("data/paul")
24
+ pg_index = VectorStoreIndex.from_documents(pg_docs)
25
  pg_engine = pg_index.as_query_engine()
26
 
27
+ # ✅ Load Insurance documents
28
  ins_docs = load_filtered_docs("data/insurance")
29
+ ins_index = VectorStoreIndex.from_documents(ins_docs)
30
  ins_engine = ins_index.as_query_engine()
31
 
32
# ✅ Query functions
 
85
  """)
86
 
87
  with gr.Tabs():
 
88
  with gr.Tab("Paul Graham"):
89
  if os.path.exists("data/logo.png"):
90
  gr.Image("data/logo.png", show_label=False, container=False, height=120)
 
107
  submit_pg.click(query_pg, inputs=textbox_pg, outputs=output_pg)
108
  clear_pg.click(lambda: ("", "", ""), outputs=[dropdown_pg, textbox_pg, output_pg])
109
 
 
110
  with gr.Tab("Insurance"):
111
  gr.Markdown("""
112
  <div id='header-text'>