gaur3009 commited on
Commit
e0d703d
·
verified ·
1 Parent(s): 7f655d1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +13 -10
app.py CHANGED
@@ -7,7 +7,7 @@ from transformers import AutoTokenizer, AutoModel
7
  from weaviate.classes.init import Auth
8
  import cohere
9
 
10
- # Load credentials from environment variables
11
  WEAVIATE_URL = "vgwhgmrlqrqqgnlb1avjaa.c0.us-west3.gcp.weaviate.cloud"
12
  WEAVIATE_API_KEY = "7VoeYTjkOS4aHINuhllGpH4JPgE2QquFmSMn"
13
  COHERE_API_KEY = "LEvCVeZkqZMW1aLYjxDqlstCzWi4Cvlt9PiysqT8"
@@ -67,16 +67,19 @@ def generate_response(context, query):
67
 
68
  def qa_pipeline(pdf_file, query):
69
  """Main pipeline for QA: parse PDF, embed chunks, query Weaviate, and generate answer."""
70
- document_text = load_pdf(pdf_file)
71
- document_chunks = [document_text[i:i+500] for i in range(0, len(document_text), 500)]
 
72
 
73
- upload_document_chunks(document_chunks)
74
- top_docs = query_answer(query)
75
 
76
- context = ' '.join([doc.properties['content'] for doc in top_docs])
77
- answer = generate_response(context, query)
78
 
79
- return context, answer
 
 
80
 
81
  # Gradio UI
82
  with gr.Blocks(theme="compact") as demo:
@@ -103,7 +106,7 @@ with gr.Blocks(theme="compact") as demo:
103
  answer_output = gr.Textbox(label="💬 Answer", lines=3)
104
 
105
  submit_button.click(
106
- qa_pipeline,
107
  inputs=[pdf_input, query_input],
108
  outputs=[doc_segments_output, answer_output]
109
  )
@@ -141,4 +144,4 @@ with gr.Blocks(theme="compact") as demo:
141
  """
142
  )
143
 
144
- demo.launch()
 
7
  from weaviate.classes.init import Auth
8
  import cohere
9
 
10
import os

# Load credentials from environment variables.
# SECURITY: never hardcode API keys in source — any key committed here is
# public and must be considered compromised and rotated. On Hugging Face
# Spaces, set these as repository secrets (Settings -> Variables and secrets).
WEAVIATE_URL = os.environ.get("WEAVIATE_URL", "")
WEAVIATE_API_KEY = os.environ.get("WEAVIATE_API_KEY", "")
COHERE_API_KEY = os.environ.get("COHERE_API_KEY", "")
 
67
 
68
def qa_pipeline(pdf_file, query):
    """Main pipeline for QA: parse PDF, embed chunks, query Weaviate, and generate answer.

    Args:
        pdf_file: uploaded PDF file object, passed through to ``load_pdf``.
        query: user question string.

    Returns:
        A ``(context, answer)`` tuple of strings — the concatenated retrieved
        chunks and the generated answer — matching the two Gradio outputs.
    """
    document_text = load_pdf(pdf_file)
    # Fixed-size 500-character chunks, no overlap.
    document_chunks = [document_text[i:i + 500]
                       for i in range(0, len(document_text), 500)]

    upload_document_chunks(document_chunks)
    top_docs = query_answer(query)

    context = ' '.join(doc.properties['content'] for doc in top_docs)
    answer = generate_response(context, query)

    # BUGFIX: do NOT close the shared Weaviate client here. Closing it in a
    # per-request `finally` makes every submission after the first fail with a
    # closed-connection error. The client is owned at module level; close it
    # once at process shutdown (e.g. atexit), not per call.
    return str(context), str(answer)
83
 
84
  # Gradio UI
85
  with gr.Blocks(theme="compact") as demo:
 
106
  answer_output = gr.Textbox(label="💬 Answer", lines=3)
107
 
108
  submit_button.click(
109
+ fn=qa_pipeline,
110
  inputs=[pdf_input, query_input],
111
  outputs=[doc_segments_output, answer_output]
112
  )
 
144
  """
145
  )
146
 
147
+ demo.launch(share=True)  # share=True opens a public gradio.live tunnel for local runs; Hugging Face Spaces serves the app directly and ignores this flag