gufett0 committed
Commit f51dfbf · 1 Parent(s): 99d4e42

added text streamer

Files changed (2)
  1. app.py +2 -10
  2. backend.py +2 -3
app.py CHANGED
@@ -3,21 +3,13 @@ import gradio as gr
 
 
 
-
-def setup_and_run():
-    # Call build_index() before starting the chat interface
-    build_index()  # This builds the index on app initialization
-
-    # Pass the index into handle_query (you can modify handle_query to accept it as an argument if needed)
-    iface = gr.ChatInterface(
+iface = gr.ChatInterface(
     fn=handle_query,  # Query handling remains here
     title="PDF Information and Inference",
     description="Retrieval-Augmented Generation - Ask me anything about the content of the PDF.",
 )
 
-    iface.launch()
-
 
 if __name__ == "__main__":
     progress=gr.Progress(track_tqdm=True)
-    setup_and_run()
+    iface.launch()
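In app.py, the setup_and_run() wrapper is gone: the index is no longer built when the app starts, the gr.ChatInterface is created at module level, and launch() is called directly under the __main__ guard. A minimal sketch of the resulting file, assuming handle_query is imported from backend.py (the import lines sit outside the hunk and are an assumption):

import gradio as gr
from backend import handle_query  # assumed import; not visible in the diff

# The chat interface is built at import time, but the index no longer is.
iface = gr.ChatInterface(
    fn=handle_query,  # Query handling remains here
    title="PDF Information and Inference",
    description="Retrieval-Augmented Generation - Ask me anything about the content of the PDF.",
)

if __name__ == "__main__":
    progress = gr.Progress(track_tqdm=True)
    iface.launch()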
backend.py CHANGED
@@ -41,9 +41,7 @@ parser = SentenceSplitter.from_defaults(
     chunk_size=256, chunk_overlap=64, paragraph_separator="\n\n"
 )
 
-index = None
 def build_index():
-    global index
     # Load documents from a file
     documents = SimpleDirectoryReader(input_files=["data/blockchainprova.txt"]).load_data()
     # Parse the documents into nodes
@@ -56,7 +54,8 @@ def build_index():
 
 @spaces.GPU(duration=20)
 def handle_query(query_str, chathistory):
-    global index
+
+    index = build_index()
 
     qa_prompt_str = (
         "Context information is below.\n"