Yoxas committed
Commit 52dd91e · verified · 1 Parent(s): be520f8

Update app.py

Files changed (1)
  1. app.py +2 -0
app.py CHANGED
@@ -32,6 +32,7 @@ llama_model = AutoModelForCausalLM.from_pretrained("openai-community/gpt2").to(d
 summarizer = pipeline("summarization", model="facebook/bart-large-cnn", device=0 if device == 'cuda' else -1)
 
 # Define the function to find the most relevant document using FAISS
+@spaces.GPU(duration=120)
 def retrieve_relevant_doc(query):
     query_embedding = sentence_model.encode(query, convert_to_tensor=False)
     _, indices = index.search(np.array([query_embedding]), k=1)
@@ -39,6 +40,7 @@ def retrieve_relevant_doc(query):
     return df.iloc[best_match_idx]['Abstract']
 
 # Define the function to generate a response
+@spaces.GPU(duration=120)
 def generate_response(query):
     relevant_doc = retrieve_relevant_doc(query)
     if len(relevant_doc) > 512:  # Truncate long documents
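
For context, both added decorators come from the Hugging Face `spaces` package (ZeroGPU): a decorated function is granted a GPU for roughly the requested number of seconds per call, and runs on CPU the rest of the time. A minimal, self-contained sketch of the pattern this commit applies follows; the function name and text below are illustrative only (not part of app.py), and app.py is assumed to already import `spaces`.

import spaces              # Hugging Face Spaces ZeroGPU helper (pip install spaces)
import torch

@spaces.GPU(duration=120)  # request a GPU slot for up to ~120 seconds per call
def run_on_gpu(text: str) -> str:
    # Inside the decorated function a CUDA device is available on a ZeroGPU
    # Space; outside of Spaces the decorator is effectively a no-op.
    device = "cuda" if torch.cuda.is_available() else "cpu"
    return f"would run on {device}: {text}"

print(run_on_gpu("example query"))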