Shreyas094 committed
Commit d8d3738 · verified · 1 Parent(s): 0e98cea

Update app.py

Files changed (1):
  app.py: +25 -25
app.py CHANGED
@@ -122,7 +122,30 @@ class CitingSources(BaseModel):
         description="List of sources to cite. Should be an URL of the source."
     )
 
-def get_response_from_pdf(query, model, temperature=0.2):
+def get_response_with_search(query, model, num_calls=3, temperature=0.2):
+    search_results = duckduckgo_search(query)
+    context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
+                        for result in search_results if 'body' in result)
+
+    prompt = f"""<s>[INST] Using the following context:
+{context}
+Write a detailed and complete research document that fulfills the following user request: '{query}'
+After writing the document, please provide a list of sources used in your response. [/INST]"""
+
+    generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature)
+
+    # Clean the response
+    clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
+    clean_text = clean_text.replace("Using the following context:", "").strip()
+
+    # Split the content and sources
+    parts = clean_text.split("Sources:", 1)
+    main_content = parts[0].strip()
+    sources = parts[1].strip() if len(parts) > 1 else ""
+
+    return main_content, sources
+
+def get_response_from_pdf(query, model, num_calls=3, temperature=0.2):
     embed = get_embeddings()
     if os.path.exists("faiss_database"):
         database = FAISS.load_local("faiss_database", embed, allow_dangerous_deserialization=True)
@@ -138,7 +161,7 @@ def get_response_from_pdf(query, model, temperature=0.2):
 Write a detailed and complete response that answers the following user question: '{query}'
 Do not include a list of sources in your response. [/INST]"""
 
-    generated_text = generate_chunked_response(prompt, model, temperature=temperature)
+    generated_text = generate_chunked_response(prompt, model, num_calls=num_calls, temperature=temperature)
 
     # Clean the response
     clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
@@ -146,29 +169,6 @@ Do not include a list of sources in your response. [/INST]"""
 
     return clean_text
 
-def get_response_with_search(query, model, temperature=0.2):
-    search_results = duckduckgo_search(query)
-    context = "\n".join(f"{result['title']}\n{result['body']}\nSource: {result['href']}\n"
-                        for result in search_results if 'body' in result)
-
-    prompt = f"""<s>[INST] Using the following context:
-{context}
-Write a detailed and complete research document that fulfills the following user request: '{query}'
-After writing the document, please provide a list of sources used in your response. [/INST]"""
-
-    generated_text = generate_chunked_response(prompt, model, temperature=temperature)
-
-    # Clean the response
-    clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
-    clean_text = clean_text.replace("Using the following context:", "").strip()
-
-    # Split the content and sources
-    parts = clean_text.split("Sources:", 1)
-    main_content = parts[0].strip()
-    sources = parts[1].strip() if len(parts) > 1 else ""
-
-    return main_content, sources
-
 def chatbot_interface(message, history, use_web_search, model, temperature):
     if not message.strip():  # Check if the message is empty or just whitespace
         return history
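
The functional change in this commit is that get_response_with_search and get_response_from_pdf now take a num_calls argument (default 3) and forward it to generate_chunked_response, whose implementation is not part of this diff. The response-cleaning and source-splitting steps are unchanged; the snippet below is a small, self-contained illustration of those steps only (the generated_text sample is made up, not real model output, and nothing here reproduces the app's inference path):

import re

# Made-up sample in the shape the code expects: an echoed [INST] prompt,
# the answer, then a "Sources:" section.
generated_text = (
    "<s>[INST] Using the following context: ... [/INST] "
    "Solar capacity grew strongly in 2023.\n"
    "Sources:\nhttps://example.com/report"
)

# Same cleanup as in the diff: strip the echoed prompt block.
clean_text = re.sub(r'<s>\[INST\].*?\[/INST\]\s*', '', generated_text, flags=re.DOTALL)
clean_text = clean_text.replace("Using the following context:", "").strip()

# Split the answer from the cited sources, as get_response_with_search does.
parts = clean_text.split("Sources:", 1)
main_content = parts[0].strip()
sources = parts[1].strip() if len(parts) > 1 else ""

print(main_content)  # Solar capacity grew strongly in 2023.
print(sources)       # https://example.com/report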