Update app.py
app.py CHANGED
@@ -32,7 +32,7 @@ def query_llama(payload):
         print(f"Error querying Llama model: {e}")
         return None
 
-def google_search(term, num_results=
+def google_search(term, num_results=1, lang="en", timeout=5, safe="active", ssl_verify=None, days_back=90):
     """Perform a Google search and return results"""
     print(f"Searching for term: {term}")
 
@@ -223,7 +223,7 @@ Provide a detailed, coherent summary focusing on financial implications and anal
 {combined_summary}
 Focus on the most important financial implications and analysis."""
 
-    final_summary = query_llama({"inputs": final_prompt, "parameters": {"max_length":
+    final_summary = query_llama({"inputs": final_prompt, "parameters": {"max_length": 3000}})
 
     if final_summary and isinstance(final_summary, list) and 'generated_text' in final_summary[0]:
        return final_summary[0]['generated_text']
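
The updated signature gives google_search the same keyword arguments as the search() helper in the googlesearch-python package (num_results, lang, timeout, safe, ssl_verify), plus an extra days_back parameter that is not part of that API. The sketch below is one plausible reading of how the body could wire those arguments through, assuming the Space uses googlesearch-python; folding days_back into the query with Google's after: operator, and the try/except fallback, are assumptions rather than code from this diff.

from datetime import datetime, timedelta

from googlesearch import search  # pip install googlesearch-python


def google_search(term, num_results=1, lang="en", timeout=5, safe="active",
                  ssl_verify=None, days_back=90):
    """Perform a Google search and return results"""
    print(f"Searching for term: {term}")
    # Assumed handling of days_back: bias results toward recent pages by
    # appending Google's after:YYYY-MM-DD operator to the query string.
    start_date = (datetime.now() - timedelta(days=days_back)).strftime("%Y-%m-%d")
    query = f"{term} after:{start_date}"
    try:
        # num_results, lang, timeout, safe and ssl_verify map directly onto
        # googlesearch-python's search() keywords.
        return list(search(query, num_results=num_results, lang=lang,
                           timeout=timeout, safe=safe, ssl_verify=ssl_verify))
    except Exception as e:
        print(f"Error during Google search: {e}")
        return []

With the new default of num_results=1, a call like google_search("NVDA Q2 earnings") would return at most one recent URL per search term.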
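The second hunk sets max_length to 3000 in the payload passed to query_llama. For context, here is a rough sketch of a query_llama() helper consistent with the surrounding lines, assuming it POSTs the payload to the Hugging Face Inference API with requests; the model URL and the HF_TOKEN secret name are placeholders, not taken from this Space. The [{'generated_text': ...}] response shape is what the isinstance/key check at lines 228-229 expects.

import os

import requests

# Assumed endpoint and auth: the actual model ID and secret name used by this
# Space are not visible in the diff.
API_URL = "https://api-inference.huggingface.co/models/meta-llama/Llama-2-7b-chat-hf"
HEADERS = {"Authorization": f"Bearer {os.environ.get('HF_TOKEN', '')}"}


def query_llama(payload):
    """POST a text-generation payload and return the parsed JSON, or None on error."""
    try:
        response = requests.post(API_URL, headers=HEADERS, json=payload, timeout=120)
        response.raise_for_status()
        return response.json()
    except Exception as e:
        print(f"Error querying Llama model: {e}")
        return None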