Update app.py
app.py CHANGED
@@ -508,7 +508,7 @@ If any part of the information cannot be verified from this source, clearly stat
         yield accumulated_response, ""

     # Generate an overall summary after processing all sources
-    overall_prompt = f"""Based on the summaries you've generated for each source, provide a concise overall summary that addresses the user's query: '{query}'
+    overall_prompt = f"""Based on the summaries you've generated for each source: '{accumulated_response}', provide a concise overall summary that addresses the user's query: '{query}'
 Highlight any conflicting information or gaps in the available data."""

     if model == "@cf/meta/llama-3.1-8b-instruct":
@@ -534,7 +534,7 @@ Highlight any conflicting information or gaps in the available data."""
         yield accumulated_response, ""

     # Generate an overall summary after processing all sources
-    overall_prompt = f"""Based on the summaries you've generated for each source, provide a concise overall summary that addresses the user's query: '{query}'
+    overall_prompt = f"""Based on the summaries you've generated for each source: '{accumulated_response}', provide a concise overall summary that addresses the user's query: '{query}'
 Highlight any conflicting information or gaps in the available data."""

     if model == "@cf/meta/llama-3.1-8b-instruct":
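What both hunks change: the previous prompt told the model to work from "the summaries you've generated for each source" without actually including them, so the overall-summary request likely had nothing concrete to draw on; the updated prompt interpolates accumulated_response, passing the per-source summaries into the request explicitly. Below is a minimal sketch of how the updated prompt could be built and handed off. The generate callable is a hypothetical stand-in for app.py's actual (streaming) model call, e.g. a Cloudflare Workers AI request for @cf/meta/llama-3.1-8b-instruct, which this diff does not show; only the prompt construction mirrors the '+' lines above.

# Sketch only: assumes a generic `generate` backend, not app.py's real call.
from typing import Callable

def overall_summary(query: str, accumulated_response: str,
                    generate: Callable[[str], str]) -> str:
    # Mirrors the updated prompt: the per-source summaries are passed in
    # explicitly rather than merely referred to.
    overall_prompt = f"""Based on the summaries you've generated for each source: '{accumulated_response}', provide a concise overall summary that addresses the user's query: '{query}'
Highlight any conflicting information or gaps in the available data."""
    return generate(overall_prompt)

# Example with a dummy backend, just to show the data flow:
if __name__ == "__main__":
    summaries = "Source 1: ...\nSource 2: ..."
    print(overall_summary("example query", summaries,
                          lambda prompt: f"[model output for a {len(prompt)}-char prompt]"))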