Spaces:
Sleeping
Sleeping
create app.py
Browse files
app.py
ADDED
@@ -0,0 +1,79 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
import os
|
2 |
+
|
3 |
+
import gradio as gr
|
4 |
+
import wikipedia
|
5 |
+
from huggingface_hub import InferenceApi
|
6 |
+
from yarl import URL
|
7 |
+
|
8 |
+
|
9 |
+
def get_article_urls(topic: str) -> str:
    """Return up to five Wikipedia article URLs for *topic*, one per line.

    Searches Wikipedia for the topic, resolves each search result to its
    page and collects the canonical URL.  Ambiguous or missing pages are
    skipped.  Returns an empty string when nothing could be resolved.
    """
    search_results = wikipedia.search(topic, results=5)
    urls = []
    for title in search_results:
        try:
            # auto_suggest=False: these titles come straight from the search
            # API; the library's default "suggestion" logic can silently
            # resolve a valid title to a different (wrong) article.
            page = wikipedia.page(title, auto_suggest=False)
            urls.append(str(URL(page.url)))
        except (wikipedia.exceptions.DisambiguationError, wikipedia.exceptions.PageError):
            # Expected search noise — skip and keep going.
            continue
    return "\n".join(urls)
|
20 |
+
|
21 |
+
|
22 |
+
def summarize_articles(topic: str) -> str:
    """Search Wikipedia for *topic* and return model-generated summaries.

    For up to five search results, fetch the page, truncate its content to
    4000 characters, and ask the hosted model for a summary.  Each entry in
    the returned string contains the article URL followed by its summary,
    separated by a rule.  Returns a human-readable error string when the
    API token is missing or no article could be summarized.
    """
    hf_api_token = os.getenv("HF_API_TOKEN")
    if not hf_api_token:
        return "Error: Hugging Face API token is not set. Please set the HF_API_TOKEN environment variable."

    model_repo_id = "Qwen/Qwen2.5-72B-Instruct"
    parameters = {
        "max_length": 200,
        "do_sample": False,
        "num_beams": 5,
        "early_stopping": True,
    }
    inference_api = InferenceApi(repo_id=model_repo_id, token=hf_api_token)

    search_results = wikipedia.search(topic, results=5)
    summaries = []
    for title in search_results:
        try:
            # auto_suggest=False keeps the exact title returned by search;
            # the default suggestion logic can resolve to a different page.
            page = wikipedia.page(title, auto_suggest=False)
            content = page.content[:4000]  # keep the prompt within model limits
            prompt = f"Summarize the following text:\n\n{content}\n\nSummary:"
            response = inference_api(prompt, params=parameters)
            # The Inference API returns a list of dicts for text-generation
            # tasks; some deployments return a bare dict.  The original
            # called .get() directly on the response, which raises
            # AttributeError on the (common) list shape — accept both.
            if isinstance(response, list) and response:
                summary = response[0].get("generated_text", "").strip()
            elif isinstance(response, dict):
                summary = response.get("generated_text", "").strip()
            else:
                summary = ""
            url = str(URL(page.url))
            summaries.append(f"URL: {url}\n\nSummary:\n{summary}\n{'-'*80}")
        except (wikipedia.exceptions.DisambiguationError, wikipedia.exceptions.PageError):
            # Ambiguous/missing pages are expected — skip them.
            continue
    if summaries:
        return "\n\n".join(summaries)
    else:
        return "No articles found or unable to generate summaries."
|
53 |
+
|
54 |
+
|
55 |
+
def main():
    """Build the Gradio UI for the Wikipedia summarizer and launch it.

    Two buttons share a single topic textbox: one lists article URLs, the
    other produces model-generated summaries, each writing to its own
    output textbox.
    """
    with gr.Blocks() as demo:
        gr.Markdown("# 📚 Wikipedia Article Summarizer")

        topic_input = gr.Textbox(label="Enter a Topic", placeholder="e.g., Climate Change")
        with gr.Row():
            get_urls_button = gr.Button("Get Article URLs")
            summarize_button = gr.Button("Summarize Articles")
        urls_output = gr.Textbox(label="Article URLs", lines=5)
        summary_output = gr.Textbox(label="Article Summaries", lines=20)

        # Wire each button to its handler; both read the same topic input.
        get_urls_button.click(fn=get_article_urls, inputs=topic_input, outputs=urls_output)
        summarize_button.click(fn=summarize_articles, inputs=topic_input, outputs=summary_output)

    demo.launch()


if __name__ == "__main__":
    main()
|