"""Gradio app that lists Hugging Face daily papers in a HackerNews-style feed."""

import gradio as gr
import requests
from datetime import datetime, timezone

API_URL = "https://huggingface.co/api/daily_papers"
class PaperManager:
    """Fetches, sorts, and paginates papers from the daily_papers API."""

    def __init__(self, papers_per_page=10):
        self.papers_per_page = papers_per_page
        self.current_page = 1
        self.papers = []
        self.total_pages = 1

    def fetch_papers(self, page=1):
        """Load one page of papers; returns True on success, False on a request error."""
        try:
            response = requests.get(f"{API_URL}?page={page}&limit={self.papers_per_page}")
            response.raise_for_status()
            data = response.json()
            # Show the most upvoted papers first
            self.papers = sorted(data, key=lambda x: x.get('paper', {}).get('upvotes', 0), reverse=True)
            # Update total_pages based on the 'X-Total-Pages' header if available
            self.total_pages = int(response.headers.get('X-Total-Pages', self.total_pages))
            return True
        except requests.RequestException as e:
            print(f"Error fetching papers: {e}")
            return False
    def format_paper(self, paper):
        """Render a single paper entry as one HTML row."""
        title = paper.get('title', 'No title')
        url = f"https://huggingface.co/papers/{paper['paper'].get('id', '')}"
        authors = ', '.join([author.get('name', '') for author in paper['paper'].get('authors', [])])
        upvotes = paper.get('paper', {}).get('upvotes', 0)
        comments = paper.get('numComments', 0)
        # Fall back to "now" when publishedAt is missing; normalize the trailing 'Z' to an offset
        published_time = datetime.fromisoformat(
            paper.get('publishedAt', datetime.now(timezone.utc).isoformat()).replace('Z', '+00:00')
        )
        time_ago = (datetime.now(timezone.utc) - published_time).days
        return f"""<div style='border-bottom: 1px solid #eee; padding: 10px 0;'>
    <a href='{url}' target='_blank' style='color: #000; text-decoration: none; font-weight: bold;'>{title}</a>
    <div style='font-size: 0.8em; color: #666; margin-top: 5px;'>
        {upvotes} upvotes | by {authors} | {time_ago} days ago | {comments} comments
    </div>
</div>"""
    def render_papers(self):
        """Return the HTML for the current page, or an error/empty message."""
        if not self.fetch_papers(self.current_page):
            return "<div>Failed to fetch papers. Please try again later.</div>"
        if not self.papers:
            return "<div>No papers available for this page.</div>"
        return "".join([self.format_paper(paper) for paper in self.papers])

    def search_papers(self, query):
        """Filter the current page by a case-insensitive title match; an empty query resets to page 1."""
        if not query:
            self.current_page = 1
            return self.render_papers()
        self.fetch_papers(self.current_page)  # Ensure we have the latest data
        filtered_papers = [paper for paper in self.papers if query.lower() in paper.get('title', '').lower()]
        return "".join([self.format_paper(paper) for paper in filtered_papers])

    def next_page(self):
        # Advance only if a next page exists; always re-render so the UI stays in sync
        if self.current_page < self.total_pages:
            self.current_page += 1
        return self.render_papers(), f"Page {self.current_page} of {self.total_pages}"

    def prev_page(self):
        if self.current_page > 1:
            self.current_page -= 1
        return self.render_papers(), f"Page {self.current_page} of {self.total_pages}"
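
# Quick standalone check (illustrative sketch only; `pm` is not used by the app):
#   pm = PaperManager(papers_per_page=5)
#   html = pm.render_papers()  # fetches page 1 and returns the rendered HTML string
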
css = """
html, body {
height: 100%;
margin: 0;
padding: 0;
display: flex;
justify-content: center;
align-items: center;
background-color: #f0f0f0;
}
.container {
font-family: Arial, sans-serif;
max-width: 800px;
width: 100%;
background-color: white;
padding: 20px;
border-radius: 10px;
box-shadow: 0 0 10px rgba(0,0,0,0.1);
}
.paper-list {
max-height: 400px;
overflow-y: auto;
border: 1px solid #eee;
border-radius: 5px;
padding: 10px;
margin-bottom: 10px;
}
.search-row {
display: flex;
gap: 10px;
margin-bottom: 20px;
}
.title {
text-align: center;
color: #333;
}
.footer {
display: flex;
justify-content: space-between;
align-items: center;
margin-top: 10px;
}
"""
paper_manager = PaperManager()


def refresh_papers():
    """Reset to the first page and re-render the feed."""
    paper_manager.current_page = 1
    return paper_manager.render_papers(), f"Page {paper_manager.current_page} of {paper_manager.total_pages}"
demo = gr.Blocks(css=css)

with demo:
    with gr.Column(elem_classes=["container"]):
        gr.Markdown("# Daily Papers - HackerNews Style", elem_classes=["title"])
        with gr.Row(elem_classes=["search-row"]):
            search_input = gr.Textbox(label="Search papers", placeholder="Enter search term...")
            refresh_button = gr.Button("Refresh")
        paper_list = gr.HTML(paper_manager.render_papers(), elem_classes=["paper-list"])
        with gr.Row(elem_classes=["footer"]):
            prev_button = gr.Button("Previous Page")
            page_info = gr.Markdown(f"Page {paper_manager.current_page} of {paper_manager.total_pages}")
            next_button = gr.Button("Next Page")

    # Wire the UI events to the PaperManager methods
    search_input.change(paper_manager.search_papers, inputs=[search_input], outputs=[paper_list])
    refresh_button.click(refresh_papers, outputs=[paper_list, page_info])
    prev_button.click(paper_manager.prev_page, outputs=[paper_list, page_info])
    next_button.click(paper_manager.next_page, outputs=[paper_list, page_info])

demo.launch()
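
# Usage note: demo.launch() with no arguments serves the app locally (and is also how a
# Hugging Face Space runs it); passing share=True would additionally create a temporary
# public link when running outside Spaces.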