import streamlit as st
from huggingface_hub import HfApi
import asyncio
import os

# Initialize the Hugging Face API
api = HfApi()

# Directory to save the generated HTML files
HTML_DIR = "generated_html_pages"
if not os.path.exists(HTML_DIR):
    os.makedirs(HTML_DIR)

# Default list of Hugging Face usernames - where all the magic begins! πŸͺ„
default_users = {
    "users": [
        "awacke1", "rogerxavier", "jonatasgrosman", "kenshinn", "Csplk", "DavidVivancos",
        "cdminix", "Jaward", "TuringsSolutions", "Severian", "Wauplin", "phosseini",
        "Malikeh1375", "gokaygokay", "MoritzLaurer", "mrm8488", "TheBloke", "lhoestq",
        "xw-eric", "Paul", "Muennighoff", "ccdv", "haonan-li", "chansung", "lukaemon",
        "hails", "pharmapsychotic", "KingNish", "merve", "ameerazam08", "ashleykleynhans"
    ]
}

# Asynchronous function to fetch user content - because why wait when you can multitask? πŸš€
async def fetch_user_content(username):
    try:
        # Fetch models and datasets - the stars of our show! 🌟
        models = await asyncio.to_thread(api.list_models, author=username)
        datasets = await asyncio.to_thread(api.list_datasets, author=username)
        return {
            "username": username,
            "models": models,
            "datasets": datasets
        }
    except Exception as e:
        # Oops! Something went wrong - blame it on the gremlins! 😈
        return {"username": username, "error": str(e)}

# Fetch all users concurrently - more hands (or threads) make light work! πŸ’ͺ
async def fetch_all_users(usernames):
    tasks = [fetch_user_content(username) for username in usernames]
    return await asyncio.gather(*tasks)

# Generate HTML content for a user and save it to a file - because who doesn't love a good download link? πŸ’Ύ
def generate_html_page(username, models, datasets):
    # Build absolute links for every model and dataset. The list markup here is a
    # reconstruction (the original tags were stripped); `.id` is the repo identifier
    # exposed by huggingface_hub's ModelInfo / DatasetInfo objects.
    model_items = "\n".join(
        f'        <li><a href="https://huggingface.co/{model.id}" target="_blank">{model.id}</a></li>'
        for model in models
    )
    dataset_items = "\n".join(
        f'        <li><a href="https://huggingface.co/datasets/{dataset.id}" target="_blank">{dataset.id}</a></li>'
        for dataset in datasets
    )

    html_content = f"""<!DOCTYPE html>
<html>
<head>
    <meta charset="utf-8">
    <title>{username}'s Hugging Face Content</title>
</head>
<body>
    <h1>{username}'s Hugging Face Profile</h1>
    <p><a href="https://huggingface.co/{username}" target="_blank">πŸ”— Profile Link</a></p>

    <h2>🧠 Models</h2>
    <ul>
{model_items}
    </ul>

    <h2>πŸ“š Datasets</h2>
    <ul>
{dataset_items}
    </ul>
</body>
</html>
""" # Save the HTML content to a file html_file_path = os.path.join(HTML_DIR, f"{username}.html") with open(html_file_path, "w") as html_file: html_file.write(html_content) return html_file_path # Cache the HTML generation process using Streamlit's caching decorator @st.cache_data(show_spinner=False) def get_cached_html_page(username): user_data = asyncio.run(fetch_user_content(username)) if "error" in user_data: return None, user_data["error"] return generate_html_page(username, user_data["models"], user_data["datasets"]), None # Streamlit app setup - the nerve center of our operation! πŸŽ›οΈ st.title("Hugging Face User Content Display - Let's Automate Some Fun! πŸŽ‰") # Convert the default users list to a string - because nobody likes typing out long lists! πŸ“ default_users_str = "\n".join(default_users["users"]) # Text area with default list of usernames - feel free to add your friends! πŸ‘₯ usernames = st.text_area("Enter Hugging Face usernames (one per line):", value=default_users_str, height=300) # Show User Content button - the big red button! (But actually it's blue) πŸ–±οΈ if st.button("Show User Content"): if usernames: username_list = [username.strip() for username in usernames.split('\n') if username.strip()] st.markdown("### User Content Overview") for username in username_list: with st.container(): # Profile link - because everyone deserves their 15 seconds of fame! 🎀 st.markdown(f"**{username}** [πŸ”— Profile](https://huggingface.co/{username})") # Generate HTML page and provide download link - because who wouldn't want a custom webpage? 🌐 html_file_path, error = get_cached_html_page(username) if error: st.warning(f"{username}: {error} - Looks like the AI needs a coffee break β˜•") else: st.markdown(f"[πŸ“„ Download {username}'s HTML Page]({html_file_path})") st.markdown("---") else: st.warning("Please enter at least one username. Don't be shy! πŸ˜…") # Sidebar instructions - just in case you get lost! πŸ—ΊοΈ st.sidebar.markdown(""" ## How to use: 1. The text area is pre-filled with a list of Hugging Face usernames. You can edit this list or add more usernames. 2. Click 'Show User Content'. 3. View the user's models and datasets along with a link to their Hugging Face profile. 4. Download an HTML page for each user to use the absolute links offline! """)