import gradio as gr
from transformers import pipeline
from huggingface_hub import HfApi

# Get the list of text-generation models from the gia-project org on the Hugging Face Hub
api = HfApi()
models = api.list_models(author="gia-project", filter="text-generation")
models_names = [model.id for model in models]

# Cache of loaded pipelines, keyed by model name, so each model is only loaded once
model_pipelines = {}

# Load a default model initially
default_model_name = "gia-project/gia2-small-untrained"
default_generator = pipeline("text-generation", model=default_model_name, trust_remote_code=True)
model_pipelines[default_model_name] = default_generator


def generate_text(model_name, input_text):
    # Load the selected model lazily if it is not already cached
    if model_name not in model_pipelines:
        model_pipelines[model_name] = pipeline(
            "text-generation", model=model_name, trust_remote_code=True
        )

    # Run the pipeline for the selected model and return the generated text
    generator = model_pipelines[model_name]
    generated_text = generator(input_text)[0]["generated_text"]
    return generated_text


# Define the Gradio interface
iface = gr.Interface(
    fn=generate_text,  # Function called on user input
    inputs=[
        gr.Dropdown(choices=models_names, value=default_model_name, label="Select Model"),  # Model selector
        gr.Textbox(lines=5, label="Input Text"),  # Prompt text
    ],
    outputs=gr.Textbox(label="Generated Text"),  # Generated output
)

# Launch the Gradio interface
iface.launch()