Spaces: Runtime error
import io

import gradio as gr
from PIL import Image  # Pillow, used to decode raw image bytes

# The available text-to-image models, keyed by display name
models = {
    "Flux Lora": "models/prashanth970/flux-lora-uncensored",
    "TrioHMH Flux": "models/DiegoJR1973/NSFW-TrioHMH-Flux",
    "Master": "models/pimpilikipilapi1/NSFW_master",
}
# Generate an image from a text prompt using the selected model
def generate_image(text, model_name):
    model_path = models[model_name]  # repo path of the selected model
    print(f"Fetching model from: {model_path}")
    try:
        # Load the model through Gradio's Hugging Face integration;
        # the loaded interface can be called like a function
        model = gr.load(model_path)
        result_image = model(text)  # generate the image from the input text
        # Normalize the result so the Image output component can display it
        if isinstance(result_image, bytes):  # raw image bytes: decode with Pillow
            return Image.open(io.BytesIO(result_image))
        # file paths, PIL images, and numpy arrays are accepted as-is
        return result_image
    except Exception as e:
        print(f"Error generating image: {e}")
        return None
# Gradio Interface setup
interface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Textbox(label="Type here your imagination:", placeholder="Type your description here..."),  # prompt input
        gr.Dropdown(label="Select Model", choices=list(models.keys()), value="Flux Lora"),  # model selector
    ],
    outputs=gr.Image(label="Generated Image"),  # image output
    theme="NoCrypt/miku",  # theme for the interface
    description="Sorry for the inconvenience. The model is currently running on the CPU, which might affect performance. We appreciate your understanding.",
)

# Launch the Gradio interface
interface.launch()
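
Because gr.load() is called inside generate_image, the model wrapper is rebuilt on every request, which is slow on a CPU Space. A minimal sketch of one way to cache loaded models is shown below; it assumes the same models dict and gr.load() behavior as above, and the _loaded_models dict and get_model helper are illustrative names, not part of the original app.

import gradio as gr

# Illustrative cache of already-loaded model wrappers, keyed by repo path
_loaded_models = {}

def get_model(model_path):
    # Load each model once and reuse the wrapper on later requests
    if model_path not in _loaded_models:
        _loaded_models[model_path] = gr.load(model_path)
    return _loaded_models[model_path]

def generate_image(text, model_name):
    # "models" is the dict of repo paths defined in the app above
    model = get_model(models[model_name])
    return model(text)

This trades a small amount of memory for not reconstructing the inference wrapper on every click; the prompt still runs through the same loaded model as before.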