import gradio as gr
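# Available text-to-image models: display name -> Hugging Face repo loaded via gr.load().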
models = {
"Flux Lora": "models/prashanth970/flux-lora-uncensored",
"TrioHMH Flux": "models/DiegoJR1973/NSFW-TrioHMH-Flux",
"Master": "models/pimpilikipilapi1/NSFW_master"
}
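# Load the selected model from the Hub and run one prediction per request; returns None on failure.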
def generate_image(text, model_name):
    model_path = models[model_name]
    print(f"Fetching model from: {model_path}")
    try:
        # gr.load() returns a callable interface for the hosted model; pass the prompt straight through.
        model = gr.load(model_path)
        result_image = model(text)
        # The loaded interface may return a file path/URL (str) or raw bytes; wrap either in an Image component.
        if isinstance(result_image, (str, bytes)):
            return gr.Image(value=result_image)
        return result_image
    except Exception as e:
        print(f"Error loading model: {e}")
        return None
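# Gradio UI: a prompt textbox and a model dropdown as inputs, the generated image as output.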
interface = gr.Interface(
    fn=generate_image,
    inputs=[
        gr.Textbox(label="Type your imagination here:", placeholder="Type your description here..."),
        gr.Dropdown(label="Select Model", choices=list(models.keys()), value="Flux Lora")
    ],
    outputs=gr.Image(label="Generated Image"),
    theme="NoCrypt/miku",
    description="Sorry for the inconvenience. The model is currently running on the CPU, which might affect performance. We appreciate your understanding.",
)
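# Start the Gradio server with default settings.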
interface.launch()