# --- Page-scrape artifacts (not Python source) -------------------------------
# The original paste included Hugging Face Space page chrome: status lines
# ("Spaces: Runtime error"), "File size: 3,967 Bytes", a git-blame hash
# gutter, and a 1..112 line-number gutter. Preserved here as a comment so
# the file parses as Python.
# -----------------------------------------------------------------------------
# import gradio as gr
# model1 = gr.load("models/Jonny001/NSFW_master")
# model2 = gr.load("models/Jonny001/Alita-v1")
# model3 = gr.load("models/lexa862/NSFWmodel")
# model4 = gr.load("models/Keltezaa/flux_pussy_NSFW")
# model5 = gr.load("models/prashanth970/flux-lora-uncensored")
# def generate_images(text, selected_model):
# if selected_model == "Model 1 (NSFW Master)":
# model = model1
# elif selected_model == "Model 2 (Alita)":
# model = model2
# elif selected_model == "Model 3 (Lexa NSFW)":
# model = model3
# elif selected_model == "Model 4 (Flux NSFW)":
# model = model4
# elif selected_model == "Model 5 (Lora Uncensored)":
# model = model5
# else:
# return "Invalid model selection."
# results = []
# for i in range(3):
# modified_text = f"{text} variation {i+1}"
# result = model(modified_text)
# results.append(result)
# return results
# interface = gr.Interface(
# fn=generate_images,
# inputs=[
# gr.Textbox(label="Type here your imagination:", placeholder="Type your prompt..."),
# gr.Radio(
# ["Model 1 (NSFW Master)", "Model 2 (Alita)", "Model 3 (Lexa NSFW)", "Model 4 (Flux NSFW)", "Model 5 (Lora Uncensored)"],
# label="Select Model (Try All Models & Get Different Results)",
# value="Model 1 (NSFW Master)",
# ),
# ],
# outputs=[
# gr.Image(label="Generated Image 1"),
# gr.Image(label="Generated Image 2"),
# gr.Image(label="Generated Image 3"),
# ],
# theme="Yntec/HaleyCH_Theme_Orange",
# description="⚠ Sorry for the inconvenience. The models are currently running on the CPU, which might affect performance. We appreciate your understanding.",
# cache_examples=False,
# )
# interface.launch()
import gradio as gr
import torch
# Pick the compute device once at startup: CUDA GPU when available, else CPU.
device = "cuda" if torch.cuda.is_available() else "cpu"
# Load all five Hub-hosted models eagerly on the selected device so that
# generate_images() can dispatch between them without reloading.
# NOTE(review): gr.load() resolves/instantiates remote Hugging Face models,
# so module import performs network I/O and may be slow or fail offline.
model1 = gr.load("models/Jonny001/NSFW_master", device=device)  # runs on `device` (cpu or cuda)
model2 = gr.load("models/Jonny001/Alita-v1", device=device)  # runs on `device` (cpu or cuda)
model3 = gr.load("models/lexa862/NSFWmodel", device=device)  # runs on `device` (cpu or cuda)
model4 = gr.load("models/Keltezaa/flux_pussy_NSFW", device=device)  # runs on `device` (cpu or cuda)
model5 = gr.load("models/prashanth970/flux-lora-uncensored", device=device)  # runs on `device` (cpu or cuda)
def generate_images(text, selected_model):
    """Generate two prompt variations with the model chosen in the UI.

    Parameters
    ----------
    text : str
        The user's prompt.
    selected_model : str
        One of the radio-button labels; anything else yields an error string.

    Returns
    -------
    list | str
        A two-element list of model outputs (one per prompt variation), or
        the string "Invalid model selection." for an unknown label.
    """
    # Radio labels in the same order as the module-level model globals.
    known_labels = (
        "Model 1 (NSFW Master)",
        "Model 2 (Alita)",
        "Model 3 (Lexa NSFW)",
        "Model 4 (Flux NSFW)",
        "Model 5 (Lora Uncensored)",
    )
    # Validate the label before touching any model global.
    if selected_model not in known_labels:
        return "Invalid model selection."
    chosen = (model1, model2, model3, model4, model5)[
        known_labels.index(selected_model)
    ]
    # Two lightly-perturbed prompts ("... variation 1", "... variation 2")
    # so the UI's two image slots show different results.
    return [chosen(f"{text} variation {n}") for n in (1, 2)]
# Gradio interface: one prompt box + a model picker in, two images out.
interface = gr.Interface(
    fn=generate_images,
    inputs=[
        gr.Textbox(label="Type here your imagination:", placeholder="Type your prompt..."),
        gr.Radio(
            ["Model 1 (NSFW Master)", "Model 2 (Alita)", "Model 3 (Lexa NSFW)", "Model 4 (Flux NSFW)", "Model 5 (Lora Uncensored)"],
            label="Select Model (Try All Models & Get Different Results)",
            value="Model 1 (NSFW Master)",
        ),
    ],
    # generate_images returns a 2-element list -> one image per slot.
    outputs=[
        gr.Image(label="Generated Image 1"),
        gr.Image(label="Generated Image 2"),
    ],
    theme="Yntec/HaleyCH_Theme_Orange",
    # Fix: the old text unconditionally apologised for CPU execution even
    # when the device check above selected CUDA; keep the note in sync.
    description=(
        "Running on GPU (CUDA)."
        if device == "cuda"
        else "⚠ Sorry for the inconvenience. The models are currently running on the CPU, which might affect performance. We appreciate your understanding."
    ),
    cache_examples=False,
)
interface.launch()