Jonny001 committed on
Commit
945386e
β€’
1 Parent(s): 78c8861

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +34 -20
app.py CHANGED
@@ -1,11 +1,9 @@
1
  import gradio as gr
2
 
3
 
4
- models = {
5
- "Gen Image 01": gr.load("models/pimpilikipilapi1/NSFW_master"),
6
- "Gen Image 02": gr.load("models/DiegoJR1973/NSFW-TrioHMH-Flux"),
7
- "Gen Image 03": gr.load("models/prashanth970/flux-lora-uncensored")
8
- }
9
 
10
 
11
  default_negative_prompt = (
@@ -21,29 +19,45 @@ default_negative_prompt = (
21
  )
22
 
23
 
24
- def generate_image(text, negative_prompt):
25
- result_images = {}
26
- for model_name, model in models.items():
27
- try:
 
 
28
 
29
- result_images[model_name] = model(text, negative_prompt=negative_prompt)
30
- except TypeError:
31
-
32
- result_images[model_name] = model(text)
33
-
34
- return [result_images[model_name] for model_name in models]
35
 
 
 
 
 
 
 
36
 
37
  interface = gr.Interface(
38
- fn=generate_image,
 
 
 
 
39
  inputs=[
40
- gr.Textbox(label="Type your prompt here:", placeholder="Describe what you want..."),
41
  gr.Textbox(label="Negative prompt:", value=default_negative_prompt),
42
  ],
43
- outputs=[gr.Image(label=model_name) for model_name in models],
 
 
 
 
 
44
  theme="NoCrypt/miku",
45
- description="Sorry for the inconvenience. The model is currently running on the CPU, which might affect performance. We appreciate your understanding.",
46
  )
47
 
48
-
49
  interface.launch()
 
1
  import gradio as gr
2
 
3
 
4
+ model_1 = gr.load("models/pimpilikipilapi1/NSFW_master")
5
+ model_2 = gr.load("models/DiegoJR1973/NSFW-TrioHMH-Flux")
6
+ model_3 = gr.load("models/prashanth970/flux-lora-uncensored")
 
 
7
 
8
 
9
  default_negative_prompt = (
 
19
  )
20
 
21
 
22
+ def generate_image_model_1(prompt, negative_prompt):
23
+ prompt += " 1"
24
+ try:
25
+ return model_1(prompt, negative_prompt=negative_prompt)
26
+ except TypeError:
27
+ return model_1(prompt)
28
 
29
+ def generate_image_model_2(prompt, negative_prompt):
30
+ prompt += " 2"
31
+ try:
32
+ return model_2(prompt, negative_prompt=negative_prompt)
33
+ except TypeError:
34
+ return model_2(prompt)
35
 
36
+ def generate_image_model_3(prompt, negative_prompt):
37
+ prompt += " 3"
38
+ try:
39
+ return model_3(prompt, negative_prompt=negative_prompt)
40
+ except TypeError:
41
+ return model_3(prompt)
42
 
43
  interface = gr.Interface(
44
+ fn=lambda prompt, negative_prompt: (
45
+ generate_image_model_1(prompt, negative_prompt),
46
+ generate_image_model_2(prompt, negative_prompt),
47
+ generate_image_model_3(prompt, negative_prompt)
48
+ ),
49
  inputs=[
50
+ gr.Textbox(label="Type your prompt here: ✍️", placeholder="Describe what you want..."),
51
  gr.Textbox(label="Negative prompt:", value=default_negative_prompt),
52
  ],
53
+ outputs=[
54
+ gr.Image(label="Generated Image - Model 1"),
55
+ gr.Image(label="Generated Image - Model 2"),
56
+ gr.Image(label="Generated Image - Model 3"),
57
+ ],
58
+ title="Text to Image (NSFW) πŸ”ž",
59
  theme="NoCrypt/miku",
60
+ description="⚠️ Sorry for the inconvenience. The model is currently running on the CPU, which might affect performance. We appreciate your understanding.",
61
  )
62
 
 
63
  interface.launch()