Update app.py
app.py CHANGED
@@ -1,63 +1,247 @@
[Removed: the previous 63-line app.py, which defined the Gradio theme inline; only two fragments of that theme survive in the extracted diff, background_fill_primary='*primary_100' and background_fill_secondary='*secondary_200'. The theme definition now lives in a separate theme.py module, imported by the new file below.]
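Since the new file does "from theme import theme", the inline theme presumably moved into a sibling theme.py. A minimal sketch of what that module might export, reusing the only two settings that survive from the removed lines (the base theme and everything else here is an assumption):

# theme.py -- hypothetical sketch; only the two .set() values below come from the removed code
import gradio as gr

theme = gr.themes.Default().set(
    background_fill_primary='*primary_100',
    background_fill_secondary='*secondary_200',
)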
The new app.py:

import gradio as gr
import requests
import time
import json
from contextlib import closing
from websocket import create_connection
from deep_translator import GoogleTranslator
from langdetect import detect
import os
from PIL import Image
import io
from io import BytesIO
import base64
import re
from gradio_client import Client
from fake_useragent import UserAgent
import random
from theme import theme
from fastapi import FastAPI

app = FastAPI()

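Aside from gradio itself, these imports add requests, websocket-client (which provides create_connection), deep-translator, langdetect, Pillow, gradio_client, fake-useragent and fastapi as dependencies, so the Space's requirements.txt presumably needs to list them; import os appears to be unused.
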
# Generate an image for the given prompt: translate Russian input, sanitise it,
# then try a chain of public Hugging Face Spaces, falling back on failure.
def query(prompt, negative_prompt, task, steps, sampler, cfg_scale, seed, width, height):
    result = {"prompt": prompt, "negative_prompt": negative_prompt, "task": task, "steps": steps,
              "sampler": sampler, "cfg_scale": cfg_scale, "seed": seed, "width": width, "height": height}
    print(result)

    # Translate Russian prompts to English before sending them to the backends.
    try:
        language = detect(prompt)
        if language == 'ru':
            prompt = GoogleTranslator(source='ru', target='en').translate(prompt)
            print(prompt)
    except Exception:
        pass

    # Keep only Latin/Cyrillic letters and whitespace.
    prompt = re.sub(r'[^a-zA-Zа-яА-Я\s]', '', prompt)

    # Normalised here, but not actually forwarded to the backends below.
    cfg = int(cfg_scale)
    steps = int(steps)
    seed = int(seed)

    # The width/height sliders are currently overridden with fixed values.
    width = 896
    height = 1152

    if task == "Playground v2":
        ua = UserAgent()
        headers = {
            'user-agent': f'{ua.random}'
        }
        client = Client("https://ashrafb-arpr.hf.space/", headers=headers)
        result = client.predict(prompt, fn_index=0)
        return result

    if task == "Artigen v3":
        ua = UserAgent()
        headers = {
            'user-agent': f'{ua.random}'
        }
        client = Client("https://ashrafb-arv3s.hf.space/", headers=headers)
        result = client.predict(prompt, 0, "Cinematic", fn_index=0)
        return result

    # Primary backend: the google/sdxl Space via its legacy websocket queue API.
    try:
        with closing(create_connection("wss://google-sdxl.hf.space/queue/join")) as conn:
            conn.send('{"fn_index":3,"session_hash":""}')
            conn.send(f'{{"data":["{prompt}, 4k photo","[deformed | disfigured], poorly drawn, [bad : wrong] anatomy, [extra | missing | floating | disconnected] limb, (mutated hands and fingers), blurry",7.5,"(No style)"],"event_data":null,"fn_index":3,"session_hash":""}}')
            c = 0
            while c < 60:
                status = json.loads(conn.recv())['msg']
                if status == 'estimation':
                    c += 1
                    time.sleep(1)
                    continue
                if status == 'process_starts':
                    break
            photo = json.loads(conn.recv())['output']['data'][0][0]
            photo = photo.replace('data:image/jpeg;base64,', '').replace('data:image/png;base64,', '')
            photo = Image.open(io.BytesIO(base64.decodebytes(bytes(photo, "utf-8"))))
            return photo
    # Fallback chain: dalle-3-xl-lora-v2 -> Nymbo/SD-XL -> SDXL-Lightning -> ashrafb Spaces.
    except Exception:
        try:
            ua = UserAgent()
            headers = {
                'authority': 'ehristoforu-dalle-3-xl-lora-v2.hf.space',
                'accept': 'text/event-stream',
                'accept-language': 'ru,en;q=0.9,la;q=0.8,ja;q=0.7',
                'cache-control': 'no-cache',
                'referer': 'https://ehristoforu-dalle-3-xl-lora-v2.hf.space/?__theme=light',
                'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "YaBrowser";v="24.1", "Yowser";v="2.5"',
                'sec-ch-ua-mobile': '?0',
                'sec-ch-ua-platform': '"Windows"',
                'sec-fetch-dest': 'empty',
                'sec-fetch-mode': 'cors',
                'sec-fetch-site': 'same-origin',
                'user-agent': f'{ua.random}'
            }
            client = Client("ehristoforu/dalle-3-xl-lora-v2", headers=headers)
            result = client.predict(prompt, "(deformed, distorted, disfigured:1.3), poorly drawn, bad anatomy, wrong anatomy, extra limb, missing limb, floating limbs, (mutated hands and fingers:1.4), disconnected limbs, mutation, mutated, ugly, disgusting, blurry, amputation", True, 0, 1024, 1024, 6, True, api_name='/run')
            return result[0][0]['image']
        except Exception:
            try:
                ua = UserAgent()
                headers = {
                    'authority': 'nymbo-sd-xl.hf.space',
                    'accept': 'text/event-stream',
                    'accept-language': 'ru,en;q=0.9,la;q=0.8,ja;q=0.7',
                    'cache-control': 'no-cache',
                    'referer': 'https://nymbo-sd-xl.hf.space/?__theme=light',
                    'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "YaBrowser";v="24.1", "Yowser";v="2.5"',
                    'sec-ch-ua-mobile': '?0',
                    'sec-ch-ua-platform': '"Windows"',
                    'sec-fetch-dest': 'empty',
                    'sec-fetch-mode': 'cors',
                    'sec-fetch-site': 'same-origin',
                    'user-agent': f'{ua.random}'
                }
                client = Client("Nymbo/SD-XL", headers=headers)
                result = client.predict(prompt, negative_prompt, "", "", True, False, False, 0, 896, 1152, 7, 1, 25, 25, False, api_name="/run")
                return result
            except Exception:
                try:
                    ua = UserAgent()
                    headers = {
                        'authority': 'radames-real-time-text-to-image-sdxl-lightning.hf.space',
                        'accept': 'text/event-stream',
                        'accept-language': 'ru,en;q=0.9,la;q=0.8,ja;q=0.7',
                        'cache-control': 'no-cache',
                        'referer': 'https://radames-real-time-text-to-image-sdxl-lightning.hf.space/?__theme=light',
                        'sec-ch-ua': '"Not_A Brand";v="8", "Chromium";v="120", "YaBrowser";v="24.1", "Yowser";v="2.5"',
                        'sec-ch-ua-mobile': '?0',
                        'sec-ch-ua-platform': '"Windows"',
                        'sec-fetch-dest': 'empty',
                        'sec-fetch-mode': 'cors',
                        'sec-fetch-site': 'same-origin',
                        'user-agent': f'{ua.random}'
                    }
                    client = Client("radames/Real-Time-Text-to-Image-SDXL-Lightning", headers=headers)
                    result = client.predict(prompt, [], 0, random.randint(1, 999999), fn_index=0)
                    return result
                except Exception:
                    try:
                        ua = UserAgent()
                        headers = {
                            'user-agent': f'{ua.random}'
                        }
                        client = Client("https://ashrafb-arpr.hf.space/", headers=headers)
                        result = client.predict(prompt, fn_index=0)
                        return result
                    except Exception:
                        ua = UserAgent()
                        headers = {
                            'user-agent': f'{ua.random}'
                        }
                        client = Client("https://ashrafb-arv3s.hf.space/", headers=headers)
                        result = client.predict(prompt, 0, "Cinematic", fn_index=0)
                        return result


# Upscale a generated image through the darkstorm2150 Protogen web UI "extras" endpoint.
def mirror(image_output, scale_by, method, gfpgan, codeformer):

    url_up = "https://darkstorm2150-protogen-web-ui.hf.space/run/predict/"
    url_up_f = "https://darkstorm2150-protogen-web-ui.hf.space/file="

    # Note: int() truncates the 0-1 GFPGAN/CodeFormer slider values.
    scale_by = int(scale_by)
    gfpgan = int(gfpgan)
    codeformer = int(codeformer)

    with open(image_output, "rb") as image_file:
        encoded_string2 = base64.b64encode(image_file.read()).decode()  # plain str, no stray b'' artifacts

    encoded_string2 = "data:image/png;base64," + encoded_string2
    data = {"fn_index": 81, "data": [0, 0, encoded_string2, None, "", "", True, gfpgan, codeformer, 0, scale_by, 896, 1152, None, method, "None", 1, False, [], "", ""], "session_hash": ""}
    r = requests.post(url_up, json=data, timeout=100)
    print(r.text)
    print(r.json()['data'][0][0]['name'])
    ph = url_up_f + str(r.json()['data'][0][0]['name'])
    print(ph)
    response2 = requests.get(ph)
    img = Image.open(BytesIO(response2.content))
    return img


css = """
.gradio-container {background-color: MediumAquaMarine}
footer {display: none !important}
#generate {
    width: 100%;
}
#image_output {
    height: 100% !important;
}
"""

# NB: this rebinds app, shadowing the FastAPI() instance created above (see the note after the listing).
with gr.Blocks(css=css, theme=theme) as app:

    with gr.Tab("Basic Settings"):
        with gr.Row():
            prompt = gr.Textbox(placeholder="Enter the image description...", show_label=True, label='Image Prompt ✍️', lines=3, show_copy_button=True)
        with gr.Row():
            # Note: query() also has "Playground v2" / "Artigen v3" branches, which none of these choices trigger.
            task = gr.Radio(interactive=True, value="Stable Diffusion XL 1.0", show_label=True, label="Model of neural network 🧠",
                            choices=['Stable Diffusion XL 1.0', 'Crystal Clear XL',
                                     'Juggernaut XL', 'DreamShaper XL',
                                     'SDXL Niji', 'Cinemax SDXL', 'NightVision XL'])

    with gr.Tab("Extended settings"):
        with gr.Row():
            negative_prompt = gr.Textbox(placeholder="Negative Prompt", show_label=True, label='Negative Prompt:', lines=3, value="[deformed | disfigured], poorly drawn, [bad : wrong] anatomy, [extra | missing | floating | disconnected] limb, (mutated hands and fingers), blurry")
        with gr.Row():
            sampler = gr.Dropdown(value="DPM++ 2M Karras", show_label=True, label="Sampling Method:", choices=[
                "DPM++ 2M Karras", "DPM++ 2S a Karras", "DPM2 a Karras", "DPM2 Karras", "DPM++ SDE Karras", "DEIS", "LMS", "DPM Adaptive", "DPM++ 2M", "DPM2 Ancestral", "DPM++ S", "DPM++ SDE", "DDPM", "DPM Fast", "dpmpp_2s_ancestral", "Euler", "Euler CFG PP", "Euler a", "Euler Ancestral", "Euler+beta", "Heun", "Heun PP2", "DDIM", "LMS Karras", "PLMS", "UniPC", "UniPC BH2"])
        with gr.Row():
            steps = gr.Slider(show_label=True, label="Sampling Steps:", minimum=1, maximum=50, value=35, step=1)
        with gr.Row():
            cfg_scale = gr.Slider(show_label=True, label="CFG Scale:", minimum=1, maximum=20, value=7, step=1)
        with gr.Row():
            seed = gr.Number(show_label=True, label="Seed:", minimum=-1, maximum=1000000, value=-1, step=1)
        with gr.Row():
            width = gr.Slider(label="Width", minimum=512, maximum=2048, step=8, value=896, interactive=True)
        with gr.Row():
            height = gr.Slider(label="Height", minimum=512, maximum=2048, step=8, value=1152, interactive=True)

    with gr.Tab("Upscaling Settings"):
        with gr.Column():
            with gr.Row():
                scale_by = gr.Number(show_label=True, label="How many times to increase:", minimum=1, maximum=2, value=2, step=1)
            with gr.Row():
                method = gr.Dropdown(show_label=True, value="ESRGAN_4x", label="Increasing algorithm", choices=["ScuNET GAN", "SwinIR 4x", "ESRGAN_4x", "R-ESRGAN 4x+", "R-ESRGAN 4x+ Anime6B"])
        with gr.Column():
            with gr.Row():
                gfpgan = gr.Slider(show_label=True, label="Effect GFPGAN (For facial improvement)", minimum=0, maximum=1, value=0, step=0.1)
            with gr.Row():
                codeformer = gr.Slider(show_label=True, label="Effect CodeFormer (Improve the face)", minimum=0, maximum=1, value=0, step=0.1)

    with gr.Column():
        text_button = gr.Button("Generate image", variant="primary", interactive=True, elem_id="generate")
    with gr.Column():
        image_output = gr.Image(show_download_button=True, interactive=False, label='Generated Image 🌄', show_share_button=False, format="png", type="filepath", elem_id="gallery")

    text_button.click(query, inputs=[prompt, negative_prompt, task, steps, sampler, cfg_scale, seed, width, height], outputs=image_output, concurrency_limit=48)
    clear_prompt = gr.Button("Clear 🗑️", variant="primary", elem_id="clear_button")
    clear_prompt.click(lambda: (None, None), None, [prompt, image_output], queue=False, show_api=False)

    img2img_b = gr.Button("Increase the image", variant='secondary')
    image_i2i = gr.Image(show_label=True, label='Increased image:')
    img2img_b.click(mirror, inputs=[image_output, scale_by, method, gfpgan, codeformer], outputs=image_i2i, concurrency_limit=48)


app.queue(default_concurrency_limit=200, max_size=200)  # queue incoming requests with explicit concurrency and size limits
if __name__ == "__main__":
    app.launch()
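Note that app = FastAPI() at the top is immediately shadowed by "with gr.Blocks(...) as app:", so the FastAPI instance is never used and app.queue()/app.launch() act on the Blocks object. If the intent is to actually serve the Gradio UI under FastAPI, the usual pattern is gr.mount_gradio_app; a minimal sketch under that assumption (the names fastapi_app and demo are mine, and it is run with uvicorn rather than launch()):

import gradio as gr
from fastapi import FastAPI

fastapi_app = FastAPI()

with gr.Blocks() as demo:
    ...  # the same UI as in the listing above

demo.queue(default_concurrency_limit=200, max_size=200)
fastapi_app = gr.mount_gradio_app(fastapi_app, demo, path="/")  # serve the Gradio app at the FastAPI root
# run with: uvicorn app:fastapi_app --host 0.0.0.0 --port 7860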