burman-ai committed on
Commit
4faa2a1
·
verified ·
1 Parent(s): 57928ed

Delete app.py

Browse files
Files changed (1) hide show
  1. app.py +0 -144
app.py DELETED
@@ -1,144 +0,0 @@
1
- import gradio as gr
2
- from random import randint
3
- from all_models import models
4
-
5
- from externalmod import gr_Interface_load, randomize_seed
6
-
7
- import asyncio
8
- import os
9
- from threading import RLock
10
lock = RLock()  # Serializes the shared image-file write in infer()/gen_fnseed().
# os.environ.get already returns None when the variable is unset, so the
# old `get(...) if get(...) else None` form was a redundant double lookup.
HF_TOKEN = os.environ.get("HF_TOKEN") # If private or gated models aren't used, ENV setting is unnecessary.
12
-
13
-
14
def load_fn(models):
    """Populate the global ``models_load`` dict with one loaded Gradio
    interface per model name in *models*.

    Models that fail to load are replaced with a stub interface that
    returns no image, so the rest of the UI keeps working.

    Args:
        models: Iterable of model repo names (without the ``models/`` prefix).
    """
    global models_load
    models_load = {}

    for model in models:
        # Plain `in` on a dict tests keys; no need for .keys().
        if model not in models_load:
            try:
                m = gr_Interface_load(f'models/{model}', hf_token=HF_TOKEN)
            except Exception as error:
                print(error)
                # Fallback stub so the corresponding UI slot still renders.
                m = gr.Interface(lambda: None, ['text'], ['image'])
            # Direct item assignment instead of dict.update({...}).
            models_load[model] = m
26
-
27
-
28
- load_fn(models)
29
-
30
-
31
num_models = 9  # Number of image slots rendered side by side.

default_models = models[:num_models]  # Models initially bound to the slots.
inference_timeout = 600  # Seconds allowed per model inference (see infer()).
MAX_SEED=666666666  # Upper bound of the seed slider.
starting_seed = randint(666666000, 666666666)  # Initial seed slider value.
37
-
38
def extend_choices(choices, limit=None):
    """Pad or trim *choices* to exactly *limit* entries.

    Entries beyond ``len(choices)`` are filled with the placeholder
    string ``'NA'`` so every image slot always has a bound name.

    Args:
        choices: Selected model names.
        limit: Target length; defaults to the module-level ``num_models``
            (the previous hard-coded behavior).

    Returns:
        A list of exactly *limit* strings.
    """
    if limit is None:
        limit = num_models
    trimmed = choices[:limit]
    return trimmed + ['NA'] * (limit - len(trimmed))
40
-
41
-
42
def update_imgbox(choices):
    """Rebuild the image components for the current model selection.

    Slots padded with the 'NA' placeholder are hidden; every other slot
    is cleared and relabeled with its model name.
    """
    boxes = []
    for name in extend_choices(choices[:num_models]):
        boxes.append(gr.Image(None, label=name, visible=name != 'NA'))
    return boxes
45
-
46
async def infer(model_str, prompt, seed=1, timeout=inference_timeout):
    """Run one model inference in a worker thread and save the result.

    Args:
        model_str: Key into the global ``models_load`` dict.
        prompt: Text prompt passed to the model.
        seed: Generation seed forwarded to the model.
        timeout: Seconds to wait before cancelling the inference.

    Returns:
        Absolute path of the saved PNG, or None on timeout/failure.
    """
    from pathlib import Path
    kwargs = {"seed": seed}
    noise = ""
    task = asyncio.create_task(asyncio.to_thread(models_load[model_str].fn,
                                prompt=f'{prompt} {noise}', **kwargs, token=HF_TOKEN))
    await asyncio.sleep(0)  # Yield so the task actually starts before we wait.
    result = None
    try:
        result = await asyncio.wait_for(task, timeout=timeout)
    except asyncio.TimeoutError:
        # Only a real timeout is reported as such; previously every
        # exception printed "Task timed out", hiding genuine errors.
        print(f"Task timed out: {model_str}")
        if not task.done(): task.cancel()
    except Exception as e:
        print(e)
        print(f"Task failed: {model_str}")
        if not task.done(): task.cancel()
    if task.done() and result is not None:
        # The file name is shared, so serialize writes across threads.
        with lock:
            png_path = "image.png"
            result.save(png_path)
            image = str(Path(png_path).resolve())
        return image
    return None
68
-
69
-
70
-
71
-
72
def gen_fnseed(model_str, prompt, seed=1):
    """Synchronous Gradio event handler wrapping the async ``infer``.

    Args:
        model_str: Model name, or 'NA' for an unused placeholder slot.
        prompt: Text prompt to draw.
        seed: Generation seed.

    Returns:
        Image path from infer(), a static error-image URL on failure,
        or None when the slot is a placeholder.
    """
    if model_str == 'NA':
        return None
    # Create the loop before the try so `finally: loop.close()` can never
    # hit an unbound name if loop creation itself were to fail.
    loop = asyncio.new_event_loop()
    try:
        result = loop.run_until_complete(infer(model_str, prompt, seed, inference_timeout))
    except (Exception, asyncio.CancelledError) as e:
        print(e)
        print(f"Task aborted: {model_str}")
        with lock:
            # Show a placeholder error image rather than an empty slot.
            # (The dead `result = None` that was immediately overwritten
            # here has been removed.)
            result = "https://huggingface.co/spaces/Yntec/ToyWorld/resolve/main/error.png"
    finally:
        loop.close()
    return result
88
-
89
# --- UI wiring ----------------------------------------------------------
# A prompt box and seed slider feed `num_models` image slots; each slot is
# bound to one model name (hidden Textbox) and generates via gen_fnseed.
# A checkbox group below swaps which models occupy the slots.
with gr.Blocks(theme='Yntec/HaleyCH_Theme_craiyon') as demo:
    # Banner image.
    gr.HTML(
        """
        <div>
        <p> <center><img src="https://huggingface.co/spaces/Yntec/open-craiyon/resolve/main/open_craiyon.png" style="height:79px; width:367px; margin-top: -22px; margin-bottom: -44px;" span title="Best free ai art image generator open craiyon"></center>
        </p>
        """
    )
    with gr.Tab('🖍️ AI models drawing images from any prompt! 🖍️'):
        with gr.Row():
            txt_input = gr.Textbox(label='Your prompt:', lines=4, scale=3)
            gen_button = gr.Button('Draw it! 🖍️', scale=1)
        with gr.Row():
            seed = gr.Slider(label="Use a seed to replicate the same image later (maximum 666666666)", minimum=0, maximum=MAX_SEED, step=1, value=starting_seed, scale=3)
            seed_rand = gr.Button("Randomize Seed 🎲", size="sm", variant="secondary", scale=1)
            # Re-roll the slider value without queueing.
            seed_rand.click(randomize_seed, None, [seed], queue=False)
        #stop_button = gr.Button('Stop', variant = 'secondary', interactive = False)

        # NOTE(review): this lambda takes one parameter but is wired with
        # inputs=None, so Gradio calls it with zero arguments and it raises
        # at click time; it also has no outputs. It looks like a leftover
        # from the commented-out stop_button wiring — confirm whether it
        # can be removed or should become `lambda: ...` with an outputs
        # target.
        gen_button.click(lambda s: gr.update(interactive = True), None)
        gr.HTML(
            """
            <div style="text-align: center; max-width: 1200px; margin: 0 auto;">
            <div>
            <body>
            <div class="center"><p style="margin-bottom: 10px; color: #5b6272;">Scroll down to see more images and select models.</p>
            </div>
            </body>
            </div>
            </div>
            """
        )
        with gr.Row():
            # One unlabeled image slot per default model; labels/visibility
            # are managed later by update_imgbox on selection changes.
            output = [gr.Image(min_width=480) for m in default_models]
            #output = [gr.Image(label = m, min_width=480) for m in default_models]
            # Hidden textboxes carry the model name currently bound to each slot.
            current_models = [gr.Textbox(m, visible = False) for m in default_models]

        # Bind every (model-name, image-slot) pair: both clicking the button
        # and submitting the prompt box trigger a generation for that slot.
        for m, o in zip(current_models, output):
            gen_event = gr.on(triggers=[gen_button.click, txt_input.submit], fn=gen_fnseed,
                              inputs=[m, txt_input, seed], outputs=[o], concurrency_limit=None, queue=False)
            #stop_button.click(lambda s: gr.update(interactive = False), None, stop_button, cancels = [gen_event])
        with gr.Accordion('Model selection'):
            model_choice = gr.CheckboxGroup(models, label = 'Untick the models you will not be using', value=default_models, interactive=True)
            #model_choice = gr.CheckboxGroup(models, label = f'Choose up to {num_models} different models from the 2 available! Untick them to only use one!', value = default_models, multiselect = True, max_choices = num_models, interactive = True, filterable = False)
            # Selection changes update both the visible image boxes and the
            # hidden model-name textboxes that drive generation.
            model_choice.change(update_imgbox, model_choice, output)
            model_choice.change(extend_choices, model_choice, current_models)
        with gr.Row():
            gr.HTML(
                """
                <div class="footer">
                <p> For more than a hundred times more models (that's not a typo) check out <a href="https://huggingface.co/spaces/Yntec/ToyWorld">Toy World</a>!</a>
                </p>
                """
            )

# High limits so many slots can generate concurrently.
demo.queue(default_concurrency_limit=200, max_size=200)
demo.launch(show_api=False, max_threads=400)