DemiPoto committed on
Commit
3983f0d
·
verified ·
1 Parent(s): bed04b1

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +37 -17
app.py CHANGED
@@ -13,6 +13,8 @@ lock = RLock()
13
  HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
14
 
15
 
 
 
16
  now2 = 0
17
  inference_timeout = 300
18
  MAX_SEED = 2**32-1
@@ -25,6 +27,7 @@ nb_models=nb_mod_dif*nb_rep
25
  cache_image={}
26
  cache_id_image={}
27
  cache_list_task={}
 
28
 
29
 
30
  def load_fn(models):
@@ -164,6 +167,7 @@ def set_session(id):
164
  cache_id_image[f"{randTemp}"]=[]
165
 
166
  cache_list_task[f"{randTemp}"]=[]
 
167
  return gr.Number(visible=False,value=randTemp)
168
  else :
169
  return id
@@ -301,8 +305,20 @@ def crea_list_task(id_session,list_param,list_models_to_gen,nb_images_by_prompt)
301
  for i in range(nb_images_by_prompt):
302
  dict_temp["task"].append({"prompt":p[0],"nprompt":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
303
  cache_list_task[f"{id_session}"].append(dict_temp)
304
-
305
 
 
 
 
 
 
 
 
 
 
 
 
 
 
306
  def cutStrg(longStrg,start,end):
307
  shortStrg=''
308
  for i in range(end-start):
@@ -346,11 +362,12 @@ def fonc_load_gallery_by_model(id_session,gallery,models,index):
346
  if cache_image[f"{id_session}"][i]["model"]==models[index]:
347
  gallery=add_gallery(cache_image[f"{id_session}"][i]["image"],cache_image[f"{id_session}"][i]["model"],gallery)
348
  return gallery
349
-
350
  def fonc_start(id_session,id_module,s,cont):
351
  if cont==False:
 
352
  print("manual stop")
353
- return None,gr.Textbox(s)
354
  task_actu={}
355
  model_actu=""
356
  print(f"in fonc : id module={id_module}\n")
@@ -369,9 +386,10 @@ def fonc_start(id_session,id_module,s,cont):
369
  model_actu=model_plus_tasks["model"]
370
  task_actu=model_plus_tasks["task"][0]
371
  print(f"module num {id_module} take {model_actu}\n")
372
- if model_actu=="":#crash
 
373
  print("Stop with :"+s+"\n")
374
- return None,gr.Textbox(s)
375
  print("begin gen image:")
376
  print(model_actu)
377
  print(task_actu)
@@ -383,8 +401,10 @@ def fonc_start(id_session,id_module,s,cont):
383
  for model_plus_tasks in cache_list_task[f"{id_session}"]:
384
  if model_plus_tasks["id_module"]==id_module:
385
  model_plus_tasks["task"].remove(task_actu)
 
386
  if len(model_plus_tasks["task"])==0:
387
  cache_list_task[f"{id_session}"].remove(model_plus_tasks)
 
388
 
389
  task_actu["id_image"]=id_image
390
  task_actu["model"]=model_actu
@@ -394,13 +414,14 @@ def fonc_start(id_session,id_module,s,cont):
394
  cache_image[f"{id_session}"].append(task_actu)
395
  print("image saved\n")
396
  else:
 
397
  print("fail to generate\n")
398
  num_task_to_do=0
399
  for model_plus_tasks in cache_list_task[f"{id_session}"]:
400
  for task in model_plus_tasks["task"]:
401
  num_task_to_do+=1
402
  print(f"\n {num_task_to_do} tasks to do\n")
403
- return result , gr.Textbox(s+"1")
404
 
405
  def fonc_init(s):
406
  return gr.Textbox(s+"1")
@@ -494,15 +515,15 @@ def make_me():
494
 
495
 
496
  with gr.Column():
497
- with gr.Row():
498
- nb_req_simult=gr.Number(10,"Number simultaneous request")
499
- nb_req_simult_max_by_mod=gr.Number(2,"Number max simultaneous request by model")
500
 
501
  button_start=gr.Button("START")
502
  button_stop=gr.Button("STOP")
503
  cont=gr.Checkbox(True,visible=False)
504
  button_start.click(lambda:True,[],[cont])
505
  button_stop.click(lambda:False,[],[cont])
 
 
 
506
  with gr.Row():
507
  outputs=[]
508
  id_modules=[]
@@ -513,19 +534,18 @@ def make_me():
513
  outputs.append(gr.Image(None,interactive=False,visible=False))
514
  id_modules.append(gr.Number(i,interactive=False,visible=False))
515
  states.append(gr.Textbox("1",interactive=False,visible=False))
516
- print(len(outputs))
517
- print(outputs[0])
518
  for o,i,s in zip(outputs,id_modules,states):
519
  #o.change(fonc_start,[id_session,i],[o])
520
  #o.change(test_change,[],[])
521
- s.change(fonc_start,[id_session,i,s,cont],[o,s])
522
  #button_start.click(lambda : gr.Image(None),[],[o])
523
  gen_event = gr.on(triggers=[button_start.click], fn=fonc_init,inputs=[s], outputs=[s])
524
-
525
- gallery = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
526
- interactive=False, show_share_button=True, container=True, format="png",
527
- preview=True, object_fit="cover",columns=4,rows=4)
528
- with gr.Column():
 
529
  button_load_gallery=gr.Button("Load Gallery All")
530
  button_load_gallery.click(fonc_load_gallery,[id_session,gallery],[gallery])
531
 
 
13
  HF_TOKEN = os.environ.get("HF_TOKEN") if os.environ.get("HF_TOKEN") else None # If private or gated models aren't used, ENV setting is unnecessary.
14
 
15
 
16
+ nb_req_simult=10 ########
17
+
18
  now2 = 0
19
  inference_timeout = 300
20
  MAX_SEED = 2**32-1
 
27
  cache_image={}
28
  cache_id_image={}
29
  cache_list_task={}
30
+ cache_text_actu={}
31
 
32
 
33
  def load_fn(models):
 
167
  cache_id_image[f"{randTemp}"]=[]
168
 
169
  cache_list_task[f"{randTemp}"]=[]
170
+ cache_text_actu[f"{randTemp}"]={}
171
  return gr.Number(visible=False,value=randTemp)
172
  else :
173
  return id
 
305
  for i in range(nb_images_by_prompt):
306
  dict_temp["task"].append({"prompt":p[0],"nprompt":p[1],"width":p[2],"height":p[3],"steps":p[4],"cfg":p[5],"seed":p[6]})
307
  cache_list_task[f"{id_session}"].append(dict_temp)
 
308
 
309
+ cache_text_actu[f"{id_session}"]={"nb_modules_use":nb_req_simult,"stop":False,"nb_fail":0,
310
+ "nb_models_to_do":len(list_models_to_gen) ,"nb_models_tot":len(list_models_to_gen) ,
311
+ "nb_tasks_to_do":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt ,
312
+ "nb_tasks_tot":len(list_models_to_gen)*len(list_param)*nb_images_by_prompt }
313
+
314
+ def fonc_update_actu(text_actu,id):
315
+ s=""
316
+ s+=f"modules: {cache_text_actu[f'{id}']['nb_modules_use']}/{nb_req_simult}\n"
317
+ s+=f"models remaining: {cache_text_actu[f'{id}']['nb_models_to_do']}/{cache_text_actu[f'{id}']['nb_models_tot']}\n"
318
+ s+=f"images remaining: {cache_text_actu[f'{id}']['nb_tasks_to_do']}/{cache_text_actu[f'{id}']['nb_tasks_tot']}\n"
319
+ s+=f"fail attempt: {cache_text_actu[f'{id}']['nb_fail']}"
320
+ return gr.Textbox(s)
321
+
322
  def cutStrg(longStrg,start,end):
323
  shortStrg=''
324
  for i in range(end-start):
 
362
  if cache_image[f"{id_session}"][i]["model"]==models[index]:
363
  gallery=add_gallery(cache_image[f"{id_session}"][i]["image"],cache_image[f"{id_session}"][i]["model"],gallery)
364
  return gallery
365
+
366
  def fonc_start(id_session,id_module,s,cont):
367
  if cont==False:
368
+ cache_text_actu[f"{id_session}"]["nb_modules_use"]-=1
369
  print("manual stop")
370
+ return None,gr.Textbox(s),gr.Number(randint(1,MAX_SEED))
371
  task_actu={}
372
  model_actu=""
373
  print(f"in fonc : id module={id_module}\n")
 
386
  model_actu=model_plus_tasks["model"]
387
  task_actu=model_plus_tasks["task"][0]
388
  print(f"module num {id_module} take {model_actu}\n")
389
+ if model_actu=="":
390
+ cache_text_actu[f"{id_session}"]["nb_modules_use"]-=1
391
  print("Stop with :"+s+"\n")
392
+ return None,gr.Textbox(s),gr.Number(randint(1,MAX_SEED))
393
  print("begin gen image:")
394
  print(model_actu)
395
  print(task_actu)
 
401
  for model_plus_tasks in cache_list_task[f"{id_session}"]:
402
  if model_plus_tasks["id_module"]==id_module:
403
  model_plus_tasks["task"].remove(task_actu)
404
+ cache_text_actu[f"{id_session}"]["nb_tasks_to_do"]-=1
405
  if len(model_plus_tasks["task"])==0:
406
  cache_list_task[f"{id_session}"].remove(model_plus_tasks)
407
+ cache_text_actu[f"{id_session}"]["nb_models_to_do"]-=1
408
 
409
  task_actu["id_image"]=id_image
410
  task_actu["model"]=model_actu
 
414
  cache_image[f"{id_session}"].append(task_actu)
415
  print("image saved\n")
416
  else:
417
+ cache_text_actu[f"{id_session}"]["nb_fail"]+=1
418
  print("fail to generate\n")
419
  num_task_to_do=0
420
  for model_plus_tasks in cache_list_task[f"{id_session}"]:
421
  for task in model_plus_tasks["task"]:
422
  num_task_to_do+=1
423
  print(f"\n {num_task_to_do} tasks to do\n")
424
+ return result , gr.Textbox(s+"1"),gr.Number(randint(1,MAX_SEED))
425
 
426
  def fonc_init(s):
427
  return gr.Textbox(s+"1")
 
515
 
516
 
517
  with gr.Column():
 
 
 
518
 
519
  button_start=gr.Button("START")
520
  button_stop=gr.Button("STOP")
521
  cont=gr.Checkbox(True,visible=False)
522
  button_start.click(lambda:True,[],[cont])
523
  button_stop.click(lambda:False,[],[cont])
524
+ text_actu=gr.Textbox("",label="in progress",interactive=False,lines=6)
525
+ update_actu=gr.Number(0,visible=False)
526
+ update_actu.change(fonc_update_actu,[text_actu,id_session],[text_actu])
527
  with gr.Row():
528
  outputs=[]
529
  id_modules=[]
 
534
  outputs.append(gr.Image(None,interactive=False,visible=False))
535
  id_modules.append(gr.Number(i,interactive=False,visible=False))
536
  states.append(gr.Textbox("1",interactive=False,visible=False))
 
 
537
  for o,i,s in zip(outputs,id_modules,states):
538
  #o.change(fonc_start,[id_session,i],[o])
539
  #o.change(test_change,[],[])
540
+ s.change(fonc_start,[id_session,i,s,cont],[o,s,update_actu])
541
  #button_start.click(lambda : gr.Image(None),[],[o])
542
  gen_event = gr.on(triggers=[button_start.click], fn=fonc_init,inputs=[s], outputs=[s])
543
+
544
+ with gr.Column(scale=2):
545
+ gallery = gr.Gallery(label="Output", show_download_button=True, elem_classes="gallery",
546
+ interactive=False, show_share_button=True, container=True, format="png",
547
+ preview=True, object_fit="cover",columns=4,rows=4)
548
+ with gr.Column(scale=3):
549
  button_load_gallery=gr.Button("Load Gallery All")
550
  button_load_gallery.click(fonc_load_gallery,[id_session,gallery],[gallery])
551