Spaces:
Running
Running
Update app.py
Browse files
app.py
CHANGED
@@ -340,18 +340,25 @@ def fonc_load_gallery(id_session,gallery):
|
|
340 |
def fonc_start(id_session,id_module):
|
341 |
task_actu={}
|
342 |
model_actu=""
|
|
|
343 |
for model_plus_tasks in cache_list_task[f"{id_session}"]:
|
344 |
if model_plus_tasks["id_module"]==id_module:
|
345 |
model_actu=model_plus_tasks["model"]
|
346 |
task_actu=model_plus_tasks["task"][0]
|
|
|
|
|
|
|
347 |
if model_actu=="":
|
348 |
for model_plus_tasks in cache_list_task[f"{id_session}"]:
|
349 |
if model_plus_tasks["id_module"]==-1 and model_actu=="":
|
350 |
model_actu=model_plus_tasks["model"]
|
351 |
task_actu=model_plus_tasks["task"][0]
|
352 |
if model_actu=="":#crash
|
|
|
353 |
return None,None
|
|
|
354 |
result=gen_fn(model_actu, task_actu["prompt"], task_actu["nprompt"], task_actu["height"], task_actu["width"], task_actu["steps"], task_actu["cfg"], task_actu["seed"])
|
|
|
355 |
if result!=None:
|
356 |
result=gr.Image(result)
|
357 |
id_image=len(cache_image[f"{id_session}"])
|
@@ -364,6 +371,14 @@ def fonc_start(id_session,id_module):
|
|
364 |
task_actu["id_image"]=id_image
|
365 |
cache_image[f"{id_session}"].append(result.value)
|
366 |
cache_id_image[f"{id_session}"].append(task_actu)
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
367 |
return result
|
368 |
|
369 |
def make_me():
|
|
|
340 |
def fonc_start(id_session, id_module):
    """Run the next queued generation task for one worker module of a session.

    Looks up the task list cached under *id_session*, preferring the entry
    assigned to *id_module* and falling back to an unassigned entry
    (``id_module == -1``).  Generates the image via ``gen_fn``, wraps it in a
    ``gr.Image``, records it in the session image caches, and returns it.

    NOTE(review): reconstructed from a commit-diff view; the original file's
    lines 365-370 (inside the success branch, between computing ``id_image``
    and storing ``task_actu``) are not visible here — confirm against the
    full app.py before relying on this body.

    Parameters
    ----------
    id_session : key into the module-level caches (used as ``f"{id_session}"``).
    id_module  : id of the worker module requesting a task; -1 marks
                 unassigned tasks in the cache.

    Returns
    -------
    gr.Image on success, None when generation fails, and ``(None, None)``
    when no task is available (see note below).
    """
    task_actu = {}
    model_actu = ""
    print("in fonc\n")

    # First pass: a task explicitly assigned to this module.  The loop does
    # not break, so the LAST matching entry wins (preserved from original).
    for model_plus_tasks in cache_list_task[f"{id_session}"]:
        if model_plus_tasks["id_module"] == id_module:
            model_actu = model_plus_tasks["model"]
            task_actu = model_plus_tasks["task"][0]

    print(f"find model : {model_actu}\n")

    # Fallback pass: first entry still unassigned (id_module == -1).
    if model_actu == "":
        for model_plus_tasks in cache_list_task[f"{id_session}"]:
            if model_plus_tasks["id_module"] == -1 and model_actu == "":
                model_actu = model_plus_tasks["model"]
                task_actu = model_plus_tasks["task"][0]

    if model_actu == "":  # crash: nothing to run at all
        print("CRASH\n")
        # NOTE(review): every other exit returns a single value; returning a
        # 2-tuple here looks inconsistent — confirm what the Gradio event
        # binding expects before changing it.
        return None, None

    print("begin gen image\n")
    result = gen_fn(model_actu, task_actu["prompt"], task_actu["nprompt"],
                    task_actu["height"], task_actu["width"],
                    task_actu["steps"], task_actu["cfg"], task_actu["seed"])
    print("image generate !\n")

    if result is not None:  # idiom fix: was `result != None`
        result = gr.Image(result)
        id_image = len(cache_image[f"{id_session}"])
        task_actu["id_image"] = id_image
        cache_image[f"{id_session}"].append(result.value)
        cache_id_image[f"{id_session}"].append(task_actu)
        print("image saved\n")
    else:
        print("fail to generate\n")
        # Count every task still queued in this session (idiom fix: replaces
        # the manual nested counting loop; same value, one expression).
        num_task_undo = sum(len(m["task"])
                            for m in cache_list_task[f"{id_session}"])
        print(f"\n {num_task_undo} tasks undo\n")
    return result
|
383 |
|
384 |
def make_me():
|