Sreerama committed
Commit fce5e9a · Parent: a2531fc
move base model to accordion, remove extra concept button
app.py
CHANGED
@@ -509,9 +509,6 @@ with gr.Blocks(css=css) as demo:
             things_naming = gr.Markdown("You should name your concept with a unique made up word that has low chance of the model already knowing it (e.g.: `fantasy_world` here). Images will be automatically cropped to 512x512.")
 
         with gr.Column():
-            with gr.Row() as what_are_you_training:
-                base_model_to_use = gr.Dropdown(label="Which base model would you like to use?", choices=["v1-5", "v2-512", "v2-768"], value="v1-5", interactive=True)
-
             file_collection = []
             concept_collection = []
             buttons_collection = []
@@ -531,11 +528,11 @@
             file_collection.append(gr.File(label=f'''Upload the images for your {ordinal(x+1) if (x>0) else ""} concept''', file_count="multiple", interactive=True, visible=visible))
             with gr.Column(visible=visible) as row[x]:
                 concept_collection.append(gr.Textbox(label=f'''{ordinal(x+1) if (x>0) else ""} concept prompt - use a unique, made up word to avoid collisions'''))
-                with gr.Row():
-                    if(x < maximum_concepts-1):
-                        buttons_collection.append(gr.Button(value="Add +1 concept", visible=visible))
-                    if(x > 0):
-                        delete_collection.append(gr.Button(value=f"Delete {ordinal(x+1)} concept"))
+                # with gr.Row():
+                #     if(x < maximum_concepts-1):
+                #         buttons_collection.append(gr.Button(value="Add +1 concept", visible=visible))
+                #     if(x > 0):
+                #         delete_collection.append(gr.Button(value=f"Delete {ordinal(x+1)} concept"))
 
             counter_add = 1
             for button in buttons_collection:
@@ -555,6 +552,9 @@
                 counter_delete += 1
 
         with gr.Accordion("Custom Settings", open=False):
+            with gr.Row() as what_are_you_training:
+                base_model_to_use = gr.Dropdown(label="Which base model would you like to use?", choices=["v1-5", "v2-512", "v2-768"], value="v1-5", interactive=True)
+
             swap_auto_calculated = gr.Checkbox(label="Use custom settings")
             gr.Markdown("If not checked, the % of frozen encoder will be tuned automatically to whether you are training an `object`, `person` or `style`. The text-encoder is frozen after 10% of the steps for a style, 30% of the steps for an object and 75% trained for persons. The number of steps varies between 1400 and 2400 depending on how many images uploaded. If you see too many artifacts in your output, it means it may have overfit and you need less steps. If your results aren't really what you wanted, it may be underfitting and you need more steps.")
             steps = gr.Number(label="How many steps", value=2400)
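The gr.Markdown in the last hunk describes the auto-tuning rule used when "Use custom settings" is unchecked: the text encoder is frozen after 10% of the steps for a style, 30% for an object, and 75% for a person, and the step count varies between 1400 and 2400 with the number of uploaded images. Below is a minimal sketch of that rule for reference; the function name auto_settings and the per-image step scaling are assumptions for illustration, not the Space's actual implementation.

# Sketch only: the freeze percentages (style 10%, object 30%, person 75%) and the
# 1400-2400 step range come from the Markdown text above; the helper name and the
# per-image step scaling are assumed.
def auto_settings(training_type: str, num_images: int) -> dict:
    # Fraction of training steps after which the text encoder is frozen.
    freeze_after = {"style": 0.10, "object": 0.30, "person": 0.75}[training_type]
    # Assumed scaling: more uploaded images -> more steps, clamped to 1400-2400.
    steps = max(1400, min(2400, num_images * 150))
    return {"text_encoder_freeze_after": freeze_after, "steps": steps}

print(auto_settings("object", 12))  # {'text_encoder_freeze_after': 0.3, 'steps': 1800}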