orhir committed on
Commit
5318ebc
·
verified ·
1 Parent(s): b7adf06

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +54 -10
app.py CHANGED
@@ -8,7 +8,7 @@ import gradio as gr
8
  import matplotlib
9
 
10
  from gradio_utils.utils import (process_img, get_select_coords, select_skeleton,
11
- reset_skeleton, reset_kp, process)
12
 
13
  LENGTH = 480 # Length of the square area displaying/editing images
14
 
@@ -45,6 +45,7 @@ with gr.Blocks() as demo:
45
  "skeleton": [],
46
  "prev_point": None,
47
  "curr_type_point": "start",
 
48
  })
49
  with gr.Row():
50
  # Upload & Preprocess Image Column
@@ -57,7 +58,7 @@ with gr.Blocks() as demo:
57
  width=LENGTH,
58
  type="pil",
59
  image_mode="RGB",
60
- label="Preprocess Image",
61
  show_label=True,
62
  interactive=True,
63
  )
@@ -116,19 +117,61 @@ with gr.Blocks() as demo:
116
  gr.Markdown(
117
  """<p style="text-align: center; font-size: 20px">Output</p>"""
118
  )
119
- output_img = gr.Plot(label="Output Image",)
120
  with gr.Row():
121
  eval_btn = gr.Button(value="Evaluate")
122
  with gr.Row():
123
  gr.Markdown("## Examples")
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
124
 
125
- support_image.change(process_img,
126
  inputs=[support_image, global_state],
127
  outputs=[kp_support_image, global_state])
128
  kp_support_image.select(get_select_coords,
129
- [global_state],
130
- [global_state, kp_support_image],
131
- queue=False,)
132
  confirm_kp_button.click(reset_skeleton,
133
  inputs=global_state,
134
  outputs=skel_support_image)
@@ -136,16 +179,17 @@ with gr.Blocks() as demo:
136
  inputs=global_state,
137
  outputs=[kp_support_image, skel_support_image])
138
  undo_skel_button.click(reset_skeleton,
139
- inputs=global_state,
140
- outputs=skel_support_image)
141
  skel_support_image.select(select_skeleton,
142
  inputs=[global_state],
143
  outputs=[global_state, skel_support_image])
144
  eval_btn.click(fn=process,
145
  inputs=[query_image, global_state],
146
- outputs=[output_img, global_state])
147
 
148
  if __name__ == "__main__":
149
  print("Start app", parser.parse_args())
150
  gr.close_all()
151
  demo.launch(show_api=False)
 
 
8
  import matplotlib
9
 
10
  from gradio_utils.utils import (process_img, get_select_coords, select_skeleton,
11
+ reset_skeleton, reset_kp, process, update_examples)
12
 
13
  LENGTH = 480 # Length of the square area displaying/editing images
14
 
 
45
  "skeleton": [],
46
  "prev_point": None,
47
  "curr_type_point": "start",
48
+ "load_example": False,
49
  })
50
  with gr.Row():
51
  # Upload & Preprocess Image Column
 
58
  width=LENGTH,
59
  type="pil",
60
  image_mode="RGB",
61
+ label="Support Image",
62
  show_label=True,
63
  interactive=True,
64
  )
 
117
  gr.Markdown(
118
  """<p style="text-align: center; font-size: 20px">Output</p>"""
119
  )
120
+ output_img = gr.Plot(label="Output Image", )
121
  with gr.Row():
122
  eval_btn = gr.Button(value="Evaluate")
123
  with gr.Row():
124
  gr.Markdown("## Examples")
125
+ with gr.Row():
126
+ example_null = gr.Textbox(type='text',
127
+ visible=False
128
+ )
129
+ with gr.Row():
130
+ examples = gr.Examples([
131
+ ['examples/dog2.png',
132
+ 'examples/dog1.png',
133
+ json.dumps({
134
+ 'points': [(232, 200), (312, 204), (228, 264), (316, 472), (316, 616), (296, 868), (412, 872),
135
+ (416, 624), (604, 608), (648, 860), (764, 852), (696, 608), (684, 432)],
136
+ 'skeleton': [(0, 1), (1, 2), (0, 2), (3, 4), (4, 5),
137
+ (3, 7), (7, 6), (3, 12), (12, 8), (8, 9),
138
+ (12, 11), (11, 10)],
139
+ })
140
+ ],
141
+ ['examples/sofa1.jpg',
142
+ 'examples/sofa2.jpg',
143
+ json.dumps({
144
+ 'points': [(112, 328), (120, 260), (104, 208), (200, 260), (208, 336), (216, 212), (208, 172),
145
+ (284, 180), (276, 324), (156, 308), (172, 228), (256, 232), (168, 184), (260, 196)]
146
+
147
+ ,
148
+ 'skeleton': [(0, 1), (3, 1), (3, 4), (10, 9), (11, 8),
149
+ (1, 10), (10, 11), (11, 3), (1, 2), (7, 6),
150
+ (5, 13), (5, 3), (13, 11), (12, 10), (12, 2),
151
+ (6, 10), (7, 11)],
152
+ })],
153
+ ['examples/person1.jpeg',
154
+ 'examples/person2.jpeg',
155
+ json.dumps({
156
+ 'points': [(380, 484), (640, 488), (520, 616), (424, 736), (612, 724)],
157
+ 'skeleton': [(0, 1), (1, 2), (0, 2), (2, 3), (2, 4),
158
+ (4, 3)],
159
+ })]
160
+ ],
161
+ inputs=[support_image, query_image, example_null],
162
+ outputs=[support_image, kp_support_image, skel_support_image, query_image, global_state],
163
+ fn=update_examples,
164
+ run_on_click=True,
165
+ examples_per_page=5,
166
+ )
167
 
168
+ support_image.upload(process_img,
169
  inputs=[support_image, global_state],
170
  outputs=[kp_support_image, global_state])
171
  kp_support_image.select(get_select_coords,
172
+ [global_state],
173
+ [global_state, kp_support_image],
174
+ queue=False, )
175
  confirm_kp_button.click(reset_skeleton,
176
  inputs=global_state,
177
  outputs=skel_support_image)
 
179
  inputs=global_state,
180
  outputs=[kp_support_image, skel_support_image])
181
  undo_skel_button.click(reset_skeleton,
182
+ inputs=global_state,
183
+ outputs=skel_support_image)
184
  skel_support_image.select(select_skeleton,
185
  inputs=[global_state],
186
  outputs=[global_state, skel_support_image])
187
  eval_btn.click(fn=process,
188
  inputs=[query_image, global_state],
189
+ outputs=[output_img])
190
 
191
  if __name__ == "__main__":
192
  print("Start app", parser.parse_args())
193
  gr.close_all()
194
  demo.launch(show_api=False)
195
+