clockclock committed (verified)
Commit ac07878 · Parent: 5819ee4

Update app.py

Files changed (1): app.py (+108, -53)
app.py CHANGED
@@ -5,7 +5,6 @@ from PIL import Image
 import os
 
 # Try to load the Haar Cascade classifier for face detection
-# Handle the case where the file might not be found
 face_cascade = None
 cascade_paths = [
     "haarcascade_frontalface_default.xml",
@@ -29,7 +28,7 @@ if face_cascade is None or face_cascade.empty():
     print("Warning: Could not load Haar Cascade classifier. Face detection will be disabled.")
     face_cascade = None
 
-def process_image(image, effect_type):
+def process_image(image, click_x, click_y, effect_type):
     if image is None:
         return None, "Please upload an image first."
 
@@ -37,25 +36,52 @@ def process_image(image, effect_type):
     img_np_bgr = cv2.cvtColor(img_np, cv2.COLOR_RGB2BGR)
     processed_img_np_bgr = img_np_bgr.copy()
 
-    # Only try face detection if cascade is loaded
-    faces = []
-    if face_cascade is not None:
-        gray = cv2.cvtColor(img_np_bgr, cv2.COLOR_BGR2GRAY)
-        try:
-            faces = face_cascade.detectMultiScale(gray, 1.1, 4)
-        except Exception as e:
-            print(f"Face detection error: {e}")
-            faces = []
-
     status_message = ""
+    applied_to_region = False
+
+    # Prioritize clicked region if available
+    if click_x is not None and click_y is not None:
+        # Try to find a face near the click
+        faces = []
+        if face_cascade is not None:
+            gray = cv2.cvtColor(img_np_bgr, cv2.COLOR_BGR2GRAY)
+            try:
+                all_faces = face_cascade.detectMultiScale(gray, 1.1, 4)
+                min_distance = float("inf")
+                target_face = None
+                for (fx, fy, fw, fh) in all_faces:
+                    face_center_x = fx + fw // 2
+                    face_center_y = fy + fh // 2
+                    distance = np.sqrt((face_center_x - click_x)**2 + (face_center_y - click_y)**2)
+                    if distance < min_distance and distance < 100:  # Within 100 pixels of click
+                        min_distance = distance
+                        target_face = (fx, fy, fw, fh)
+                if target_face:
+                    faces.append(target_face)
+            except Exception as e:
+                print(f"Face detection error during click processing: {e}")
 
-    if len(faces) > 0:
-        # Apply effect to all detected faces
-        for (x, y, w, h) in faces:
+        if len(faces) > 0:
+            # Apply effect to the detected face near click
+            x, y, w, h = faces[0]
             roi = processed_img_np_bgr[y:y+h, x:x+w]
+            status_message = f"Applied {effect_type} effect to detected face near click."
+            applied_to_region = True
+        else:
+            # Apply effect to a general region around the click
+            region_size = 100
+            x1 = max(0, int(click_x - region_size // 2))
+            y1 = max(0, int(click_y - region_size // 2))
+            x2 = min(image.width, int(click_x + region_size // 2))
+            y2 = min(image.height, int(click_y + region_size // 2))
+
+            roi = processed_img_np_bgr[y1:y2, x1:x2]
+            status_message = f"Applied {effect_type} effect to clicked region."
+            applied_to_region = True
 
+    if applied_to_region:
         if effect_type == "blur":
-            processed_roi = cv2.GaussianBlur(roi, (35, 35), 0)
+            processed_roi = cv2.GaussianBlur(roi, (15, 15), 0)
         elif effect_type == "sharpen":
             kernel = np.array([[-1,-1,-1], [-1,9,-1], [-1,-1,-1]])
             processed_roi = cv2.filter2D(roi, -1, kernel)
@@ -68,44 +94,61 @@ def process_image(image, effect_type):
             processed_roi = cv2.resize(temp, (w_roi, h_roi), interpolation=cv2.INTER_NEAREST)
         else:
             processed_roi = roi
+
+        if len(faces) > 0:
+            processed_img_np_bgr[y:y+h, x:x+w] = processed_roi
+        else:
+            processed_img_np_bgr[y1:y2, x1:x2] = processed_roi
 
-            processed_img_np_bgr[y:y+h, x:x+w] = processed_roi
-
-        status_message = f"Applied {effect_type} effect to {len(faces)} detected face(s)."
-    else:
-        # Apply effect to center region if no faces detected
-        h, w = img_np_bgr.shape[:2]
-        center_x, center_y = w // 2, h // 2
-        region_size = min(200, w//3, h//3)
-
-        x1 = max(0, center_x - region_size // 2)
-        y1 = max(0, center_y - region_size // 2)
-        x2 = min(w, center_x + region_size // 2)
-        y2 = min(h, center_y + region_size // 2)
-
-        roi = processed_img_np_bgr[y1:y2, x1:x2]
-
-        if effect_type == "blur":
-            processed_roi = cv2.GaussianBlur(roi, (15, 15), 0)
-        elif effect_type == "sharpen":
-            kernel = np.array([[-1,-1,-1], [-1,9,-1], [-1,-1,-1]])
-            processed_roi = cv2.filter2D(roi, -1, kernel)
-        elif effect_type == "grayscale":
-            processed_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
-            processed_roi = cv2.cvtColor(processed_roi, cv2.COLOR_GRAY2BGR)
-        elif effect_type == "pixelate":
-            h_roi, w_roi = roi.shape[:2]
-            temp = cv2.resize(roi, (w_roi//10, h_roi//10), interpolation=cv2.INTER_LINEAR)
-            processed_roi = cv2.resize(temp, (w_roi, h_roi), interpolation=cv2.INTER_NEAREST)
-        else:
-            processed_roi = roi
+    if not applied_to_region:  # Fallback if no click or no specific region applied
+        # Apply effect to all detected faces if no click or no face near click
+        faces = []
+        if face_cascade is not None:
+            gray = cv2.cvtColor(img_np_bgr, cv2.COLOR_BGR2GRAY)
+            try:
+                faces = face_cascade.detectMultiScale(gray, 1.1, 4)
+            except Exception as e:
+                print(f"Face detection error: {e}")
+                faces = []
 
-        processed_img_np_bgr[y1:y2, x1:x2] = processed_roi
-
-        if face_cascade is None:
-            status_message = f"Applied {effect_type} effect to center region (face detection unavailable)."
+        if len(faces) > 0:
+            for (x, y, w, h) in faces:
+                roi = processed_img_np_bgr[y:y+h, x:x+w]
+
+                if effect_type == "blur":
+                    processed_roi = cv2.GaussianBlur(roi, (35, 35), 0)
+                elif effect_type == "sharpen":
+                    kernel = np.array([[-1,-1,-1], [-1,9,-1], [-1,-1,-1]])
+                    processed_roi = cv2.filter2D(roi, -1, kernel)
+                elif effect_type == "grayscale":
+                    processed_roi = cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY)
+                    processed_roi = cv2.cvtColor(processed_roi, cv2.COLOR_GRAY2BGR)
+                elif effect_type == "pixelate":
+                    h_roi, w_roi = roi.shape[:2]
+                    temp = cv2.resize(roi, (w_roi//10, h_roi//10), interpolation=cv2.INTER_LINEAR)
+                    processed_roi = cv2.resize(temp, (w_roi, h_roi), interpolation=cv2.INTER_NEAREST)
+                else:
+                    processed_roi = roi
+
+                processed_img_np_bgr[y:y+h, x:x+w] = processed_roi
+            status_message = f"Applied {effect_type} effect to {len(faces)} detected face(s)."
         else:
-            status_message = f"No faces detected. Applied {effect_type} effect to center region."
+            # Apply effect to center region if no faces detected and no click
+            h, w = img_np_bgr.shape[:2]
+            center_x, center_y = w // 2, h // 2
+            region_size = min(200, w//3, h//3)
+
+            x1 = max(0, center_x - region_size // 2)
+            y1 = max(0, center_y - region_size // 2)
+            x2 = min(w, center_x + region_size // 2)
+            y2 = min(h, center_y + region_size // 2)
+
+            roi = processed_img_np_bgr[y1:y2, x1:x1+roi.shape[1]] = processed_roi
+
+            if face_cascade is None:
+                status_message = f"Applied {effect_type} effect to center region (face detection unavailable)."
+            else:
+                status_message = f"No faces detected. Applied {effect_type} effect to center region."
 
     img_pil = Image.fromarray(cv2.cvtColor(processed_img_np_bgr, cv2.COLOR_BGR2RGB))
     return img_pil, status_message
@@ -165,11 +208,12 @@ with gr.Blocks(css=css, title="AI Image Editor") as demo:
         <strong>Instructions:</strong>
         <ol>
             <li>Upload an image using the file uploader</li>
+            <li>Click on the image to select a region (optional)</li>
            <li>Choose an effect from the dropdown menu</li>
            <li>Click "Apply Effect" to process the image</li>
            <li>If face detection is available, use "Detect Faces" to see detected faces</li>
         </ol>
-        <em>Note: If face detection is available, effects will be applied to detected faces. Otherwise, effects will be applied to the center region.</em>
+        <em>Note: If you click on the image, the effect will be applied to the clicked region (prioritizing faces near the click). Otherwise, if face detection is available, effects will be applied to all detected faces. As a last resort, effects will be applied to the center region.</em>
         </div>
     """)
 
@@ -207,9 +251,20 @@ with gr.Blocks(css=css, title="AI Image Editor") as demo:
         height=400
     )
 
+    # Store click coordinates
+    clicked_x = gr.State(None)
+    clicked_y = gr.State(None)
+
+    def get_coords(evt: gr.SelectData):
+        if evt.index is not None and len(evt.index) == 2:
+            return evt.index[0], evt.index[1]
+        return None, None
+
+    input_image.select(get_coords, None, [clicked_x, clicked_y])
+
     process_button.click(
         fn=process_image,
-        inputs=[input_image, effect_dropdown],
+        inputs=[input_image, clicked_x, clicked_y, effect_dropdown],
        outputs=[output_image, status_text]
    )
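Review notes on the new click-to-edit flow:

The wiring at the bottom of the diff relies on Gradio's select event: for a gr.Image, the SelectData object passed to the handler exposes the clicked pixel as an [x, y] pair in evt.index, which the commit stores in two gr.State values and forwards to process_image. A stripped-down sketch of that pattern in isolation, with illustrative component names (describe_click is not part of the commit):

    import gradio as gr

    def get_coords(evt: gr.SelectData):
        # For an Image component, SelectData.index is the clicked pixel as [x, y].
        if evt.index is not None and len(evt.index) == 2:
            return evt.index[0], evt.index[1]
        return None, None

    def describe_click(image, x, y):
        if image is None:
            return "Upload an image first."
        if x is None or y is None:
            return "No click recorded yet."
        return f"Last click at ({x}, {y})."

    with gr.Blocks() as demo:
        img = gr.Image(type="pil", label="Input")
        clicked_x = gr.State(None)
        clicked_y = gr.State(None)
        status = gr.Textbox(label="Status")
        button = gr.Button("Report click")

        # Capture the click into State, then reuse it in a later event.
        img.select(get_coords, None, [clicked_x, clicked_y])
        button.click(describe_click, [img, clicked_x, clicked_y], [status])

    # demo.launch()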
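Inside process_image, the clicked coordinates are matched against detected faces by picking the face whose center is nearest to the click, but only if that center lies within 100 pixels; otherwise the commit falls back to a fixed 100x100 box around the click. The same selection rule as a standalone helper (the function name is illustrative, not from the commit):

    import numpy as np

    def face_near_click(faces, click_x, click_y, max_distance=100):
        # Return the (x, y, w, h) whose center is closest to the click,
        # or None if no face center lies within max_distance pixels.
        best, best_dist = None, float("inf")
        for (fx, fy, fw, fh) in faces:
            cx, cy = fx + fw // 2, fy + fh // 2
            dist = np.hypot(cx - click_x, cy - click_y)
            if dist < best_dist and dist < max_distance:
                best, best_dist = (fx, fy, fw, fh), dist
        return best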
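One line in the new center-region fallback (the final else branch of process_image) looks suspect: "roi = processed_img_np_bgr[y1:y2, x1:x1+roi.shape[1]] = processed_roi" chains two assignments, references roi and processed_roi before they are defined on that path, and drops the effect application that the removed code performed there. A minimal sketch of what that branch presumably intends, reusing the slice, process, and write-back steps and the effect parameters from the removed code (apply_effect and apply_center_fallback are illustrative helper names, not from the commit):

    import cv2
    import numpy as np

    def apply_effect(roi, effect_type):
        # Same effect chain the diff uses elsewhere; kernel and sizes taken from the commit.
        if effect_type == "blur":
            return cv2.GaussianBlur(roi, (15, 15), 0)
        if effect_type == "sharpen":
            kernel = np.array([[-1, -1, -1], [-1, 9, -1], [-1, -1, -1]])
            return cv2.filter2D(roi, -1, kernel)
        if effect_type == "grayscale":
            return cv2.cvtColor(cv2.cvtColor(roi, cv2.COLOR_BGR2GRAY), cv2.COLOR_GRAY2BGR)
        if effect_type == "pixelate":
            h_roi, w_roi = roi.shape[:2]
            temp = cv2.resize(roi, (w_roi // 10, h_roi // 10), interpolation=cv2.INTER_LINEAR)
            return cv2.resize(temp, (w_roi, h_roi), interpolation=cv2.INTER_NEAREST)
        return roi

    def apply_center_fallback(img_bgr, effect_type):
        # Center region sized as in the commit: min(200, w//3, h//3), clamped to the image.
        h, w = img_bgr.shape[:2]
        region_size = min(200, w // 3, h // 3)
        cx, cy = w // 2, h // 2
        x1, y1 = max(0, cx - region_size // 2), max(0, cy - region_size // 2)
        x2, y2 = min(w, cx + region_size // 2), min(h, cy + region_size // 2)
        # Slice, process, write back: the three steps the removed code performed.
        roi = img_bgr[y1:y2, x1:x2]
        img_bgr[y1:y2, x1:x2] = apply_effect(roi, effect_type)
        return img_bgr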