Jen-Hung Wang committed · commit 8a2f2ee
Parent(s): 50f5a57

Add KDE func

Files changed: web_app.py (+142 -57)
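For readers skimming the change: the new code estimates a 2-D density map of the detected CNO centres with scikit-learn's KernelDensity, choosing the Gaussian bandwidth by cross-validated grid search and then evaluating the density on a pixel grid. Below is a minimal, self-contained sketch of that idea using the same parameter ranges as the diff; the synthetic coordinates and the fixed 512x512 grid are illustrative assumptions, not part of the commit.

import numpy as np
from sklearn.neighbors import KernelDensity
from sklearn.model_selection import GridSearchCV

# Synthetic 2-D object centres standing in for the detected CNO coordinates.
rng = np.random.default_rng(0)
cno_coor = rng.integers(0, 512, size=(30, 2))

# Cross-validate the Gaussian kernel bandwidth over the same grid the commit uses (20..60).
kde = KernelDensity(kernel='gaussian', metric='euclidean', algorithm='ball_tree')
folds = min(len(cno_coor), 7)  # the diff caps the fold count at 7
gs = GridSearchCV(kde, {'bandwidth': np.linspace(20, 60, 41)}, cv=folds)
gs.fit(cno_coor)
kde.bandwidth = gs.best_params_['bandwidth']
kde.fit(cno_coor)

# Evaluate the fitted density on a 512x512 pixel grid (score_samples returns log-density).
xg, yg = np.meshgrid(np.arange(512), np.arange(512))
grid = np.vstack([xg.ravel(), yg.ravel()]).T
density = np.exp(kde.score_samples(grid)).reshape(xg.shape)
print(density.shape, density.max())

In the app itself the grid spans result.orig_img.shape, and the fitted density is rendered with plt.contourf into the new "KDE" gallery tab.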
web_app.py CHANGED

@@ -4,9 +4,13 @@ import pandas as pd
 import PIL.Image as Image
 import gradio as gr
 import numpy as np
+import matplotlib.pyplot as plt
 import math
+import time
+from sklearn.neighbors import KernelDensity
 from pathlib import Path
 from ultralytics import ASSETS, YOLO
+from sklearn.model_selection import GridSearchCV
 
 DIR_NAME = Path(os.path.dirname(__file__))
 DETECTION_MODEL_n = os.path.join(DIR_NAME, 'models', 'YOLOv8-N_CNO_Detection.pt')
@@ -19,6 +23,7 @@ DETECTION_MODEL_x = os.path.join(DIR_NAME, 'models', 'YOLOv8-X_CNO_Detection.pt')
 # model = YOLO(MODEL)
 # cno_df = pd.DataFrame()
 
+
 def predict_image(name, model, img, conf_threshold, iou_threshold):
     """Predicts and plots labeled objects in an image using YOLOv8 model with adjustable confidence and IOU thresholds."""
     gr.Info("Starting process")
@@ -48,67 +53,139 @@ def predict_image(name, model, img, conf_threshold, iou_threshold):
     )
 
     cno_count = []
+    cno_col = []
     afm_image = []
     cno_image = []
+    kde_image = []
     file_name = []
 
-
+    total_layer_area = []
+    total_layer_cno = []
+    total_layer_density = []
+    avg_area_col = []
+    total_area_col = []
 
     for idx, result in enumerate(results):
         cno = len(result.boxes)
-
+
         file_label = img[idx].split(os.sep)[-1]
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        single_layer_area = []
+        single_layer_cno = []
+        single_layer_density = []
+        total_area = 0
+        if cno < 5:
+            avg_area_col.append(np.nan)
+            total_area_col.append(np.nan)
+            nan_arr = np.empty([25])
+            nan_arr[:] = np.nan
+            total_layer_area.append(nan_arr)
+            total_layer_cno.append(nan_arr)
+            total_layer_density.append(nan_arr)
+        else:
+            cno_coor = np.empty([cno, 2], dtype=int)
+
+            for j in range(cno):
+                # w = r.boxes.xywh[j][2]
+                # h = r.boxes.xywh[j][3]
+                # area = (math.pi * w * h / 4) * 20 * 20 / (512 * 512)
+                # total_area += area
+                # bbox_img = r.orig_img
+                x = round(result.boxes.xywh[j][0].item())
+                y = round(result.boxes.xywh[j][1].item())
+
+                x1 = round(result.boxes.xyxy[j][0].item())
+                y1 = round(result.boxes.xyxy[j][1].item())
+                x2 = round(result.boxes.xyxy[j][2].item())
+                y2 = round(result.boxes.xyxy[j][3].item())
+
+                cno_coor[j] = [x, y]
+                cv2.rectangle(result.orig_img, (x1, y1), (x2, y2), (0, 255, 0), 1)
+            im_array = result.orig_img
+            afm_image.append([img[idx], file_label])
+            cno_image.append([Image.fromarray(im_array[..., ::-1]), file_label])
+            cno_count.append(cno)
+            file_name.append(file_label)
+
+            ### ============================
+
+            kde = KernelDensity(metric='euclidean', kernel='gaussian', algorithm='ball_tree')
+
+            # Finding Optimal Bandwidth
+            ti = time.time()
+            if cno < 7:
+                fold = cno
+            else:
+                fold = 7
+            gs = GridSearchCV(kde, {'bandwidth': np.linspace(20, 60, 41)}, cv=fold)
+            cv = gs.fit(cno_coor)
+            bw = cv.best_params_['bandwidth']
+            tf = time.time()
+            print("Finding optimal bandwidth={:.2f} ({:n}-fold cross-validation): {:.2f} secs".format(bw, cv.cv,
+                                                                                                       (tf - ti)))
+            kde.bandwidth = bw
+            _ = kde.fit(cno_coor)
+
+            xgrid = np.arange(0, result.orig_img.shape[1], 1)
+            ygrid = np.arange(0, result.orig_img.shape[0], 1)
+            xv, yv = np.meshgrid(xgrid, ygrid)
+            xys = np.vstack([xv.ravel(), yv.ravel()]).T
+            gdim = xv.shape
+            zi = np.arange(xys.shape[0])
+            zXY = xys
+            z = np.exp(kde.score_samples(zXY))
+            zg = -9999 + np.zeros(xys.shape[0])
+            zg[zi] = z
+
+            xyz = np.hstack((xys[:, :2], zg[:, None]))
+            x = xyz[:, 0].reshape(gdim)
+            y = xyz[:, 1].reshape(gdim)
+            z = xyz[:, 2].reshape(gdim)
+            levels = np.linspace(0, z.max(), 26)
+            print("levels", levels)
+
+            for j in range(len(levels) - 1):
+                area = np.argwhere(z >= levels[j])
+                area_concatenate = numcat(area)
+                CNO_concatenate = numcat(cno_coor)
+                ecno = np.count_nonzero(np.isin(area_concatenate, CNO_concatenate))
+                layer_area = area.shape[0]
+                if layer_area == 0:
+                    density = np.round(0.0, 4)
+                else:
+                    density = np.round((ecno / layer_area) * 512 * 512 / 400, 4)
+                print("Level {}: Area={}, CNO={}, density={}".format(j, layer_area, ecno, density))
+                single_layer_area.append(layer_area)
+                single_layer_cno.append(ecno)
+                single_layer_density.append(density)
+
+            total_layer_area.append(single_layer_area)
+            total_layer_cno.append(single_layer_cno)
+            total_layer_density.append(single_layer_density)
+
+            # Plot CNO Distribution
+            plt.contourf(x, y, z, levels=levels, cmap=plt.cm.bone)
+            plt.axis('off')
+            # plt.gcf().set_size_inches(8, 8)
+            plt.gcf().set_size_inches(8 * (gdim[1] / gdim[0]), 8)
+            plt.gca().invert_yaxis()
+            plt.xlim(0, gdim[1] - 1)
+            plt.ylim(gdim[0] - 1, 0)
+            kde_image.append([plt.figure(), file_label])
+            #plt.savefig(os.path.join(kde_dir, '{}_{}_{}_KDE.png'.format(file_list[idx], model_type, conf)),
+            #            bbox_inches='tight', pad_inches=0)
+
+
+
+
+
+
+
+
+
+
+
+            ### ============================
+
     data = {
         "Files": file_name,
         "CNO Count": cno_count,
@@ -117,7 +194,15 @@ def predict_image(name, model, img, conf_threshold, iou_threshold):
     # load data into a DataFrame object:
     cno_df = pd.DataFrame(data)
 
-    return cno_df, afm_image, cno_image
+    return cno_df, afm_image, cno_image, kde_image
+
+
+def numcat(arr):
+    arr_size = arr.shape[0]
+    arr_cat = np.empty([arr_size, 1], dtype=np.int32)
+    for i in range(arr.shape[0]):
+        arr_cat[i] = arr[i][0] * 1000 + arr[i][1]
+    return arr_cat
 
 
 def highlight_max(s, props=''):
@@ -181,15 +266,15 @@ with gr.Blocks(title="AFM AI Analysis", theme="default") as app:
                 afm_gallery = gr.Gallery(label="Result", show_label=True, columns=3, object_fit="contain")
             with gr.Tab("CNO"):
                 cno_gallery = gr.Gallery(label="Result", show_label=True, columns=3, object_fit="contain")
-
-
+            with gr.Tab("KDE"):
+                kde_gallery = gr.Gallery(label="Result", show_label=True, columns=3, object_fit="contain")
             test_label = gr.Label(label="Analysis Results")
             # cno_img = gr.Image(type="pil", label="Result")
 
     analyze_btn.click(
         fn=predict_image,
         inputs=[name_textbox, model_radio, input_files, conf_slider, iou_slider],
-        outputs=[analysis_results, afm_gallery, cno_gallery]
+        outputs=[analysis_results, afm_gallery, cno_gallery, kde_gallery]
     )
 
     clear_btn.click(reset, outputs=[name_textbox, gender_radio, age_slider, fitzpatrick, history, model_radio,
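A note on the new numcat helper: it packs each integer coordinate pair into a single code (first value * 1000 + second value) so that np.isin can count, per contour level, how many detection centres fall inside the thresholded region. A small illustration with made-up arrays (the encoding assumes both coordinates stay below 1000, which holds for the 512x512 AFM images):

import numpy as np

def numcat(arr):
    """Pack (a, b) integer pairs into single codes a * 1000 + b, as in the diff."""
    arr_cat = np.empty([arr.shape[0], 1], dtype=np.int32)
    for i in range(arr.shape[0]):
        arr_cat[i] = arr[i][0] * 1000 + arr[i][1]
    return arr_cat

# Hypothetical region pixels and detection centres on a small grid.
region = np.array([[10, 10], [10, 11], [12, 40], [200, 300]])
centers = np.array([[10, 11], [50, 60]])

# Count how many detection centres coincide with a region pixel.
inside = np.count_nonzero(np.isin(numcat(centers), numcat(region)))
print(inside)  # -> 1

One thing worth double-checking in the committed code: np.argwhere yields (row, column) pairs while cno_coor stores (x, y), so the two encodings only line up if the axis order is handled consistently.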