Commit 2529764
Parent(s): 0679e0d
update: version 2
Files changed:
- 0.png +0 -0
- 100.png +0 -0
- 134.png +0 -0
- 19.png +0 -0
- 36.png +0 -0
- 44.png +0 -0
- 602.png +0 -0
- 7316.png +0 -0
- 929.png +0 -0
- Saitama_Team_Presentation_Page1.png +0 -0
- Saitama_Team_Presentation_Page2.png +0 -0
- app.py +11 -436
- app_pages/about.py +15 -0
- app_pages/home.py +26 -0
- app_pages/main.py +5 -0
- multipage.py +68 -0
- requirements.txt +2 -59
0.png DELETED (binary file, 8.81 kB)
100.png DELETED (binary file, 8.25 kB)
134.png DELETED (binary file, 8.39 kB)
19.png DELETED (binary file, 8.82 kB)
36.png DELETED (binary file, 8.27 kB)
44.png DELETED (binary file, 8.45 kB)
602.png DELETED (binary file, 7.37 kB)
7316.png DELETED (binary file, 8.91 kB)
929.png DELETED (binary file, 7 kB)
Saitama_Team_Presentation_Page1.png ADDED (binary file)
Saitama_Team_Presentation_Page2.png ADDED (binary file)
app.py
CHANGED
@@ -1,443 +1,18 @@

Old version:

import numpy as np
import cv2
import streamlit as st

import
from openai import OpenAI
import copy
import time
import io
import uuid
import html

st.set_page_config(
    page_title=

    layout="wide",
    initial_sidebar_state="expanded",
)


        <div style="display:none" id="{div_id}">
            <iframe src="javascript: \
                var script = document.createElement('script'); \
                script.type = 'text/javascript'; \
                script.text = {html.escape(repr(source))}; \
                var div = window.parent.document.getElementById('{div_id}'); \
                div.appendChild(script); \
                div.parentElement.parentElement.parentElement.style.display = 'none'; \
            "/>
        </div>
        """,
        unsafe_allow_html=True,
    )

def screenshot_window() -> None:
    # JS Code to be executed
    source = """
    // Function to detect if the current browser is Chrome
    const isChrome = () => /Chrome/.test(navigator.userAgent) && /Google Inc/.test(navigator.vendor);

    /*
    const button = document.getElementById('reportButton');
    button.addEventListener('click', function() {
        // Alert and exit if the browser is Chrome
        if (isChrome()) {
            //alert("Currently this function is available only on Firefox!");
            //button.style.display = 'none'; // Hides the button
            //return;
        }

        // Load a script dynamically and execute a callback after loading
        const loadScript = (url, isLoaded, callback) => {
            if (!isLoaded()) {
                const script = document.createElement('script');
                script.type = 'text/javascript';
                script.onload = callback;
                script.src = url;
                document.head.appendChild(script);
            } else {
                callback();
            }
        };

        // Check if html2canvas library is loaded
        const isHtml2CanvasLoaded = () => typeof html2canvas !== 'undefined';

        // Capture an individual iframe and call a callback with the result
        const captureIframe = (iframe, callback) => {
            try {
                const iframeDoc = iframe.contentDocument || iframe.contentWindow.document;
                console.log(iframeDoc)
                html2canvas(iframeDoc.body, {
                    scale: 1,
                    logging: true,
                    useCORS: true,
                    allowTaint: true
                }).then(canvas => {
                    callback(canvas ? canvas : null);
                }).catch(error => {
                    console.error('Could not capture iframe:', error);
                    callback(null);
                });
            } catch (error) {
                console.error('Could not access iframe:', error);
                callback(null);
            }
        };

        // Main function to capture all windows
        const captureAllWindows = () => {
            const streamlitDoc = window.parent.document;
            const stApp = streamlitDoc.querySelector('.main > .block-container');
            const iframes = Array.from(stApp.querySelectorAll('iframe'));
            let capturedImages = [];

            // Process each iframe sequentially
            const processIframes = (index = 0) => {
                if (index < iframes.length) {
                    captureIframe(iframes[index], function(canvas) {
                        if (canvas) {
                            const img = document.createElement('img');
                            img.src = canvas.toDataURL('image/png');
                            capturedImages.push({iframe: iframes[index], img: img});
                        } else {
                            console.error('Skipping an iframe due to capture failure.');
                        }
                        processIframes(index + 1);
                    });
                } else {
                    // Capture the main app window after processing all iframes
                    html2canvas(stApp, {
                        onclone: function(clonedDocument) {
                            const clonedIframes = Array.from(clonedDocument.querySelectorAll('iframe'));
                            capturedImages.forEach(({img}, index) => {
                                if (index < clonedIframes.length) {
                                    const clonedIframe = clonedIframes[index];
                                    clonedIframe.parentNode.replaceChild(img, clonedIframe);
                                }
                            });
                        },
                        scale: 1,
                        logging: true,
                        useCORS: true,
                        allowTaint: true,
                        ignoreElements: () => {}
                    }).then(finalCanvas => {
                        // Create a download link for the captured image
                        finalCanvas.toBlob(blob => {
                            const url = window.URL.createObjectURL(blob);
                            var link = document.createElement('a');
                            link.style.display = 'none';
                            link.href = url;
                            link.download = 'screenshot.png';
                            document.body.appendChild(link);
                            link.click();
                            document.body.removeChild(link);
                            window.URL.revokeObjectURL(url);
                        });
                    }).catch(error => {
                        console.error('Screenshot capture failed:', error);
                    });
                }
            };

            processIframes();
        };

        loadScript(
            'https://cdnjs.cloudflare.com/ajax/libs/html2canvas/1.3.2/html2canvas.min.js',
            isHtml2CanvasLoaded,
            captureAllWindows
        );

    });
    */
    """

    inject_js_code(source=source)

def add_reportgen_button():
    st.markdown(
        """
        <!-- <button id="reportButton" class="st-style-button">Generate Page Report</button> -->

        <style>
        .st-style-button {
            display: inline-flex;
            -webkit-box-align: center;
            align-items: center;
            -webkit-box-pack: center;
            justify-content: center;
            font-weight: 400;
            padding: 0.25rem 0.75rem;
            border-radius: 0.5rem;
            min-height: 38.4px;
            margin: 0px;
            line-height: 1.6;
            color: inherit;
            width: auto;
            user-select: none;
            background-color: white; /* Set a white background */
            border: 1px solid rgba(49, 51, 63, 0.2);
            outline: none; !important
            box-shadow: none !important;
        }

        /* Change background on mouse-over */
        .st-style-button:hover {
            background-color: white;
            color: #0A04D2;
            border: 1px solid #0A04D2;
        }

        </style>
        """,
        unsafe_allow_html=True,
    )
    screenshot_window()

add_reportgen_button()

client = OpenAI(api_key='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')

col1, col2 = st.columns(2)

wl1, wl2 = st.columns(2)

def to_base64(uploaded_file):
    file_buffer = uploaded_file.read()
    b64 = base64.b64encode(file_buffer).decode()
    return f"data:image/png;base64,{b64}"

def generate_df(new_img_list):

    # if st.session_state.buttondo:
    #     for img in new_img_list:
    #         with open(img, mode="rb") as f:
    #             uploaded_file = f.read()

    #         current_df = pd.DataFrame(
    #             {
    #                 "image_id": img,
    #                 "image": f"data:image/png;base64,{base64.b64encode(uploaded_file).decode()}",
    #                 "name": img,
    #                 "defect_type": "",
    #                 "description": "",
    #             }
    #         )

    # else:
    current_df = pd.DataFrame(
        {
            "image_id": [img.file_id for img in st.session_state.images],
            "image": [to_base64(img) for img in st.session_state.images],
            "name": [img.name for img in st.session_state.images],
            "defect_type": [""] * len(st.session_state.images),
            "description": [""] * len(st.session_state.images),
        }
    )

    if "df" not in st.session_state:
        st.session_state.df = current_df
        return

    new_df = pd.merge(current_df, st.session_state.df, on=["image_id"], how="outer", indicator=True)
    new_df = new_df[new_df["_merge"] != "right_only"].drop(columns=["_merge", "name_y", "image_y", "description_x"])
    new_df = new_df.rename(columns={"name_x": "name", "image_x": "image", "description_y": "description"})
    new_df["defect_type"] = new_df["defect_type"].fillna("")
    new_df["description"] = new_df["description"].fillna("")

    st.session_state.df = new_df


def render_df():
    def highlight_col(x):
        r = 'background-color: red'
        df1 = pd.DataFrame('', index=x.index, columns=x.columns)
        df1.iloc[:, 2] = r
        return df1

    st.session_state.df.style.apply(highlight_col, axis=None)

    st.data_editor(
        st.session_state.df,
        column_config={
            "image": st.column_config.ImageColumn(
                "Preview Image", help="Image preview", width=100
            ),
            "name": st.column_config.Column("Name", help="Image name", width=200),
            "defect_type": st.column_config.Column(
                "Defect Type", help="Defect description", width=400
            ),
            "description": st.column_config.Column(
                "Description", help="Image description", width=800
            ),
        },
        disabled=True,
        hide_index=True,
        height=500,
        column_order=["image", "name", "defect_type", "description"],
        use_container_width=True,
    )


def generate_description(image_base64):
    response = client.chat.completions.create(
        model="gpt-4-vision-preview",
        messages=[
            {
                "role": "user",
                "content": [
                    {"type": "text", "text": """Please answer at the following format:
Defect
Description: <Analyze how the wafer defect type in the image>"""},
                    {
                        "type": "image_url",
                        "image_url": {
                            "url": image_base64,
                        },
                    },
                ],
            }
        ],
        max_tokens=50,
    )
    return response.choices[0].message.content


def update_df():
    indexes = st.session_state.df[st.session_state.df["description"] == ""].index
    for idx in indexes:
        description = generate_description(st.session_state.df.loc[idx, "image"])
        st.session_state.df.loc[idx, "defect_type"] = description.split("\n")[0]
        st.session_state.df.loc[idx, "description"] = description.split("\n")[-1]

# Creating title for Streamlit app
st.title("Wafer Defect Detection with LLM Classification and Analyze")

st.components.v1.iframe("https://whitewolf21.github.io/live/", width=None, height=750, scrolling=False)

default_img = ""
st.session_state.buttondo = False

with st.sidebar:
    st.title("Upload Your Images")
    st.session_state.images = st.file_uploader(label=" ", accept_multiple_files=True)

    genre = st.radio(
        "Baseline",
        ["Propose Solution", "Baseline 1", "Baseline 2", "Baseline 3"])

    Center = st.checkbox('Center')
    if Center:
        st.image('44.png', width=100)

    Donut = st.checkbox('Donut')
    if Donut:
        st.image('7316.png', width=100)

    EdgeLoc = st.checkbox('Edge-Loc')
    if EdgeLoc:
        st.image('36.png', width=100)

    EdgeRing = st.checkbox('Edge-Ring')
    if EdgeRing:
        st.image('100.png', width=100)

    Loc = st.checkbox('Loc')
    if Loc:
        st.image('19.png', width=100)

    NearFull = st.checkbox('Near-Full')
    if NearFull:
        st.image('929.png', width=100)

    NoDefect = st.checkbox('No Defect')
    if NoDefect:
        st.image('0.png', width=100)

    Random = st.checkbox('Random')
    if Random:
        st.image('602.png', width=100)

    Scratch = st.checkbox('Scratch')
    if Scratch:
        st.image('134.png', width=100)

    if st.button("Detect", type="primary"):
        st.session_state.buttondo = True

# uploading file for processing
# uploaded_file = st.file_uploader("Choose an image", type=["jpg", "jpeg", "png"])

if st.session_state.images or st.session_state.buttondo:
    imgs = []
    new_img_list = copy.deepcopy(st.session_state.images)
    # if st.session_state.buttondo:
    #     new_img_list = [default_img]
    # else:
    #     new_img_list = copy.deepcopy(st.session_state.images)

    for img in new_img_list:
        image = cv2.imdecode(np.frombuffer(img.read(), np.uint8), 1)
        # if st.session_state.buttondo:
        #     image = cv2.imread(img)
        # else:
        #     image = cv2.imdecode(np.frombuffer(img.read(), np.uint8), 1)

        img = image.copy()
        gray=cv2.cvtColor(img,cv2.COLOR_BGR2GRAY)

        blur=cv2.blur(gray,(10,10))

        dst=cv2.fastNlMeansDenoising(blur,None,10,7,21)

        _,binary=cv2.threshold(dst,127,255,cv2.THRESH_BINARY+cv2.THRESH_OTSU)

        kernel=np.ones((5,5),np.uint8)

        erosion=cv2.erode(binary,kernel,iterations=1)
        dilation=cv2.dilate(binary,kernel,iterations=1)

        if (dilation==0).sum()>1:
            contours,_=cv2.findContours(dilation,cv2.RETR_TREE,cv2.CHAIN_APPROX_NONE)
            for i in contours:
                if cv2.contourArea(i)<261121.0:
                    cv2.drawContours(img,i,-1,(0,0,255),3)
            cv2.putText(img,"",(30,40),cv2.FONT_HERSHEY_SIMPLEX,1,(0,0,255),2)
        else:
            cv2.putText(img, "Good wafer", (30, 40), cv2.FONT_HERSHEY_SIMPLEX, 1, (0, 0, 255), 2)

        # st.image(image,caption="original image",channels="BGR")
        # st.image(blur,caption="blur")
        # st.image(binary,caption="binary")
        # st.image(erosion,caption="erosion")
        # st.image(dilation,caption="dilation")
        imgs.append(img)

    # st.image(imgs)

    with st.spinner('Wait for it...'):
        time.sleep(10)
        st.success('Done!')

    generate_df(new_img_list)
    # print(st.session_state.images)
    update_df()

    st.download_button(
        "Download descriptions as CSV",
        st.session_state.df.drop(['image', "image_id"], axis=1).to_csv(index=False),
        "descriptions.csv",
        "text/csv",
        use_container_width=True
    )

    render_df()
    # cv2.waitKey(0)
    # cv2.destroyAllWindows()

New version:

import streamlit as st
from multipage import MultiPage
from app_pages import home, about, main

m = MultiPage()
st.set_page_config(
    page_title='Utilizing Generative AI for automated wafer defect inspection',
    layout ="wide",
    initial_sidebar_state="expanded",
)

# Add all your application here
m.add_page("Home", "house", home.app)
m.add_page("About", "info-circle", about.app)
m.add_page("App", "cast", main.app)

# The main app
m.run()
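Note on the new layout: every module under app_pages follows the same contract, namely exposing an app() function that MultiPage calls when its menu entry is selected, and the icon string passed to add_page is a Bootstrap Icons name consumed by streamlit_option_menu. As a rough sketch of how one more page would plug in (the page name, icon, and file below are hypothetical and not part of this commit):

# app_pages/reports.py -- hypothetical example, not part of this commit
import streamlit as st

def app():
    # Each page module only needs to define app(); MultiPage calls it on selection.
    st.title("Reports")
    st.write("Render report content here.")

# It would then be registered in app.py alongside the existing pages:
# m.add_page("Reports", "file-earmark-text", reports.app)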
app_pages/about.py
ADDED
@@ -0,0 +1,15 @@

import streamlit as st

def app():
    col1, col2, col3 = st.columns([0.2, 1, 0.2])

    with col1:
        st.write(' ')

    with col2:
        st.image('Saitama_Team_Presentation_Page2.png', use_column_width=True)

    with col3:
        st.write(' ')

    st.write("")
app_pages/home.py
ADDED
@@ -0,0 +1,26 @@

import streamlit as st

def app():
    col1, col2, col3 = st.columns([0.2, 0.75, 0.2])

    with col1:
        st.write(' ')

    with col2:
        st.image('Saitama_Team_Presentation_Page1.png', use_column_width=True)

        st.write("")

        st.markdown('''#### Utilizing Generative AI for automated wafer defect inspection''')
        st.write("")
        st.write("")

        st.markdown("##### This app allows you to live demo of our proposed solutions:")
        st.write("")
        st.write("")
        st.markdown("👈 Select the **About** page from the sidebar for our information")

        st.markdown("👈 or directly select the **App** page")

    with col3:
        st.write(' ')
app_pages/main.py
ADDED
@@ -0,0 +1,5 @@

import streamlit as st

def app():

    st.components.v1.iframe("https://15a9-14-161-45-131.ngrok-free.app", height=1000)
multipage.py
ADDED
@@ -0,0 +1,68 @@

"""
This file is the framework for generating multiple Streamlit applications
through an object oriented framework.

Source: https://huggingface.co/spaces/deepset/wikipedia-assistant/tree/main
"""

# Import necessary libraries
import streamlit as st
from streamlit_option_menu import option_menu


# Define the multipage class to manage the multiple apps in our program
class MultiPage:
    """Framework for combining multiple streamlit applications."""

    def __init__(self) -> None:
        """Constructor class to generate a list which will store all our applications as an instance variable."""
        self.pages = []

    def add_page(self, title, icon, func) -> None:
        """Class Method to Add pages to the project

        Args:
            title ([str]): The title of page which we are adding to the list of apps

            func: Python function to render this page in Streamlit
        """

        self.pages.append(
            {
                "title": title,
                "icon": icon,
                "function": func
            }
        )

    def run(self):
        # Drodown to select the page to run
        st.markdown("""
        <style>
            section[data-testid="stSidebar"] > div:first-of-type {
                background-color: var(--secondary-background-color);
                background: var(--secondary-background-color);
                width: 250px;
                padding: 0rem 0;
                box-shadow: -2rem 0px 2rem 2rem rgba(0,0,0,0.16);
            }
            section[aria-expanded="true"] > div:nth-of-type(2) {
                display: none;
            }
            .main > div:first-of-type {
                padding: 1rem 0;
            }
        </style>
        """, unsafe_allow_html=True)

        with st.sidebar:
            selected = option_menu("OCR Comparator",
                                   [page["title"] for page in self.pages],
                                   icons=[page["icon"] for page in self.pages],
                                   menu_icon="app-indicator", default_index=0)

        # Run the selected page
        for index, item in enumerate(self.pages):
            if item["title"] == selected:
                self.pages[index]["function"]()
                break
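For readers skimming the class above: add_page() only appends a {title, icon, function} dict to self.pages, while run() draws an option_menu in the sidebar and calls the function whose title matches the selection. A minimal self-contained usage sketch, assuming multipage.py and streamlit_option_menu are installed and using illustrative page names, would be:

# Hypothetical usage sketch of multipage.py; page names here are illustrative.
import streamlit as st
from multipage import MultiPage

def first_page():
    st.write("Hello from the first page")

def second_page():
    st.write("Hello from the second page")

m = MultiPage()
m.add_page("First", "house", first_page)        # icon names are Bootstrap Icons identifiers
m.add_page("Second", "bar-chart", second_page)
m.run()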
requirements.txt
CHANGED
@@ -1,59 +1,2 @@

Old version:

anyio==4.3.0
attrs==23.2.0
blinker==1.7.0
cachetools==5.3.3
certifi==2024.2.2
charset-normalizer==3.3.2
click==8.1.7
colorama==0.4.6
distro==1.9.0
exceptiongroup==1.2.0
gitdb==4.0.11
GitPython==3.1.42
h11==0.14.0
httpcore==1.0.4
httpx==0.27.0
idna==3.6
importlib_metadata==7.0.2
Jinja2==3.1.3
jsonschema==4.21.1
jsonschema-specifications==2023.12.1
markdown-it-py==3.0.0
MarkupSafe==2.1.5
mdurl==0.1.2
numpy==1.26.4
openai==1.12.0
opencv-python==4.9.0.80
packaging==23.2
pandas==2.2.0
pillow==10.2.0
protobuf==4.25.3
pyarrow==15.0.1
pydantic==2.6.4
pydantic_core==2.16.3
pydeck==0.8.1b0
Pygments==2.17.2
python-dateutil==2.9.0.post0
pytz==2024.1
referencing==0.33.0
requests==2.31.0
rich==13.7.1
rpds-py==0.18.0
six==1.16.0
smmap==5.0.1
sniffio==1.3.1
streamlit==1.31.0
tenacity==8.2.3
toml==0.10.2
toolz==0.12.1
tornado==6.4
tqdm==4.66.2
typing_extensions==4.10.0
tzdata==2024.1
tzlocal==5.2
urllib3==2.2.1
validators==0.22.0
watchdog==4.0.0
zipp==3.18.0

New version:

streamlit
streamlit_option_menu