Upload 4 files
- Weed_Detector.py +108 -0
- new_yolov8_best.pt +3 -0
- requirements.txt +145 -0
- utils.py +43 -0
Weed_Detector.py
ADDED
@@ -0,0 +1,108 @@
import os
import cv2
import zipfile
import numpy as np
import streamlit as st
from io import BytesIO
from PIL import Image
from ultralytics import YOLO
from utils import create_shapefile_with_latlon


# Define paths
path_to_store_bounding_boxes = 'detect/'
path_to_save_shapefile = 'weed_detections.shp'

# Ensure the output directory exists
os.makedirs(path_to_store_bounding_boxes, exist_ok=True)

# Load the custom-trained model
model = YOLO('new_yolov8_best.pt')

# Mapping of class indices to readable names ('weeds' is index 2)
class_names = ["citrus area", "trees", "weeds", "weeds and trees"]


# Streamlit UI
st.title("Weed Detection and Shapefile Creation")

# Input coordinates for the image corners
st.sidebar.header("Image Coordinates")
top_left = st.sidebar.text_input("Top Left (lon, lat)", value="-48.8864783, -20.5906375")
top_right = st.sidebar.text_input("Top Right (lon, lat)", value="-48.8855653, -20.5906264")
bottom_right = st.sidebar.text_input("Bottom Right (lon, lat)", value="-48.8855534, -20.5914861")
bottom_left = st.sidebar.text_input("Bottom Left (lon, lat)", value="-48.8864664, -20.5914973")

# Convert the input coordinates to (lon, lat) tuples
image_coords = [
    tuple(map(float, top_left.split(','))),
    tuple(map(float, top_right.split(','))),
    tuple(map(float, bottom_right.split(','))),
    tuple(map(float, bottom_left.split(',')))
]

# Upload image
uploaded_image = st.file_uploader("Upload an image", type=["png", "jpg", "jpeg"])

if uploaded_image is not None:
    # Display the uploaded image
    st.image(uploaded_image, caption="Uploaded Image", use_column_width=True)
    img = Image.open(uploaded_image)
    img_array = np.array(img)
    image_height, image_width, _ = img_array.shape

    # Save a temporary copy for the model to read
    temp_image_path = "temp_uploaded_image.png"
    img.save(temp_image_path)

    # Perform weed detection on button click
    if st.button("Detect Weeds"):
        # Run the model prediction
        results = model.predict(temp_image_path, imgsz=640, conf=0.2, iou=0.4)
        results = results[0]

        weed_bboxes = []

        for i, box in enumerate(results.boxes):
            tensor = box.xyxy[0]
            x1 = int(tensor[0].item())
            y1 = int(tensor[1].item())
            x2 = int(tensor[2].item())
            y2 = int(tensor[3].item())
            conf = box.conf[0].item()  # Confidence score
            label = box.cls[0].item()  # Class index

            # Debugging output to confirm boxes are detected
            print(f"Box {i}: ({x1}, {y1}), ({x2}, {y2}), label: {label}, confidence: {conf}")

            # Only keep detections whose class is "weeds"
            if class_names[int(label)] == "weeds":
                print("weed detected")
                # Draw the bounding box on the image
                cv2.rectangle(img_array, (x1, y1), (x2, y2), (255, 0, 255), 3)
                # Save the bounding-box coordinates
                weed_bboxes.append((x1, y1, x2, y2))

        # Save the image with bounding boxes
        detected_image_path = os.path.join(path_to_store_bounding_boxes, "detected_image.png")
        cv2.imwrite(detected_image_path, cv2.cvtColor(img_array, cv2.COLOR_RGB2BGR))

        # Display the image with bounding boxes
        st.image(img_array, caption="Detected Weeds", use_column_width=True)

        # Create a shapefile from the bounding boxes
        create_shapefile_with_latlon(weed_bboxes, (image_width, image_height), image_coords, path_to_save_shapefile)

        # Zip the shapefile components
        zip_buffer = BytesIO()
        with zipfile.ZipFile(zip_buffer, 'w') as zip_file:
            for filename in ['weed_detections.shp', 'weed_detections.shx', 'weed_detections.dbf']:
                zip_file.write(filename, os.path.basename(filename))
        zip_buffer.seek(0)

        # Offer the ZIP file for download
        st.download_button(
            label="Download Shapefile ZIP",
            data=zip_buffer,
            file_name="weed_detections.zip",
            mime="application/zip"
        )
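
The app is launched with streamlit run Weed_Detector.py. For reference, the per-box loop above can also be written against the vectorised tensors that ultralytics exposes on results.boxes; the sketch below is illustrative only (it is not part of the committed files) and assumes the same weights, class order, and a saved image at temp_uploaded_image.png.

# Illustrative sketch (not part of this commit): the same "weeds"-only filtering,
# done with the vectorised accessors on results.boxes instead of a per-box loop.
from ultralytics import YOLO

model = YOLO('new_yolov8_best.pt')
result = model.predict('temp_uploaded_image.png', imgsz=640, conf=0.2, iou=0.4)[0]

boxes = result.boxes.xyxy.cpu().numpy()               # (N, 4) pixel corners x1, y1, x2, y2
classes = result.boxes.cls.cpu().numpy().astype(int)  # (N,) class indices
weed_bboxes = [tuple(map(int, b)) for b in boxes[classes == 2]]  # index 2 == "weeds"
print(f"{len(weed_bboxes)} weed boxes: {weed_bboxes}")
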
new_yolov8_best.pt
ADDED
@@ -0,0 +1,3 @@
version https://git-lfs.github.com/spec/v1
oid sha256:2664fca042b1ead70a9dc27597052b1c137719ab8ae3a870c940bf16cdfc4c0e
size 22508569
requirements.txt
ADDED
@@ -0,0 +1,145 @@
altair==5.3.0
anyio==4.4.0
argon2-cffi==23.1.0
argon2-cffi-bindings==21.2.0
arrow==1.3.0
asttokens==2.4.1
async-lru==2.0.4
attrs==23.2.0
Babel==2.15.0
beautifulsoup4==4.12.3
bleach==6.1.0
blinker==1.8.2
cachetools==5.3.3
certifi==2024.2.2
cffi==1.16.0
charset-normalizer==3.3.2
click==8.1.7
click-plugins==1.1.1
cligj==0.7.2
colorama==0.4.6
comm==0.2.2
contourpy==1.2.1
cycler==0.12.1
debugpy==1.8.1
decorator==5.1.1
defusedxml==0.7.1
exceptiongroup==1.2.1
executing==2.0.1
fastjsonschema==2.19.1
filelock==3.14.0
fiona==1.9.6
fonttools==4.52.3
fqdn==1.5.1
fsspec==2024.5.0
geopandas==0.14.4
gitdb==4.0.11
GitPython==3.1.43
h11==0.14.0
httpcore==1.0.5
httpx==0.27.0
idna==3.7
intel-openmp==2021.4.0
ipykernel==6.29.4
ipython==8.24.0
isoduration==20.11.0
jedi==0.19.1
Jinja2==3.1.4
json5==0.9.25
jsonpointer==2.4
jsonschema==4.22.0
jsonschema-specifications==2023.12.1
jupyter_client==8.6.2
jupyter_core==5.7.2
jupyter-events==0.10.0
jupyter-lsp==2.2.5
jupyter_server==2.14.0
jupyter_server_terminals==0.5.3
jupyterlab==4.2.1
jupyterlab_pygments==0.3.0
jupyterlab_server==2.27.2
kiwisolver==1.4.5
markdown-it-py==3.0.0
MarkupSafe==2.1.5
matplotlib==3.9.0
matplotlib-inline==0.1.7
mdurl==0.1.2
mistune==3.0.2
mkl==2021.4.0
mpmath==1.3.0
nbclient==0.10.0
nbconvert==7.16.4
nbformat==5.10.4
nest-asyncio==1.6.0
networkx==3.3
notebook==7.2.0
notebook_shim==0.2.4
numpy==1.26.4
opencv-python==4.9.0.80
overrides==7.7.0
packaging==24.0
pandas==2.2.2
pandocfilters==1.5.1
parso==0.8.4
pillow==10.3.0
pip==24.0
platformdirs==4.2.2
prometheus_client==0.20.0
prompt_toolkit==3.0.44
protobuf==4.25.3
psutil==5.9.8
pure-eval==0.2.2
py-cpuinfo==9.0.0
pyarrow==16.1.0
pycparser==2.22
pydeck==0.9.1
Pygments==2.18.0
pyparsing==3.1.2
pyproj==3.6.1
pyshp==2.3.1
python-dateutil==2.9.0.post0
python-json-logger==2.0.7
PyYAML==6.0.1
pyzmq==26.0.3
referencing==0.35.1
requests==2.32.2
rfc3339-validator==0.1.4
rfc3986-validator==0.1.1
rich==13.7.1
rpds-py==0.18.1
scipy==1.13.1
seaborn==0.13.2
Send2Trash==1.8.3
setuptools==69.5.1
shapely==2.0.4
six==1.16.0
smmap==5.0.1
sniffio==1.3.1
soupsieve==2.5
stack-data==0.6.3
streamlit==1.35.0
sympy==1.12
tbb==2021.12.0
tenacity==8.3.0
terminado==0.18.1
thop==0.1.1.post2209072238
tinycss2==1.3.0
toml==0.10.2
tomli==2.0.1
toolz==0.12.1
torch==2.3.0
torchvision==0.18.0
tornado==6.4
tqdm==4.66.4
traitlets==5.14.3
types-python-dateutil==2.9.0.20240316
typing_extensions==4.12.0
tzdata==2024.1
ultralytics==8.2.23
uri-template==1.3.0
urllib3==2.2.1
watchdog==4.0.1
wcwidth==0.2.13
webcolors==1.13
webencodings==0.5.1
websocket-client==1.8.0
utils.py
ADDED
@@ -0,0 +1,43 @@
import os
import cv2
import zipfile
import shapefile
import numpy as np
from io import BytesIO
from shapely.geometry import Polygon
import matplotlib.pyplot as plt
from PIL import Image


def convert_pixel_to_latlon(x, y, image_width, image_height, image_coords):
    top_left, top_right, bottom_right, bottom_left = image_coords

    lon_top = top_left[0] + (top_right[0] - top_left[0]) * (x / image_width)
    lon_bottom = bottom_left[0] + (bottom_right[0] - bottom_left[0]) * (x / image_width)
    lat_left = top_left[1] + (bottom_left[1] - top_left[1]) * (y / image_height)
    lat_right = top_right[1] + (bottom_right[1] - top_right[1]) * (y / image_height)

    lon = lon_top + (lon_bottom - lon_top) * (y / image_height)
    lat = lat_left + (lat_right - lat_left) * (x / image_width)

    return lon, lat


# Function to create a shapefile with image dimensions and bounding boxes
def create_shapefile_with_latlon(bboxes, image_shape, image_coords, shapefile_path):
    w = shapefile.Writer(shapefile_path)
    w.field('id', 'C')

    img_width, img_height = image_shape

    # Add bounding boxes for weeds
    for idx, (x1, y1, x2, y2) in enumerate(bboxes):
        top_left = convert_pixel_to_latlon(x1, y1, img_width, img_height, image_coords)
        top_right = convert_pixel_to_latlon(x2, y1, img_width, img_height, image_coords)
        bottom_left = convert_pixel_to_latlon(x1, y2, img_width, img_height, image_coords)
        bottom_right = convert_pixel_to_latlon(x2, y2, img_width, img_height, image_coords)

        poly = Polygon([top_left, top_right, bottom_right, bottom_left, top_left])
        w.poly([poly.exterior.coords])
        w.record(f'weed_{idx}')

    w.close()
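
As a quick standalone check (illustrative, not part of the commit), convert_pixel_to_latlon is a bilinear interpolation between the four corner coordinates, so it should return each input corner exactly when evaluated at the corresponding image corner. Using the default coordinates from Weed_Detector.py:

# Illustrative sanity check of the pixel-to-(lon, lat) mapping in utils.py.
from utils import convert_pixel_to_latlon

image_coords = [
    (-48.8864783, -20.5906375),  # top left     (lon, lat)
    (-48.8855653, -20.5906264),  # top right
    (-48.8855534, -20.5914861),  # bottom right
    (-48.8864664, -20.5914973),  # bottom left
]
w, h = 640, 640  # any size works; the mapping is normalised by width and height

print(convert_pixel_to_latlon(0, 0, w, h, image_coords))  # -> top left
print(convert_pixel_to_latlon(w, 0, w, h, image_coords))  # -> top right
print(convert_pixel_to_latlon(w, h, w, h, image_coords))  # -> bottom right
print(convert_pixel_to_latlon(0, h, w, h, image_coords))  # -> bottom left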