SKU-110K CVPR2019 Dataset Auto-Download (#3167)
* SKU-110K CVPR2019 Dataset Auto-Download
This PR adds the **SKU-110K CVPR2019 Dataset** https://github.com/eg4000/SKU110K_CVPR19 to the YOLOv5 🚀 dataset auto-download list.
- [x] [COCO](https://cocodataset.org/#home)
- [x] [COCO128](https://www.kaggle.com/ultralytics/coco128)
- [x] [VOC](http://host.robots.ox.ac.uk/pascal/VOC/)
- [x] [Argoverse-HD](http://www.cs.cmu.edu/~mengtial/proj/streaming/)
- [x] [VisDrone2019-DET](https://github.com/VisDrone/VisDrone-Dataset)
- [x] [GlobalWheat-2020](http://www.global-wheat.com/)
- [ ] [KITTI](https://www.kaggle.com/twaldo/kitti-object-detection)
- [x] [SKU-110K](https://github.com/eg4000/SKU110K_CVPR19) (grocery store items)
- [ ] [CityScapes](https://www.cityscapes-dataset.com/)
- [x] [Objects365](https://www.objects365.org/overview.html)
- [ ] [OpenImages](https://storage.googleapis.com/openimages/web/index.html)
```yaml
# SKU-110K retail items dataset https://github.com/eg4000/SKU110K_CVPR19
# Train command: python train.py --data SKU-110K.yaml
# Default dataset location is next to YOLOv5:
# /parent_folder
# /datasets/SKU-110K
# /yolov5
```
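For context, when the paths in the YAML are missing on disk, YOLOv5 can execute the optional `download:` field to fetch and convert the dataset automatically. Below is a minimal sketch of that idea, assuming a hypothetical `maybe_autodownload` helper; it is not YOLOv5's internal API, it only illustrates reading the YAML and running the embedded Python script.

```python
# Minimal sketch (hypothetical helper, not YOLOv5's internal API) of how the
# optional `download:` field in a dataset YAML can drive auto-download: if the
# training list is missing on disk, execute the embedded Python script.
from pathlib import Path

import yaml


def maybe_autodownload(yaml_file):
    """Run the YAML's `download:` script when the train path does not exist."""
    data = yaml.safe_load(Path(yaml_file).read_text())
    train = Path(data['train'])
    if not train.exists() and 'download' in data:
        print(f'{train} not found, running download script from {yaml_file} ...')
        exec(data['download'])  # in this YAML the field holds a Python script


# Example (hypothetical usage):
# maybe_autodownload('data/SKU-110K.yaml')
```

In YOLOv5 itself this check is handled by its dataset-checking utility; the sketch above only mirrors the behavior the new YAML relies on.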
* Update SKU-110K.yaml
* Update SKU-110K.yaml
- data/SKU-110K.yaml +52 -0
@@ -0,0 +1,52 @@
```yaml
# SKU-110K retail items dataset https://github.com/eg4000/SKU110K_CVPR19
# Train command: python train.py --data SKU-110K.yaml
# Default dataset location is next to YOLOv5:
#   /parent_folder
#     /datasets/SKU-110K
#     /yolov5


# train and val data as 1) directory: path/images/, 2) file: path/images.txt, or 3) list: [path1/images/, path2/images/]
train: ../datasets/SKU-110K/train.txt  # 8219 images
val: ../datasets/SKU-110K/val.txt  # 588 images
test: ../datasets/SKU-110K/test.txt  # 2936 images

# number of classes
nc: 1

# class names
names: [ 'object' ]


# download command/URL (optional) --------------------------------------------------------------------------------------
download: |
  import shutil
  from tqdm import tqdm
  from utils.general import np, pd, Path, download, xyxy2xywh

  # Download
  datasets = Path('../datasets')  # download directory
  urls = ['http://trax-geometry.s3.amazonaws.com/cvpr_challenge/SKU110K_fixed.tar.gz']
  download(urls, dir=datasets, delete=False)

  # Rename directories
  dir = (datasets / 'SKU-110K')
  if dir.exists():
      shutil.rmtree(dir)
  (datasets / 'SKU110K_fixed').rename(dir)  # rename dir
  (dir / 'labels').mkdir(parents=True, exist_ok=True)  # create labels dir

  # Convert labels
  names = 'image', 'x1', 'y1', 'x2', 'y2', 'class', 'image_width', 'image_height'  # column names
  for d in 'annotations_train.csv', 'annotations_val.csv', 'annotations_test.csv':
      x = pd.read_csv(dir / 'annotations' / d, names=names).values  # annotations
      images, unique_images = x[:, 0], np.unique(x[:, 0])
      with open((dir / d).with_suffix('.txt').__str__().replace('annotations_', ''), 'w') as f:
          f.writelines(f'./images/{s}\n' for s in unique_images)
      for im in tqdm(unique_images, desc=f'Converting {dir / d}'):
          cls = 0  # single-class dataset
          with open((dir / 'labels' / im).with_suffix('.txt'), 'a') as f:
              for r in x[images == im]:
                  w, h = r[6], r[7]  # image width, height
                  xywh = xyxy2xywh(np.array([[r[1] / w, r[2] / h, r[3] / w, r[4] / h]]))[0]  # instance
                  f.write(f"{cls} {xywh[0]:.5f} {xywh[1]:.5f} {xywh[2]:.5f} {xywh[3]:.5f}\n")  # write label
```
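The core of the label conversion above is turning absolute `x1, y1, x2, y2` corner coordinates into the normalized `class x_center y_center width height` format that YOLO labels use. A self-contained illustration follows, using a hypothetical `xyxy_to_normalized_xywh` helper that mirrors the per-axis normalization plus the `xyxy2xywh` call.

```python
# Illustrative only: a self-contained version of the box conversion the download
# script performs with utils.general.xyxy2xywh. Corner coordinates are first
# normalized by image width/height, then converted to center-x, center-y, w, h.
import numpy as np


def xyxy_to_normalized_xywh(x1, y1, x2, y2, img_w, img_h):
    """Convert absolute corner coords to normalized YOLO (xc, yc, w, h)."""
    box = np.array([x1 / img_w, y1 / img_h, x2 / img_w, y2 / img_h], dtype=float)
    xc = (box[0] + box[2]) / 2  # normalized center x
    yc = (box[1] + box[3]) / 2  # normalized center y
    w = box[2] - box[0]         # normalized width
    h = box[3] - box[1]         # normalized height
    return xc, yc, w, h


# Example: a 100x200 px box with top-left corner (50, 80) in a 1000x1000 image
print(xyxy_to_normalized_xywh(50, 80, 150, 280, 1000, 1000))  # (0.1, 0.18, 0.1, 0.2)
```

Because SKU-110K is single-class, every row is written with class index 0, as in the script above.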