import os
import zipfile
import requests
import jsonlines
from tqdm import tqdm
from pathlib import Path
from pycocotools.coco import COCO
from pycocotools import mask as maskUtils

# %% [markdown]
# # Download Annotations

# %%
# Fetch and unpack the COCO-Stuff trainval2017 annotation archive (skipped if
# the zip is already present in the working directory).
url = 'http://images.cocodataset.org/annotations/'
file = 'stuff_annotations_trainval2017.zip'
if not Path(f'./{file}').exists():
    # Stream the download: the archive is large (~1 GB), so avoid buffering
    # the whole response body in memory, and fail loudly on HTTP errors.
    with requests.get(url + file, stream=True) as response:
        response.raise_for_status()
        with open(file, 'wb') as f:
            for chunk in response.iter_content(chunk_size=1 << 20):
                f.write(chunk)

    with zipfile.ZipFile(file, 'r') as zipf:
        zipf.extractall(Path())

# NOTE: the per-split pixelmap zips (annotations/stuff_{split}2017_pixelmaps.zip)
# can optionally be extracted the same way; not needed for the JSONL export below.

# %% [markdown]
# # Stuff Segmentation Task

# %%
# Load the per-split annotation indexes (pycocotools parses the JSON once and
# builds image -> annotation lookup tables).
train_data = COCO('annotations/stuff_train2017.json')
val_data = COCO('annotations/stuff_val2017.json')

# %%
# Output directory must exist before jsonlines.open(..., 'w') can create files.
Path('data').mkdir(exist_ok=True)

# Convert each split to JSONL: one record per image with its category ids and
# compressed-RLE segmentation masks.
for split, data in zip(['train', 'validation'], [train_data, val_data]):
    with jsonlines.open(f'data/stuff_{split}.jsonl', mode='w') as writer:
        for image_id, image_info in tqdm(data.imgs.items()):
            categories, sem_rles = [], []
            anns = data.imgToAnns[image_id]
            file_name = image_info['file_name']
            height, width = image_info['height'], image_info['width']
            for ann in anns:
                # COCO-Stuff category ids start at 92; shift to 0-based labels.
                categories.append(ann['category_id'] - 92)
                segm = ann['segmentation']
                if isinstance(segm, list):
                    # Polygon format: convert each polygon to RLE, then merge
                    # into a single mask for the annotation.
                    rles = maskUtils.frPyObjects(segm, height, width)
                    rle = maskUtils.merge(rles)
                elif isinstance(segm['counts'], list):
                    # Uncompressed RLE: compress it.
                    rle = maskUtils.frPyObjects(segm, height, width)
                else:
                    # Already a compressed RLE dict; use as-is.
                    rle = segm
                # BUG FIX: frPyObjects/merge return `counts` as bytes. The
                # original only decoded the polygon branch, so uncompressed-RLE
                # annotations produced bytes that jsonlines cannot serialize.
                # Decode whenever counts is bytes so every record is JSON-safe.
                if isinstance(rle['counts'], bytes):
                    rle['counts'] = rle['counts'].decode()
                sem_rles.append(rle)
            writer.write({
                'image': file_name, 'categories': categories, 'sem.rles': sem_rles
            })

# %%
# Compress each JSONL file for distribution (archive member keeps the bare
# filename, not the data/ prefix).
for split in ['train', 'validation']:
    file_path = f'data/stuff_{split}.jsonl'
    with zipfile.ZipFile(f'data/stuff_{split}.zip', 'w', zipfile.ZIP_DEFLATED) as zipf:
        zipf.write(file_path, os.path.basename(file_path))