{"cells":[{"cell_type":"code","execution_count":5,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:38:35.178622Z","iopub.status.busy":"2024-01-16T09:38:35.178222Z","iopub.status.idle":"2024-01-16T09:39:00.420319Z","shell.execute_reply":"2024-01-16T09:39:00.418913Z","shell.execute_reply.started":"2024-01-16T09:38:35.178588Z"},"trusted":true},"outputs":[{"name":"stdout","output_type":"stream","text":["Collecting torchsummary\n"," Downloading torchsummary-1.5.1-py3-none-any.whl (2.8 kB)\n","Installing collected packages: torchsummary\n","Successfully installed torchsummary-1.5.1\n","\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n","\u001b[0mCollecting torchview\n"," Downloading torchview-0.2.6-py3-none-any.whl (25 kB)\n","Installing collected packages: torchview\n","Successfully installed torchview-0.2.6\n","\u001b[33mWARNING: Running pip as the 'root' user can result in broken permissions and conflicting behaviour with the system package manager. 
It is recommended to use a virtual environment instead: https://pip.pypa.io/warnings/venv\u001b[0m\u001b[33m\n","\u001b[0m"]}],"source":["## torchsummary and torchview are not a part of requirements.txt but are useful for visualizing model architecture\n","!pip install torchsummary\n","!pip install torchview"]},{"cell_type":"code","execution_count":19,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:45:08.596528Z","iopub.status.busy":"2024-01-16T09:45:08.596090Z","iopub.status.idle":"2024-01-16T09:45:08.660337Z","shell.execute_reply":"2024-01-16T09:45:08.659317Z","shell.execute_reply.started":"2024-01-16T09:45:08.596495Z"},"trusted":true},"outputs":[],"source":["import random\n","import os\n","import glob\n","import warnings\n","\n","import pandas as pd\n","import numpy as np\n","import matplotlib.pyplot as plt\n","import seaborn as sns\n","\n","import torch\n","import torchvision\n","\n","from torch import nn\n","from torch.utils.data import Dataset, DataLoader\n","\n","from torchsummary import summary\n","from torchview import draw_graph\n","\n","import albumentations as A\n","from albumentations.pytorch import ToTensorV2\n","\n","from PIL import Image\n","from tqdm.notebook import tqdm\n","\n","from sklearn.model_selection import train_test_split\n","\n","def set_seed(seed: int = 42):\n","    \"\"\"Seed the stdlib, numpy and torch RNGs so results are reproducible.\"\"\"\n","    random.seed(seed)\n","    np.random.seed(seed)\n","    # NOTE: setting PYTHONHASHSEED at runtime does not change the hash seed of the\n","    # already-running interpreter; it only affects child Python processes.\n","    os.environ[\"PYTHONHASHSEED\"] = str(seed)\n","    torch.manual_seed(seed)\n","    # manual_seed_all seeds every visible CUDA device, not just the current one\n","    torch.cuda.manual_seed_all(seed)\n","\n","set_seed()\n","\n","# The model was trained on the kaggle environment, thus the paths correspond to the kaggle Input/ directory\n","\n","DATASET_PATH = \"/kaggle/input/lgg-mri-segmentation/kaggle_3m/*/*_mask.tif\"\n","# NOTE(review): 'SAVE_MODEL_PARH' looks like a typo for SAVE_MODEL_PATH; kept as-is so any later cell referencing it still works\n","SAVE_MODEL_PARH = 
\"/kaggle/working/model.pth\"\n","\n","warnings.filterwarnings(\"ignore\")\n","plt.style.use(\"dark_background\")"]},{"cell_type":"code","execution_count":14,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:44:23.948586Z","iopub.status.busy":"2024-01-16T09:44:23.948083Z","iopub.status.idle":"2024-01-16T09:44:24.223721Z","shell.execute_reply":"2024-01-16T09:44:24.222782Z","shell.execute_reply.started":"2024-01-16T09:44:23.948541Z"},"trusted":true},"outputs":[],"source":["image_masks = glob.glob(DATASET_PATH)\n","image_paths = [file_path.replace(\"_mask\", '') for file_path in image_masks]"]},{"cell_type":"code","execution_count":15,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:44:25.127517Z","iopub.status.busy":"2024-01-16T09:44:25.126636Z","iopub.status.idle":"2024-01-16T09:44:32.146759Z","shell.execute_reply":"2024-01-16T09:44:32.146072Z","shell.execute_reply.started":"2024-01-16T09:44:25.127473Z"},"trusted":true},"outputs":[],"source":["def labels(mask_path):\n","    \"\"\"Return one binary label per mask file: 1 if the mask has any nonzero pixel, else 0.\"\"\"\n","    label = []\n","    for mask in mask_path:\n","        img = Image.open(mask)\n","        # conditional expression as a value, not for side effects\n","        label.append(1 if np.array(img).sum() > 0 else 0)\n","    return label\n","mask_labels = labels(image_masks)"]},{"cell_type":"code","execution_count":16,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:44:48.583543Z","iopub.status.busy":"2024-01-16T09:44:48.583139Z","iopub.status.idle":"2024-01-16T09:44:48.591057Z","shell.execute_reply":"2024-01-16T09:44:48.590151Z","shell.execute_reply.started":"2024-01-16T09:44:48.583508Z"},"trusted":true},"outputs":[],"source":["df = pd.DataFrame({\n","    'image_path': image_paths,\n","    'mask_path' : image_masks,\n","    'label' : mask_labels\n","    
})"]},{"cell_type":"code","execution_count":17,"metadata":{"execution":{"iopub.execute_input":"2024-01-16T09:44:49.558920Z","iopub.status.busy":"2024-01-16T09:44:49.558507Z","iopub.status.idle":"2024-01-16T09:44:49.568765Z","shell.execute_reply":"2024-01-16T09:44:49.568052Z","shell.execute_reply.started":"2024-01-16T09:44:49.558856Z"},"trusted":true},"outputs":[{"data":{"text/html":["
\n"," | image_path | \n","mask_path | \n","label | \n","
---|---|---|---|
0 | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","0 | \n","
1 | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","0 | \n","
2 | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","1 | \n","
3 | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","1 | \n","
4 | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","/kaggle/input/lgg-mri-segmentation/kaggle_3m/T... | \n","1 | \n","