{ "cells": [ { "cell_type": "markdown", "id": "9b10cf68", "metadata": { "papermill": { "duration": 0.009368, "end_time": "2023-09-03T09:54:26.123203", "exception": false, "start_time": "2023-09-03T09:54:26.113835", "status": "completed" }, "tags": [] }, "source": [ "# Import Libraries and Load Data" ] }, { "cell_type": "code", "execution_count": 1, "id": "82a4c58c", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:26.143814Z", "iopub.status.busy": "2023-09-03T09:54:26.143453Z", "iopub.status.idle": "2023-09-03T09:54:38.437851Z", "shell.execute_reply": "2023-09-03T09:54:38.436857Z" }, "papermill": { "duration": 12.307795, "end_time": "2023-09-03T09:54:38.440372", "exception": false, "start_time": "2023-09-03T09:54:26.132577", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "## Remove Warnings ## \n", "import warnings\n", "warnings.filterwarnings(\"ignore\")\n", "\n", "## Data ## \n", "import numpy as np\n", "import pandas as pd \n", "import os \n", "\n", "## Visualization ## \n", "import matplotlib.pyplot as plt \n", "import plotly.express as px\n", "import seaborn as sns\n", "import plotly.graph_objects as go \n", "\n", "## Image ## \n", "import cv2\n", "from tensorflow.keras.preprocessing.image import ImageDataGenerator \n", "\n", "## Tensorflow ## \n", "from tensorflow.keras.models import Sequential, Model\n", "from tensorflow.keras.layers import Input, Dense , Conv2D , Dropout , Flatten , Activation, MaxPooling2D , GlobalAveragePooling2D\n", "from tensorflow.keras.optimizers import Adam , RMSprop \n", "from tensorflow.keras.layers import BatchNormalization\n", "from tensorflow.keras.callbacks import ReduceLROnPlateau , EarlyStopping , ModelCheckpoint , LearningRateScheduler\n", "from tensorflow.keras.applications import ResNet50V2" ] }, { "cell_type": "code", "execution_count": 2, "id": "1906bacd", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:38.462010Z", "iopub.status.busy": 
"2023-09-03T09:54:38.461382Z", "iopub.status.idle": "2023-09-03T09:54:38.530886Z", "shell.execute_reply": "2023-09-03T09:54:38.529893Z" }, "papermill": { "duration": 0.082889, "end_time": "2023-09-03T09:54:38.533290", "exception": false, "start_time": "2023-09-03T09:54:38.450401", "status": "completed" }, "tags": [] }, "outputs": [ { "data": { "text/html": [ "
\n", "\n", "\n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", " \n", "
class idfilepathslabelsdata set
00C:/Users/kamel/Documents/Image Classification/...ADONIStrain
10C:/Users/kamel/Documents/Image Classification/...ADONIStrain
20C:/Users/kamel/Documents/Image Classification/...ADONIStrain
30C:/Users/kamel/Documents/Image Classification/...ADONIStrain
40C:/Users/kamel/Documents/Image Classification/...ADONIStrain
\n", "
" ], "text/plain": [ " class id filepaths labels \\\n", "0 0 C:/Users/kamel/Documents/Image Classification/... ADONIS \n", "1 0 C:/Users/kamel/Documents/Image Classification/... ADONIS \n", "2 0 C:/Users/kamel/Documents/Image Classification/... ADONIS \n", "3 0 C:/Users/kamel/Documents/Image Classification/... ADONIS \n", "4 0 C:/Users/kamel/Documents/Image Classification/... ADONIS \n", "\n", " data set \n", "0 train \n", "1 train \n", "2 train \n", "3 train \n", "4 train " ] }, "execution_count": 2, "metadata": {}, "output_type": "execute_result" } ], "source": [ "df = pd.read_csv('C:/Users/kamel/Documents/Image Classification/butterfly-dataset/butterflies and moths.csv') \n", "IMAGE_DIR = 'C:/Users/kamel/Documents/Image Classification/butterfly-dataset'\n", "df['filepaths'] = IMAGE_DIR + '/' + df['filepaths']\n", "df.head()" ] }, { "cell_type": "code", "execution_count": 3, "id": "1b2dd2d3", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:38.555496Z", "iopub.status.busy": "2023-09-03T09:54:38.555183Z", "iopub.status.idle": "2023-09-03T09:54:38.575753Z", "shell.execute_reply": "2023-09-03T09:54:38.574842Z" }, "papermill": { "duration": 0.033846, "end_time": "2023-09-03T09:54:38.577919", "exception": false, "start_time": "2023-09-03T09:54:38.544073", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "train_df = df.loc[df['data set'] == 'train']\n", "val_df = df.loc[df['data set'] == 'valid']\n", "test_df = df.loc[df['data set'] == 'test']" ] }, { "cell_type": "markdown", "id": "37bc594c", "metadata": { "papermill": { "duration": 0.009856, "end_time": "2023-09-03T09:54:38.597762", "exception": false, "start_time": "2023-09-03T09:54:38.587906", "status": "completed" }, "tags": [] }, "source": [ "# Exploratory Data Analysis" ] }, { "cell_type": "code", "execution_count": 13, "id": "01cf1f03", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:38.641103Z", "iopub.status.busy": "2023-09-03T09:54:38.640074Z", 
"iopub.status.idle": "2023-09-03T09:54:40.212205Z", "shell.execute_reply": "2023-09-03T09:54:40.211295Z" }, "papermill": { "duration": 1.58707, "end_time": "2023-09-03T09:54:40.214269", "exception": false, "start_time": "2023-09-03T09:54:38.627199", "status": "completed" }, "tags": [] }, "outputs": [ { "data": { "application/vnd.plotly.v1+json": { "config": { "plotlyServerURL": "https://plot.ly" }, "data": [ { "alignmentgroup": "True", "hovertemplate": "x=%{x}
y=%{y}
text=%{text}
color=%{marker.color}", "legendgroup": "", "marker": { "color": [ 153, 150, 149, 139, 138, 136, 131, 128, 117, 115 ], "coloraxis": "coloraxis", "line": { "color": "black", "width": 1.5 }, "pattern": { "shape": "" } }, "name": "", "offsetgroup": "", "opacity": 0.8, "orientation": "v", "showlegend": false, "text": [ 153, 150, 149, 139, 138, 136, 131, 128, 117, 115 ], "textposition": "auto", "type": "bar", "x": [ "ATALA", "ARCIGERA FLOWER MOTH", "BANDED ORANGE HELICONIAN", "ATLAS MOTH", "APPOLLO", "ADONIS", "AN 88", "BANDED PEACOCK", "AFRICAN GIANT SWALLOWTAIL", "AMERICAN SNOOT" ], "xaxis": "x", "y": [ 153, 150, 149, 139, 138, 136, 131, 128, 117, 115 ], "yaxis": "y" } ], "layout": { "barmode": "relative", "coloraxis": { "colorbar": { "title": { "text": "color" } }, "colorscale": [ [ 0, "rgb(247,251,255)" ], [ 0.125, "rgb(222,235,247)" ], [ 0.25, "rgb(198,219,239)" ], [ 0.375, "rgb(158,202,225)" ], [ 0.5, "rgb(107,174,214)" ], [ 0.625, "rgb(66,146,198)" ], [ 0.75, "rgb(33,113,181)" ], [ 0.875, "rgb(8,81,156)" ], [ 1, "rgb(8,48,107)" ] ] }, "legend": { "tracegroupgap": 0 }, "margin": { "t": 60 }, "template": { "data": { "bar": [ { "error_x": { "color": "#2a3f5f" }, "error_y": { "color": "#2a3f5f" }, "marker": { "line": { "color": "white", "width": 0.5 }, "pattern": { "fillmode": "overlay", "size": 10, "solidity": 0.2 } }, "type": "bar" } ], "barpolar": [ { "marker": { "line": { "color": "white", "width": 0.5 }, "pattern": { "fillmode": "overlay", "size": 10, "solidity": 0.2 } }, "type": "barpolar" } ], "carpet": [ { "aaxis": { "endlinecolor": "#2a3f5f", "gridcolor": "#C8D4E3", "linecolor": "#C8D4E3", "minorgridcolor": "#C8D4E3", "startlinecolor": "#2a3f5f" }, "baxis": { "endlinecolor": "#2a3f5f", "gridcolor": "#C8D4E3", "linecolor": "#C8D4E3", "minorgridcolor": "#C8D4E3", "startlinecolor": "#2a3f5f" }, "type": "carpet" } ], "choropleth": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "type": "choropleth" } ], "contour": [ { "colorbar": { "outlinewidth": 0, "ticks": 
"" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "contour" } ], "contourcarpet": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "type": "contourcarpet" } ], "heatmap": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "heatmap" } ], "heatmapgl": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "heatmapgl" } ], "histogram": [ { "marker": { "pattern": { "fillmode": "overlay", "size": 10, "solidity": 0.2 } }, "type": "histogram" } ], "histogram2d": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "histogram2d" } ], "histogram2dcontour": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], 
[ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "histogram2dcontour" } ], "mesh3d": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "type": "mesh3d" } ], "parcoords": [ { "line": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "parcoords" } ], "pie": [ { "automargin": true, "type": "pie" } ], "scatter": [ { "fillpattern": { "fillmode": "overlay", "size": 10, "solidity": 0.2 }, "type": "scatter" } ], "scatter3d": [ { "line": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scatter3d" } ], "scattercarpet": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scattercarpet" } ], "scattergeo": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scattergeo" } ], "scattergl": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scattergl" } ], "scattermapbox": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scattermapbox" } ], "scatterpolar": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scatterpolar" } ], "scatterpolargl": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scatterpolargl" } ], "scatterternary": [ { "marker": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "type": "scatterternary" } ], "surface": [ { "colorbar": { "outlinewidth": 0, "ticks": "" }, "colorscale": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "type": "surface" } ], 
"table": [ { "cells": { "fill": { "color": "#EBF0F8" }, "line": { "color": "white" } }, "header": { "fill": { "color": "#C8D4E3" }, "line": { "color": "white" } }, "type": "table" } ] }, "layout": { "annotationdefaults": { "arrowcolor": "#2a3f5f", "arrowhead": 0, "arrowwidth": 1 }, "autotypenumbers": "strict", "coloraxis": { "colorbar": { "outlinewidth": 0, "ticks": "" } }, "colorscale": { "diverging": [ [ 0, "#8e0152" ], [ 0.1, "#c51b7d" ], [ 0.2, "#de77ae" ], [ 0.3, "#f1b6da" ], [ 0.4, "#fde0ef" ], [ 0.5, "#f7f7f7" ], [ 0.6, "#e6f5d0" ], [ 0.7, "#b8e186" ], [ 0.8, "#7fbc41" ], [ 0.9, "#4d9221" ], [ 1, "#276419" ] ], "sequential": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ], "sequentialminus": [ [ 0, "#0d0887" ], [ 0.1111111111111111, "#46039f" ], [ 0.2222222222222222, "#7201a8" ], [ 0.3333333333333333, "#9c179e" ], [ 0.4444444444444444, "#bd3786" ], [ 0.5555555555555556, "#d8576b" ], [ 0.6666666666666666, "#ed7953" ], [ 0.7777777777777778, "#fb9f3a" ], [ 0.8888888888888888, "#fdca26" ], [ 1, "#f0f921" ] ] }, "colorway": [ "#636efa", "#EF553B", "#00cc96", "#ab63fa", "#FFA15A", "#19d3f3", "#FF6692", "#B6E880", "#FF97FF", "#FECB52" ], "font": { "color": "#2a3f5f" }, "geo": { "bgcolor": "white", "lakecolor": "white", "landcolor": "white", "showlakes": true, "showland": true, "subunitcolor": "#C8D4E3" }, "hoverlabel": { "align": "left" }, "hovermode": "closest", "mapbox": { "style": "light" }, "paper_bgcolor": "white", "plot_bgcolor": "white", "polar": { "angularaxis": { "gridcolor": "#EBF0F8", "linecolor": "#EBF0F8", "ticks": "" }, "bgcolor": "white", "radialaxis": { "gridcolor": "#EBF0F8", "linecolor": "#EBF0F8", "ticks": "" } }, "scene": { "xaxis": { "backgroundcolor": "white", "gridcolor": 
"#DFE8F3", "gridwidth": 2, "linecolor": "#EBF0F8", "showbackground": true, "ticks": "", "zerolinecolor": "#EBF0F8" }, "yaxis": { "backgroundcolor": "white", "gridcolor": "#DFE8F3", "gridwidth": 2, "linecolor": "#EBF0F8", "showbackground": true, "ticks": "", "zerolinecolor": "#EBF0F8" }, "zaxis": { "backgroundcolor": "white", "gridcolor": "#DFE8F3", "gridwidth": 2, "linecolor": "#EBF0F8", "showbackground": true, "ticks": "", "zerolinecolor": "#EBF0F8" } }, "shapedefaults": { "line": { "color": "#2a3f5f" } }, "ternary": { "aaxis": { "gridcolor": "#DFE8F3", "linecolor": "#A2B1C6", "ticks": "" }, "baxis": { "gridcolor": "#DFE8F3", "linecolor": "#A2B1C6", "ticks": "" }, "bgcolor": "white", "caxis": { "gridcolor": "#DFE8F3", "linecolor": "#A2B1C6", "ticks": "" } }, "title": { "x": 0.05 }, "xaxis": { "automargin": true, "gridcolor": "#EBF0F8", "linecolor": "#EBF0F8", "ticks": "", "title": { "standoff": 15 }, "zerolinecolor": "#EBF0F8", "zerolinewidth": 2 }, "yaxis": { "automargin": true, "gridcolor": "#EBF0F8", "linecolor": "#EBF0F8", "ticks": "", "title": { "standoff": 15 }, "zerolinecolor": "#EBF0F8", "zerolinewidth": 2 } } }, "title": { "text": "Labels Distribution" }, "xaxis": { "anchor": "y", "domain": [ 0, 1 ], "title": { "text": "Label" } }, "yaxis": { "anchor": "x", "domain": [ 0, 1 ], "title": { "text": "Count" } } } }, "text/html": [ "
" ] }, "metadata": {}, "output_type": "display_data" } ], "source": [ "label_counts = df['labels'].value_counts()[:10]\n", "\n", "fig = px.bar(x=label_counts.index, \n", " y=label_counts.values,\n", " color=label_counts.values,\n", " text=label_counts.values,\n", " color_continuous_scale='Blues')\n", "\n", "fig.update_layout(\n", " title_text='Labels Distribution',\n", " template='plotly_white',\n", " xaxis=dict(\n", " title='Label',\n", " ),\n", " yaxis=dict(\n", " title='Count',\n", " )\n", ")\n", "\n", "fig.update_traces(marker_line_color='black', \n", " marker_line_width=1.5, \n", " opacity=0.8)\n", " \n", "fig.show()" ] }, { "cell_type": "markdown", "id": "40cae06a", "metadata": { "papermill": { "duration": 0.045333, "end_time": "2023-09-03T09:54:44.387581", "exception": false, "start_time": "2023-09-03T09:54:44.342248", "status": "completed" }, "tags": [] }, "source": [ "# Generate Image using ImageDataGenerator" ] }, { "cell_type": "code", "execution_count": 5, "id": "9c49b50f", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:44.571283Z", "iopub.status.busy": "2023-09-03T09:54:44.570890Z", "iopub.status.idle": "2023-09-03T09:54:47.843050Z", "shell.execute_reply": "2023-09-03T09:54:47.842111Z" }, "papermill": { "duration": 3.322283, "end_time": "2023-09-03T09:54:47.845125", "exception": false, "start_time": "2023-09-03T09:54:44.522842", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 1256 images belonging to 10 classes.\n", "Found 50 images belonging to 10 classes.\n" ] } ], "source": [ "# only train data needs to be augmented \n", "train_gen = ImageDataGenerator(horizontal_flip=True, vertical_flip=True, rescale=1/255.)\n", "val_gen = ImageDataGenerator(rescale=1/255.)\n", "\n", "train_dir = 'C:/Users/kamel/Documents/Image Classification/butterfly-dataset/train'\n", "val_dir = 'C:/Users/kamel/Documents/Image Classification/butterfly-dataset/valid'\n", "\n", "BATCH_SIZE = 
16\n", "SEED = 56\n", "IMAGE_SIZE = (224, 224)\n", "\n", "train_flow_gen = train_gen.flow_from_directory(directory=train_dir,\n", " class_mode='sparse',\n", " batch_size=BATCH_SIZE,\n", " target_size=IMAGE_SIZE,\n", " seed=SEED)\n", "\n", "val_flow_gen = val_gen.flow_from_directory(directory=val_dir,\n", " class_mode='sparse',\n", " batch_size=BATCH_SIZE,\n", " target_size=IMAGE_SIZE,\n", " seed=SEED)" ] }, { "cell_type": "markdown", "id": "0398ba07", "metadata": { "papermill": { "duration": 0.045878, "end_time": "2023-09-03T09:54:47.938297", "exception": false, "start_time": "2023-09-03T09:54:47.892419", "status": "completed" }, "tags": [] }, "source": [ "# Create Model" ] }, { "cell_type": "code", "execution_count": 6, "id": "2b80bd86", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T09:54:48.123906Z", "iopub.status.busy": "2023-09-03T09:54:48.122767Z", "iopub.status.idle": "2023-09-03T09:54:48.130732Z", "shell.execute_reply": "2023-09-03T09:54:48.129884Z" }, "papermill": { "duration": 0.058368, "end_time": "2023-09-03T09:54:48.132785", "exception": false, "start_time": "2023-09-03T09:54:48.074417", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "verbose=False\n", " \n", "input_tensor = Input(shape=(224, 224, 3))\n", " \n", "base_model = ResNet50V2(input_tensor=input_tensor, include_top=False, weights='imagenet')\n", " \n", "bm_output = base_model.output\n", "\n", "x = GlobalAveragePooling2D()(bm_output)\n", "x = Dense(1024, activation='relu')(x)\n", "x = Dropout(rate=0.5)(x)\n", "output = Dense(10, activation='softmax')(x)\n", "model = Model(inputs=input_tensor, outputs=output)\n", " \n", "if verbose:\n", " model.summary()" ] }, { "cell_type": "markdown", "id": "c28b3bd4", "metadata": { "papermill": { "duration": 0.327423, "end_time": "2023-09-03T10:55:37.594519", "exception": false, "start_time": "2023-09-03T10:55:37.267096", "status": "completed" }, "tags": [] }, "source": [ "# ResNet Modelling" ] }, { "cell_type": "code", 
"execution_count": 7, "id": "e1087e22", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T10:55:38.752875Z", "iopub.status.busy": "2023-09-03T10:55:38.752385Z", "iopub.status.idle": "2023-09-03T10:55:41.001254Z", "shell.execute_reply": "2023-09-03T10:55:41.000298Z" }, "papermill": { "duration": 2.573106, "end_time": "2023-09-03T10:55:41.003664", "exception": false, "start_time": "2023-09-03T10:55:38.430558", "status": "completed" }, "tags": [] }, "outputs": [], "source": [ "model.compile(optimizer=Adam(learning_rate=0.001), loss='sparse_categorical_crossentropy', metrics=['accuracy'])\n", "\n", "rlr_cb = ReduceLROnPlateau(monitor='val_loss', factor=0.2, patience=3, mode='min', verbose=0)\n", "early_cb = EarlyStopping(monitor='val_loss', patience=5, mode='min', verbose=0)" ] }, { "cell_type": "code", "execution_count": 10, "id": "19196570", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T10:55:41.562460Z", "iopub.status.busy": "2023-09-03T10:55:41.562088Z", "iopub.status.idle": "2023-09-03T11:28:47.817367Z", "shell.execute_reply": "2023-09-03T11:28:47.816362Z" }, "papermill": { "duration": 1986.53774, "end_time": "2023-09-03T11:28:47.819746", "exception": false, "start_time": "2023-09-03T10:55:41.282006", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Epoch 1/5\n", "79/79 [==============================] - 661s 8s/step - loss: 1.2424 - accuracy: 0.6815 - val_loss: 76.2388 - val_accuracy: 0.1400 - lr: 0.0010\n", "Epoch 2/5\n", "79/79 [==============================] - 686s 9s/step - loss: 0.6616 - accuracy: 0.8169 - val_loss: 3.6352 - val_accuracy: 0.6000 - lr: 0.0010\n", "Epoch 3/5\n", "79/79 [==============================] - 692s 9s/step - loss: 0.4898 - accuracy: 0.8583 - val_loss: 6.5402 - val_accuracy: 0.3800 - lr: 0.0010\n", "Epoch 4/5\n", "79/79 [==============================] - 699s 9s/step - loss: 0.4228 - accuracy: 0.8933 - val_loss: 0.5610 - val_accuracy: 0.8200 - lr: 
0.0010\n", "Epoch 5/5\n", "79/79 [==============================] - 694s 9s/step - loss: 0.2828 - accuracy: 0.9132 - val_loss: 0.0705 - val_accuracy: 0.9800 - lr: 0.0010\n" ] }, { "data": { "text/plain": [ "" ] }, "execution_count": 10, "metadata": {}, "output_type": "execute_result" } ], "source": [ "model.fit(train_flow_gen, epochs=5,\n", " steps_per_epoch=int(np.ceil(train_df.shape[0]/BATCH_SIZE)),\n", " validation_data=val_flow_gen,\n", " validation_steps=int(np.ceil(val_df.shape[0]/BATCH_SIZE)),\n", " callbacks=[rlr_cb, early_cb])" ] }, { "cell_type": "code", "execution_count": 12, "id": "a7fdf171", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Found 50 images belonging to 10 classes.\n" ] } ], "source": [ "test_dir = 'C:/Users/kamel/Documents/Image Classification/butterfly-dataset/test'\n", "test_gen = ImageDataGenerator(rescale=1/255.)\n", "test_flow_gen = test_gen.flow_from_directory(directory=test_dir,\n", " class_mode='sparse',\n", " batch_size=BATCH_SIZE,\n", " target_size=IMAGE_SIZE,\n", " seed=SEED)" ] }, { "cell_type": "code", "execution_count": 13, "id": "f5ac91bb", "metadata": { "execution": { "iopub.execute_input": "2023-09-03T11:28:48.864295Z", "iopub.status.busy": "2023-09-03T11:28:48.863901Z", "iopub.status.idle": "2023-09-03T11:28:54.311623Z", "shell.execute_reply": "2023-09-03T11:28:54.310452Z" }, "papermill": { "duration": 5.996544, "end_time": "2023-09-03T11:28:54.313850", "exception": false, "start_time": "2023-09-03T11:28:48.317306", "status": "completed" }, "tags": [] }, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "4/4 [==============================] - 5s 1s/step - loss: 0.1499 - accuracy: 0.9400\n", "ResNet Test Data Accuracy: 0.9399999976158142\n" ] } ], "source": [ "print('ResNet Test Data Accuracy: {0}'.format(model.evaluate(test_flow_gen)[1:][0]))" ] }, { "cell_type": "code", "execution_count": 14, "id": "78b5d06a", "metadata": {}, "outputs": [], "source": [ "# Save the 
current weights manually\n", "model.save('C:/Users/kamel/Documents/Image Classification/model_checkpoint_manual_resnet.h5')" ] }, { "cell_type": "markdown", "id": "0a9e58e9", "metadata": {}, "source": [ "# Deployment" ] }, { "cell_type": "code", "execution_count": 4, "id": "72ab47ea", "metadata": {}, "outputs": [ { "name": "stdout", "output_type": "stream", "text": [ "Running on local URL: http://127.0.0.1:7861\n", "\n", "To create a public link, set `share=True` in `launch()`.\n" ] }, { "data": { "text/html": [ "
" ], "text/plain": [ "" ] }, "metadata": {}, "output_type": "display_data" }, { "data": { "text/plain": [] }, "execution_count": 4, "metadata": {}, "output_type": "execute_result" }, { "name": "stdout", "output_type": "stream", "text": [ "1/1 [==============================] - 1s 1s/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Traceback (most recent call last):\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\queueing.py\", line 495, in call_prediction\n", " output = await route_utils.call_process_api(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\route_utils.py\", line 232, in call_process_api\n", " output = await app.get_blocks().process_api(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\blocks.py\", line 1561, in process_api\n", " result = await self.call_function(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\blocks.py\", line 1179, in call_function\n", " prediction = await anyio.to_thread.run_sync(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\to_thread.py\", line 28, in run_sync\n", " return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable,\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 818, in run_sync_in_worker_thread\n", " return await future\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 754, in run\n", " result = context.run(func, *args)\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\utils.py\", line 678, in wrapper\n", " response = f(*args, **kwargs)\n", " File \"C:\\Users\\kamel\\AppData\\Local\\Temp\\ipykernel_9500\\3770787755.py\", line 35, in classify_image\n", " img = preprocess_image(img)\n", " File \"C:\\Users\\kamel\\AppData\\Local\\Temp\\ipykernel_9500\\3770787755.py\", line 28, in preprocess_image\n", " raise ValueError(\"Unsupported input type. 
Please provide a file path or a NumPy array.\")\n", "ValueError: Unsupported input type. Please provide a file path or a NumPy array.\n" ] }, { "name": "stdout", "output_type": "stream", "text": [ "1/1 [==============================] - 0s 131ms/step\n" ] }, { "name": "stderr", "output_type": "stream", "text": [ "Traceback (most recent call last):\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\queueing.py\", line 495, in call_prediction\n", " output = await route_utils.call_process_api(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\route_utils.py\", line 232, in call_process_api\n", " output = await app.get_blocks().process_api(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\blocks.py\", line 1561, in process_api\n", " result = await self.call_function(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\blocks.py\", line 1179, in call_function\n", " prediction = await anyio.to_thread.run_sync(\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\to_thread.py\", line 28, in run_sync\n", " return await get_asynclib().run_sync_in_worker_thread(func, *args, cancellable=cancellable,\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 818, in run_sync_in_worker_thread\n", " return await future\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\anyio\\_backends\\_asyncio.py\", line 754, in run\n", " result = context.run(func, *args)\n", " File \"D:\\Software\\anaconda3\\lib\\site-packages\\gradio\\utils.py\", line 678, in wrapper\n", " response = f(*args, **kwargs)\n", " File \"C:\\Users\\kamel\\AppData\\Local\\Temp\\ipykernel_9500\\3770787755.py\", line 35, in classify_image\n", " img = preprocess_image(img)\n", " File \"C:\\Users\\kamel\\AppData\\Local\\Temp\\ipykernel_9500\\3770787755.py\", line 28, in preprocess_image\n", " raise ValueError(\"Unsupported input type. 
Please provide a file path or a NumPy array.\")\n", "ValueError: Unsupported input type. Please provide a file path or a NumPy array.\n" ] } ], "source": [ "import gradio as gr\n", "import tensorflow as tf\n", "from tensorflow.keras.models import load_model\n", "import numpy as np\n", "import cv2\n", "\n", "# Load the trained model\n", "model_path = 'C:/Users/kamel/Documents/Image Classification/model_checkpoint_manual_resnet.h5'\n", "model = load_model(model_path)\n", "\n", "class_names = ['ADONIS', 'AFRICAN GIANT SWALLOWTAIL', 'AMERICAN SNOOT', 'AN 88', 'APPOLLO', 'ARCIGERA FLOWER MOTH', 'ATALA', 'ATLAS MOTH', 'BANDED ORANGE HELICONIAN', 'BANDED PEACOCK']\n", "\n", "# Define a function to preprocess the input image\n", "def preprocess_image(img):\n", " # Check if img is a file path or an image object\n", " if isinstance(img, str):\n", " # Load and preprocess the image\n", " img = cv2.imread(img)\n", " img = cv2.resize(img, (224, 224))\n", " img = img / 255.0 # Normalize pixel values\n", " img = np.expand_dims(img, axis=0) # Add batch dimension\n", " elif isinstance(img, np.ndarray):\n", " # If img is already an image array, resize it\n", " img = cv2.resize(img, (224, 224))\n", " img = img / 255.0 # Normalize pixel values\n", " img = np.expand_dims(img, axis=0) # Add batch dimension\n", " else:\n", " raise ValueError(\"Unsupported input type. 
Please provide a file path or a NumPy array.\")\n", "\n", " return img\n", "\n", "# Define the classification function\n", "def classify_image(img):\n", " # Preprocess the image\n", " img = preprocess_image(img)\n", " \n", " # Make predictions\n", " predictions = model.predict(img)\n", " \n", " # Get the predicted class label\n", " predicted_class = np.argmax(predictions)\n", " \n", " # Get the predicted class name\n", " predicted_class_name = class_names[predicted_class]\n", " \n", " return f\"Predicted Class: {predicted_class_name}\"\n", "\n", "# Create a Gradio interface\n", "iface = gr.Interface(fn=classify_image, \n", " inputs=\"image\",\n", " outputs=\"text\",\n", " live=True)\n", "\n", "# Launch the Gradio app\n", "iface.launch()\n" ] }, { "cell_type": "code", "execution_count": null, "id": "b97686a4", "metadata": {}, "outputs": [], "source": [] } ], "metadata": { "kernelspec": { "display_name": "Python 3 (ipykernel)", "language": "python", "name": "python3" }, "language_info": { "codemirror_mode": { "name": "ipython", "version": 3 }, "file_extension": ".py", "mimetype": "text/x-python", "name": "python", "nbconvert_exporter": "python", "pygments_lexer": "ipython3", "version": "3.9.18" }, "papermill": { "default_parameters": {}, "duration": 5680.107554, "end_time": "2023-09-03T11:29:02.521595", "environment_variables": {}, "exception": null, "input_path": "__notebook__.ipynb", "output_path": "__notebook__.ipynb", "parameters": {}, "start_time": "2023-09-03T09:54:22.414041", "version": "2.4.0" } }, "nbformat": 4, "nbformat_minor": 5 }