NORLIE JHON MALAGDAO committed
Commit c8f775e · verified · 1 Parent(s): 012748a

Update app.py

Files changed (1):
  1. app.py  +46 -91
app.py CHANGED
@@ -1,4 +1,3 @@
-
 import gradio as gr
 import matplotlib.pyplot as plt
 import numpy as np
@@ -10,19 +9,11 @@ from tensorflow import keras
 from tensorflow.keras import layers
 from tensorflow.keras.models import Sequential
 
-
 from PIL import Image
 import gdown
 import zipfile
-
 import pathlib
 
-
-
-
-
-
-
 # Define the Google Drive shareable link
 gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
 
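(The lines elided between this hunk and the next are where the archive behind gdrive_url is downloaded and unpacked into extracted_files/ before the directory tree is printed. A minimal sketch of that step, assuming a recent gdown that accepts fuzzy=True for share links and a hypothetical local file name pest_dataset.zip; this is not necessarily the exact code in app.py:)

# Hypothetical download/extract step; the file name and flags are assumptions.
zip_path = 'pest_dataset.zip'
gdown.download(gdrive_url, zip_path, quiet=False, fuzzy=True)  # resolve the share URL and fetch the zip

extracted_path = 'extracted_files'
with zipfile.ZipFile(zip_path, 'r') as zf:
    zf.extractall(extracted_path)  # contents land under extracted_files/Pest_Dataset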
@@ -62,109 +53,76 @@ for root, dirs, files in os.walk(extracted_path):
     for f in files:
         print(f"{subindent}{f}")
 
-
-
-import pathlib
 # Path to the dataset directory
 data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-data_dir = pathlib.Path(data_dir)
-
-
 
-img_height,img_width=180,180
-batch_size=32
+img_height, img_width = 180, 180
+batch_size = 32
+
 train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="training",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-
-
-
-
-
+    data_dir,
+    validation_split=0.2,
+    subset="training",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-  data_dir,
-  validation_split=0.2,
-  subset="validation",
-  seed=123,
-  image_size=(img_height, img_width),
-  batch_size=batch_size)
-
-
-
-
-
-
-
+    data_dir,
+    validation_split=0.2,
+    subset="validation",
+    seed=123,
+    image_size=(img_height, img_width),
+    batch_size=batch_size
+)
 
 class_names = train_ds.class_names
 print(class_names)
 
-
-
-
-import matplotlib.pyplot as plt
-
 plt.figure(figsize=(10, 10))
 for images, labels in train_ds.take(1):
-  for i in range(9):
-    ax = plt.subplot(3, 3, i + 1)
-    plt.imshow(images[i].numpy().astype("uint8"))
-    plt.title(class_names[labels[i]])
-    plt.axis("off")
-
-
-
-
-
-
-
+    for i in range(9):
+        ax = plt.subplot(3, 3, i + 1)
+        plt.imshow(images[i].numpy().astype("uint8"))
+        plt.title(class_names[labels[i]])
+        plt.axis("off")
+
+# Define data augmentation
+data_augmentation = keras.Sequential([
+    layers.RandomFlip("horizontal", input_shape=(img_height, img_width, 3)),
+    layers.RandomRotation(0.1),
+    layers.RandomZoom(0.1),
+])
 
 num_classes = 12
 
 model = Sequential([
-  data_augmentation,
-  layers.Rescaling(1./255),
-  layers.Conv2D(16, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Conv2D(32, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Conv2D(64, 3, padding='same', activation='relu'),
-  layers.MaxPooling2D(),
-  layers.Dropout(0.2),
-  layers.Flatten(),
-  layers.Dense(128, activation='relu'),
-  layers.Dense(num_classes, name="outputs")
+    data_augmentation,
+    layers.Rescaling(1./255),
+    layers.Conv2D(16, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(32, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Conv2D(64, 3, padding='same', activation='relu'),
+    layers.MaxPooling2D(),
+    layers.Dropout(0.2),
+    layers.Flatten(),
+    layers.Dense(128, activation='relu'),
+    layers.Dense(num_classes, name="outputs")
 ])
 
-
-
-
-
 model.compile(optimizer='adam',
               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=True),
              metrics=['accuracy'])
 
-
-
-
-epochs=10
+epochs = 10
 history = model.fit(
-  train_ds,
-  validation_data=val_ds,
-  epochs=epochs
+    train_ds,
+    validation_data=val_ds,
+    epochs=epochs
 )
 
-
-import gradio as gr
-import numpy as np
-import tensorflow as tf
-
 def predict_image(img):
     img = np.array(img)
     img_resized = tf.image.resize(img, (180, 180))
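(A note on the data_augmentation block added above: Keras' random preprocessing layers only perturb their input when they run in training mode, e.g. inside model.fit; called at inference time they pass images through unchanged, which is why the block can sit as the first layer of the Sequential model without affecting predictions. A small illustrative check, reusing the names defined in the diff:)

# Illustrative only: augmentation is active in training mode and a no-op at inference.
sample = tf.zeros((1, img_height, img_width, 3))
augmented = data_augmentation(sample, training=True)     # random flip/rotation/zoom applied
passthrough = data_augmentation(sample, training=False)  # returned unchanged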
@@ -175,13 +133,10 @@ def predict_image(img):
 image = gr.Image()
 label = gr.Label(num_top_classes=5)
 
-
-
 gr.Interface(
     fn=predict_image,
     inputs=image,
     outputs=label,
     title="Pest Classification",
-    description="Upload an image of a pest to classify it into one of the predefined categories.",
-    css=custom_css
-).launch(debug=True)
+    description="Upload an image of a pest to classify it into one of the predefined categories."
+).launch(debug=True)
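(The diff cuts predict_image off right after the resize; the elided body has to turn the resized image into the {class name: confidence} mapping that gr.Label displays. Because the final Dense(num_classes) layer has no activation, compile() uses SparseCategoricalCrossentropy(from_logits=True), so prediction also needs an explicit softmax. A hedged sketch of that shape, reusing model and class_names from above; not the file's exact code:)

# Hypothetical completion of predict_image; the real implementation is elided in this diff.
def predict_image(img):
    img = np.array(img)
    img_resized = tf.image.resize(img, (180, 180))
    batch = tf.expand_dims(img_resized, 0)           # shape (1, 180, 180, 3)
    probs = tf.nn.softmax(model.predict(batch)[0])   # logits -> probabilities
    return {class_names[i]: float(probs[i]) for i in range(len(class_names))}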
 
 
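(Side note: tf.keras.preprocessing.image_dataset_from_directory still works but is kept only as a legacy alias on recent TensorFlow releases; the same loader is exposed as tf.keras.utils.image_dataset_from_directory, so the dataset calls above could equally be written as the sketch below.)

# Same behaviour via the non-deprecated utils path (sketch only).
train_ds = tf.keras.utils.image_dataset_from_directory(
    data_dir,
    validation_split=0.2,
    subset="training",
    seed=123,
    image_size=(img_height, img_width),
    batch_size=batch_size,
)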