NORLIE JHON MALAGDAO committed on
Commit db69c22 · verified · 1 Parent(s): 7cf341d

Delete app.py

Files changed (1)
  1. app.py +0 -199
app.py DELETED
@@ -1,199 +0,0 @@
-
- import gradio as gr
- import matplotlib.pyplot as plt
- import numpy as np
- import os
- import PIL
- import tensorflow as tf
-
- from tensorflow import keras
- from tensorflow.keras import layers
- from tensorflow.keras.models import Sequential
-
- from PIL import Image
- import gdown
- import zipfile
-
- import pathlib
-
- # Define the Google Drive shareable link
- gdrive_url = 'https://drive.google.com/file/d/1HjHYlQyRz5oWt8kehkt1TiOGRRlKFsv8/view?usp=drive_link'
-
- # Extract the file ID from the URL
- file_id = gdrive_url.split('/d/')[1].split('/view')[0]
- direct_download_url = f'https://drive.google.com/uc?id={file_id}'
-
- # Define the local filename to save the ZIP file
- local_zip_file = 'file.zip'
-
- # Download the ZIP file
- gdown.download(direct_download_url, local_zip_file, quiet=False)
-
- # Directory to extract files
- extracted_path = 'extracted_files'
-
- # Verify if the downloaded file is a ZIP file and extract it
- try:
-     with zipfile.ZipFile(local_zip_file, 'r') as zip_ref:
-         zip_ref.extractall(extracted_path)
-         print("Extraction successful!")
- except zipfile.BadZipFile:
-     print("Error: The downloaded file is not a valid ZIP file.")
-
- # Optionally, you can delete the ZIP file after extraction
- os.remove(local_zip_file)
-
- # Convert the extracted directory path to a pathlib.Path object
- data_dir = pathlib.Path(extracted_path)
-
- # Print the directory structure to debug
- for root, dirs, files in os.walk(extracted_path):
-     level = root.replace(extracted_path, '').count(os.sep)
-     indent = ' ' * 4 * level
-     print(f"{indent}{os.path.basename(root)}/")
-     subindent = ' ' * 4 * (level + 1)
-     for f in files:
-         print(f"{subindent}{f}")
-
- # Path to the dataset directory
- data_dir = pathlib.Path('extracted_files/Pest_Dataset')
-
- # Sanity check: list the 'bees' class and open one sample image
- bees = list(data_dir.glob('bees/*'))
- print(bees[0])
- PIL.Image.open(str(bees[0]))
-
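- # Load the extracted images into training and validation splits (80/20), resized to 180x180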
- img_height, img_width = 180, 180
- batch_size = 32
- train_ds = tf.keras.preprocessing.image_dataset_from_directory(
-     data_dir,
-     validation_split=0.2,
-     subset="training",
-     seed=123,
-     image_size=(img_height, img_width),
-     batch_size=batch_size)
-
- val_ds = tf.keras.preprocessing.image_dataset_from_directory(
-     data_dir,
-     validation_split=0.2,
-     subset="validation",
-     seed=123,
-     image_size=(img_height, img_width),
-     batch_size=batch_size)
-
- class_names = train_ds.class_names
- print(class_names)
-
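- # Preview a 3x3 grid of training images with their class labels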
- plt.figure(figsize=(10, 10))
- for images, labels in train_ds.take(1):
-     for i in range(9):
-         ax = plt.subplot(3, 3, i + 1)
-         plt.imshow(images[i].numpy().astype("uint8"))
-         plt.title(class_names[labels[i]])
-         plt.axis("off")
-
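- # Small CNN: rescaling, three Conv2D/MaxPooling2D blocks, then a dense head over 12 classes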
- num_classes = 12
-
- model = Sequential([
-     layers.experimental.preprocessing.Rescaling(1./255, input_shape=(img_height, img_width, 3)),
-     layers.Conv2D(16, 3, padding='same', activation='relu'),
-     layers.MaxPooling2D(),
-     layers.Conv2D(32, 3, padding='same', activation='relu'),
-     layers.MaxPooling2D(),
-     layers.Conv2D(64, 3, padding='same', activation='relu'),
-     layers.MaxPooling2D(),
-     layers.Flatten(),
-     layers.Dense(128, activation='relu'),
-     layers.Dense(num_classes, activation='softmax')
- ])
-
- # Loss uses from_logits=False because the final layer already applies softmax
- model.compile(optimizer='adam',
-               loss=tf.keras.losses.SparseCategoricalCrossentropy(from_logits=False),
-               metrics=['accuracy'])
-
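- # Train for a fixed number of epochs; history records per-epoch loss and accuracy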
- epochs = 10
- history = model.fit(
-     train_ds,
-     validation_data=val_ds,
-     epochs=epochs
- )
-
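- # Gradio inference: resize the upload to 180x180, add a batch dimension, and return per-class scores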
- def predict_image(img):
-     img = np.array(img)
-     img_resized = tf.image.resize(img, (180, 180))
-     img_4d = tf.expand_dims(img_resized, axis=0)
-     prediction = model.predict(img_4d)[0]
-     return {class_names[i]: float(prediction[i]) for i in range(len(class_names))}
-
- image = gr.Image()
- label = gr.Label(num_top_classes=5)
-
- # Define custom CSS for background image (forward slashes so the path is not mangled by string escapes)
- custom_css = """
- body {
-     background-image: url('extracted_files/Pest_Dataset/bees/bees (444).jpg');
-     background-size: cover;
-     background-repeat: no-repeat;
-     background-attachment: fixed;
-     color: white;
- }
- """
-
- gr.Interface(
-     fn=predict_image,
-     inputs=image,
-     outputs=label,
-     title="Pest Classification",
-     description="Upload an image of a pest to classify it into one of the predefined categories.",
-     css=custom_css
- ).launch(debug=True)