DanielPFlorian committed
Commit 4d817ae · 1 Parent(s): c342e41

create test app

Files changed (1): app.py (+211, -5)
app.py CHANGED
@@ -1,8 +1,214 @@
-
 import gradio as gr
-
-def greet(name):
-    return "Hello " + name + "!!"
-
-iface = gr.Interface(fn=greet, inputs="text", outputs="text")
-iface.launch()
+from PIL import Image
+import numpy as np
+import json
+from torchvision import models
+import torch.nn.functional as F
+from torch import nn
+import torch
+import matplotlib.pyplot as plt
+import matplotlib.ticker as ticker
+from huggingface_hub import HfApi
+
+def load_checkpoint(filepath):
+    """Builds PyTorch Model from saved model
+    Returns built model
+
+    Arguments: string, filepath of saved PyTorch model
+    """
+
+    # Load pretrained weights
+    weights = "IMAGENET1K_V1"
+
+    # Load model using pretrained weights
+    model = models.maxvit_t(weights=weights)
+
+    # Load checkpoint
+    checkpoint = torch.load(filepath, map_location=torch.device("cpu"))
+
+    # Get new classifier from checkpoint
+    new_classifier = checkpoint["classifier"]
+
+    # Replace pretrained model output classifier layer[5] with newly created classifier
+    model.classifier[5] = new_classifier
+
+    # Load model weights
+    model.load_state_dict(checkpoint["state_dict"])
+
+    # Load class to index mapping
+    model.class_to_idx = checkpoint["class_to_idx"]
+
+    return model
+
+
+class Network(nn.Module):
+    def __init__(self, input_size, hidden_layers, output_size=102, drop_p=0.2):
+        """Builds a feedforward network with arbitrary hidden layers.
+
+        Arguments
+        ---------
+        input_size: integer, size of the input layer
+        output_size: integer, size of the output layer
+        hidden_layers: list of integers, the sizes of the hidden layers
+        drop_p: float, dropout probability
+        """
+        super().__init__()
+
+        self.drop_p = drop_p
+
+        # Input to a hidden layer
+        self.hidden_layers = nn.ModuleList([nn.Linear(input_size, hidden_layers[0])])
+
+        # Add a variable number of more hidden layers
+        layer_sizes = zip(hidden_layers[:-1], hidden_layers[1:])
+        self.hidden_layers.extend([nn.Linear(h1, h2) for h1, h2 in layer_sizes])
+
+        self.output = nn.Linear(hidden_layers[-1], output_size)
+
+        print(
+            f"\nNumber of layers: {len(self.hidden_layers)}"
+            f"\nNumber of units in layers:{hidden_layers}"
+        )
+
+    def forward(self, x):
+        """Forward pass through the network, returns the output logits"""
+
+        for each in self.hidden_layers:
+            x = F.relu(each(x))
+            x = F.dropout(x, self.drop_p)
+        x = self.output(x)
+
+        return F.log_softmax(x, dim=1)
+
+def process_image(img_path):
+    """Scales, crops, and normalizes a PIL image for a PyTorch model,
+    returns a Numpy array
+
+    Arguments
+    ---------
+    image: path of the image to be processed
+    """
+    inp = Image.open(img_path)
+    exif = inp.getexif()
+
+    # Get image size
+    w, h = inp.size
+
+    # Create inference directory for prediction
+    os.makedirs("inference", exist_ok=True)
+
+    # Remove non alphanumeric characters
+    image_path = re.sub(r"\W+", "", img_path)
+    image_path = str(datetime.now()) + ".png"
+
+    # Join to directory path
+    inf_image = os.path.join("inference", img_path)
+
+    # Use repo for inference
+    inp.save(inf_image, exif=exif)
+    HfApi().upload_file(
+        path_or_fileobj=inf_image,
+        path_in_repo=image_path,
+        repo_id="DanielPFlorian/flower-image-classifier",
+        repo_type="dataset",
+        token=HF_TOKEN,
+    )
+
+    # resize image so shortest side is 256 preserving aspect ratio
+    if w > h:
+        inp.thumbnail((10000, 256))
+    elif h > w:
+        inp.thumbnail((256, 10000))
+    else:
+        inp.thumbnail((256, 256))
+
+    # crop center 224x224
+    w, h = inp.size
+    left = (w - 224) // 2
+    top = (h - 224) // 2
+    right = (w + 224) // 2
+    bottom = (h + 224) // 2
+    image = inp.crop((left, top, right, bottom))
+
+    # Convert pil image to numpy array and scale color channels to [0, 1]
+    np_image = np.array(image) / 255
+
+    # Normalize image
+    mean = np.array([0.485, 0.456, 0.406])  # Mean
+    std = np.array([0.229, 0.224, 0.225])  # Standard deviation
+    np_image = (np_image - mean) / std
+
+    # Move color channels to first dimension
+    np_image = np_image.transpose((2, 0, 1))
+
+    return np_image
+
+def predict(image_path, model=model, category_names=cat_to_name, topk=5):
+    """Predict the class (or classes) of an image using a trained deep learning model.
+    Arguments
+    ---------
+    image_path: path of the image to be processed
+    model: model to be used for prediction
+    topk: number of top predicted classes to return
+    """
+    # Process image function
+    image = process_image(image_path)
+
+    # Convert image to float tensor with batch size of 1
+    image = torch.as_tensor(image).view((1, 3, 224, 224)).float()
+
+    # Set model to evaluation mode/ inference mode
+    model.eval()
+
+    # Turn off gradients to speed up this part
+    with torch.no_grad():
+        # Forward Pass. Ouputs log probabilities of classes
+        log_ps = model.forward(image)
+
+    # Exponential of log probabilities for each class
+    ps = torch.exp(log_ps)
+
+    # Get top k predictions. Returns probabilities and class indexes
+    top_probs, idx = ps.topk(topk, dim=1)
+
+    # Convert tensors to lists. Index[0] returns unnested List
+    top_probs, idx = top_probs.tolist()[0], idx.tolist()[0]
+
+    # Convert top_probs to percentages
+    percentages = [round(prob * 100.00, 2) for prob in top_probs]
+
+    # Converts class_labels:indexes to indexes:class_labels
+    idx_to_class = {val: key for key, val in model.class_to_idx.items()}
+
+    # get class labels from indexes
+    top_labels = [idx_to_class[lab] for lab in idx]
+
+    # Get names from labels
+    if category_names:
+        top_labels = [category_names[str(lab)] for lab in top_labels]
+
+    # Plot Functionality
+
+    image = Image.open(image_path)
+    fig, (ax1, ax2) = plt.subplots(ncols=2)
+    ax1.imshow(image)
+    ax1.axis("off")
+    ax2.barh(np.arange(len(top_labels)), percentages)
+    asp = np.diff(ax2.get_xlim())[0] / np.diff(ax2.get_ylim())[0]
+    ax2.set_aspect(asp)
+    ax2.set_yticks(np.arange(len(top_labels)))
+    ax2.set_yticklabels(top_labels)
+    ax2.invert_yaxis()
+    ax2.xaxis.set_major_formatter(ticker.PercentFormatter())
+    plt.tight_layout()
+    ax2.set_title("Class Probability")
+    plt.show()
+
+    return fig
+
+gr.Interface(
+    predict,
+    inputs=gr.inputs.Image(label="Upload a flower image", type="filepath"),
+    outputs=gr.outputs.Label(num_top_classes=5),
+    title="What kind of flower is this?",
+).launch()
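
Note: as committed, app.py never imports os, re, or datetime, even though process_image() calls os.makedirs, os.path.join, re.sub, and datetime.now(), and it never defines the HF_TOKEN passed to HfApi().upload_file() or the module-level model and cat_to_name objects used as defaults in def predict(...). Because default arguments are evaluated when a function is defined, the def predict(...) line alone would raise a NameError as soon as the file runs. The block below is a minimal sketch of the setup the file appears to assume; it is not part of this commit, the checkpoint and JSON filenames and the HF_TOKEN secret name are placeholder assumptions, and it would need to sit after load_checkpoint() but before predict() is defined.

import os
import re
from datetime import datetime

# json and torch are already imported at the top of app.py.

# Placeholder: token for HfApi().upload_file(), typically exposed to a Space
# as a secret environment variable.
HF_TOKEN = os.environ.get("HF_TOKEN")

# Placeholder checkpoint path: rebuilds the fine-tuned maxvit_t model.
model = load_checkpoint("checkpoint.pth")

# Placeholder JSON file mapping class labels to flower names, used by
# predict()'s category_names default.
with open("cat_to_name.json", "r") as f:
    cat_to_name = json.load(f)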
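
The Interface wiring is also worth flagging: gr.inputs.Image and gr.outputs.Label belong to the legacy pre-3.0 Gradio namespace, and a Label component expects a mapping of class names to confidences while predict() returns a Matplotlib figure. One possible way to reconcile this on a current Gradio release, assuming the rest of app.py stays as shown above, is sketched below; it is not part of this commit.

# Sketch only: gr.Image and gr.Plot are the current-namespace components, and
# gr.Plot can render the Matplotlib figure that predict() already returns.
iface = gr.Interface(
    fn=predict,
    inputs=gr.Image(label="Upload a flower image", type="filepath"),
    outputs=gr.Plot(label="Class Probability"),
    title="What kind of flower is this?",
)
iface.launch()

Alternatively, predict() could return a {flower name: probability} dict so a gr.Label(num_top_classes=5) output could be kept.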