dpaliwa committed on
Commit 49a6344 · 1 Parent(s): 6333e15

Added Images and model

Files changed (3)
  1. app.ipynb +89 -15
  2. app.py +1 -1
  3. weaponClassifier.ipynb +0 -0
app.ipynb CHANGED
@@ -2,7 +2,7 @@
  "cells": [
  {
   "cell_type": "code",
-  "execution_count": 2,
+  "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
@@ -12,7 +12,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 3,
+  "execution_count": 2,
   "metadata": {},
   "outputs": [],
   "source": [
@@ -22,7 +22,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 4,
+  "execution_count": 3,
   "metadata": {},
   "outputs": [
   {
@@ -32,7 +32,7 @@
   "<PIL.Image.Image image mode=RGB size=198x256>"
   ]
   },
-  "execution_count": 4,
+  "execution_count": 3,
   "metadata": {},
   "output_type": "execute_result"
   }
@@ -46,16 +46,16 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 5,
+  "execution_count": 4,
   "metadata": {},
   "outputs": [],
   "source": [
-  "learn = load_learner('weapon_finder_model.pkl')"
+  "learn = load_learner('model.pkl')"
   ]
  },
  {
   "cell_type": "code",
-  "execution_count": 6,
+  "execution_count": 5,
   "metadata": {},
   "outputs": [
   {
@@ -98,10 +98,10 @@
   {
   "data": {
   "text/plain": [
-  "('human holding weapon', TensorBase(0), TensorBase([9.9961e-01, 3.9367e-04]))"
+  "('human holding weapon', TensorBase(0), TensorBase([0.8322, 0.1348, 0.0330]))"
   ]
   },
-  "execution_count": 6,
+  "execution_count": 5,
   "metadata": {},
   "output_type": "execute_result"
   }
@@ -112,11 +112,11 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 7,
+  "execution_count": 6,
   "metadata": {},
   "outputs": [],
   "source": [
-  "categories = ('Human holding Weapon','Weapon')\n",
+  "categories = ('Weapon held','No weapon held','Weapon')\n",
   "\n",
   "def classify_image(img):\n",
   "    pred,idx,probs = learn.predict(img)\n",
@@ -125,7 +125,7 @@
  },
  {
   "cell_type": "code",
-  "execution_count": 8,
+  "execution_count": 7,
   "metadata": {},
   "outputs": [
   {
@@ -146,8 +146,8 @@
   "name": "stdout",
   "output_type": "stream",
   "text": [
-  "Running on local URL: http://127.0.0.1:7860\n",
-  "Running on public URL: https://80982e5d8261b6ff39.gradio.live\n",
+  "Running on local URL: http://127.0.0.1:7861\n",
+  "Running on public URL: https://5dee978003efa97ecd.gradio.live\n",
   "\n",
   "This share link expires in 72 hours. For free permanent hosting and GPU upgrades (NEW!), check out Spaces: https://huggingface.co/spaces\n"
   ]
@@ -156,9 +156,83 @@
   "data": {
   "text/plain": []
   },
-  "execution_count": 8,
+  "execution_count": 7,
   "metadata": {},
   "output_type": "execute_result"
+  },
+  {
+  "data": {
+  "text/html": [
+  "\n",
+  "<style>\n",
+  " /* Turns off some styling */\n",
+  " progress {\n",
+  " /* gets rid of default border in Firefox and Opera. */\n",
+  " border: none;\n",
+  " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
+  " background-size: auto;\n",
+  " }\n",
+  " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
+  " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
+  " }\n",
+  " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
+  " background: #F44336;\n",
+  " }\n",
+  "</style>\n"
+  ],
+  "text/plain": [
+  "<IPython.core.display.HTML object>"
+  ]
+  },
+  "metadata": {},
+  "output_type": "display_data"
+  },
+  {
+  "data": {
+  "text/html": [],
+  "text/plain": [
+  "<IPython.core.display.HTML object>"
+  ]
+  },
+  "metadata": {},
+  "output_type": "display_data"
+  },
+  {
+  "data": {
+  "text/html": [
+  "\n",
+  "<style>\n",
+  " /* Turns off some styling */\n",
+  " progress {\n",
+  " /* gets rid of default border in Firefox and Opera. */\n",
+  " border: none;\n",
+  " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
+  " background-size: auto;\n",
+  " }\n",
+  " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
+  " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
+  " }\n",
+  " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
+  " background: #F44336;\n",
+  " }\n",
+  "</style>\n"
+  ],
+  "text/plain": [
+  "<IPython.core.display.HTML object>"
+  ]
+  },
+  "metadata": {},
+  "output_type": "display_data"
+  },
+  {
+  "data": {
+  "text/html": [],
+  "text/plain": [
+  "<IPython.core.display.HTML object>"
+  ]
+  },
+  "metadata": {},
+  "output_type": "display_data"
   }
  ],
  "source": [
app.py CHANGED
@@ -11,7 +11,7 @@ def is_holdingweapon(x):
 learn = load_learner('model.pkl')
 
 
-categories = ('Human holding Weapon','Weapon')
+categories = ('Human holding Weapon','No Weapon detected','Weapon')
 
 def classify_image(img):
     pred,idx,probs = learn.predict(img)
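The app.py hunk only widens the categories tuple; wiring classify_image into Gradio typically looks like the sketch below. This is an assumption-based example using the standard gr.Interface API, not the exact launch code in app.py, which lies outside this hunk; it assumes classify_image and categories as defined above.

import gradio as gr

# classify_image and categories come from app.py (see the hunk above).
demo = gr.Interface(
    fn=classify_image,
    inputs=gr.Image(type='pil'),          # PIL image in, matching learn.predict
    outputs=gr.Label(num_top_classes=3),  # report all three category probabilities
)

# share=True produces the temporary *.gradio.live URL seen in the notebook output.
demo.launch(share=True)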
weaponClassifier.ipynb CHANGED
The diff for this file is too large to render. See raw diff