Mihai-Valentin DUMITRU committed on
Commit
51beb00
·
1 Parent(s): 70969ff

fix exports from notebook

Browse files
Files changed (2) hide show
  1. app.py +22 -3
  2. elephants.ipynb +86 -13
app.py CHANGED
@@ -1,9 +1,28 @@
1
- # AUTOGENERATED! DO NOT EDIT! File to edit: ../elephants.ipynb.
2
 
3
  # %% auto 0
4
- __all__ = []
5
 
6
- # %% ../elephants.ipynb 2
7
  from fastai.vision.all import *
8
  import gradio as gr
9
  import timm
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # AUTOGENERATED! DO NOT EDIT! File to edit: ./elephants.ipynb.
2
 
3
  # %% auto 0
4
+ __all__ = ['learn', 'categories', 'image', 'label', 'examples', 'intf', 'classify_image']
5
 
6
+ # %% ./elephants.ipynb 2
7
  from fastai.vision.all import *
8
  import gradio as gr
9
  import timm
10
+
11
+ # %% ./elephants.ipynb 4
12
+ learn = load_learner('model.pkl')
13
+
14
+ # %% ./elephants.ipynb 6
15
+ categories = learn.dls.vocab
16
+
17
+ def classify_image(img):
18
+ pred,idx,probs = learn.predict(img)
19
+ return dict(zip(categories, map(float,probs)))
20
+
21
+ # %% ./elephants.ipynb 8
22
+ image = gr.inputs.Image(shape=(192, 192))
23
+ label = gr.outputs.Label()
24
+ examples = ['baby_african_elephant.jpg', 'baby_asian_elephant.jpg', 'mammoth_plushie.jpg']
25
+
26
+ # %% ./elephants.ipynb 9
27
+ intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)
28
+ intf.launch(inline=False)
elephants.ipynb CHANGED
@@ -63,7 +63,7 @@
63
  "metadata": {},
64
  "outputs": [],
65
  "source": [
66
- "#export\n",
67
  "learn = load_learner('model.pkl')"
68
  ]
69
  },
@@ -132,7 +132,7 @@
132
  "metadata": {},
133
  "outputs": [],
134
  "source": [
135
- "#export\n",
136
  "categories = learn.dls.vocab\n",
137
  "\n",
138
  "def classify_image(img):\n",
@@ -222,7 +222,7 @@
222
  }
223
  ],
224
  "source": [
225
- "#export\n",
226
  "image = gr.inputs.Image(shape=(192, 192))\n",
227
  "label = gr.outputs.Label()\n",
228
  "examples = ['baby_african_elephant.jpg', 'baby_asian_elephant.jpg', 'mammoth_plushie.jpg']"
@@ -230,7 +230,7 @@
230
  },
231
  {
232
  "cell_type": "code",
233
- "execution_count": 11,
234
  "id": "4f463e23",
235
  "metadata": {
236
  "scrolled": true
@@ -240,30 +240,103 @@
240
  "name": "stdout",
241
  "output_type": "stream",
242
  "text": [
243
- "Running on local URL: http://127.0.0.1:7860\n",
244
- "Running on public URL: https://388ff8a9222725f3f3.gradio.live\n",
245
  "\n",
246
- "This share link expires in 72 hours. For free permanent hosting and GPU upgrades, run `gradio deploy` from Terminal to deploy to Spaces (https://huggingface.co/spaces)\n"
247
  ]
248
  },
249
  {
250
  "data": {
251
  "text/plain": []
252
  },
253
- "execution_count": 11,
254
  "metadata": {},
255
  "output_type": "execute_result"
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
256
  }
257
  ],
258
  "source": [
259
- "#export\n",
260
  "intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)\n",
261
- "intf.launch(inline=False, share=True)"
262
  ]
263
  },
264
  {
265
  "cell_type": "code",
266
- "execution_count": 12,
267
  "id": "ff9d5c73",
268
  "metadata": {},
269
  "outputs": [],
@@ -273,7 +346,7 @@
273
  },
274
  {
275
  "cell_type": "code",
276
- "execution_count": 15,
277
  "id": "eb95104b",
278
  "metadata": {},
279
  "outputs": [
@@ -286,7 +359,7 @@
286
  }
287
  ],
288
  "source": [
289
- "nbdev.export.nb_export('elephants.ipynb', 'app.py')\n",
290
  "print('Export successful')"
291
  ]
292
  },
 
63
  "metadata": {},
64
  "outputs": [],
65
  "source": [
66
+ "#|export\n",
67
  "learn = load_learner('model.pkl')"
68
  ]
69
  },
 
132
  "metadata": {},
133
  "outputs": [],
134
  "source": [
135
+ "#|export\n",
136
  "categories = learn.dls.vocab\n",
137
  "\n",
138
  "def classify_image(img):\n",
 
222
  }
223
  ],
224
  "source": [
225
+ "#|export\n",
226
  "image = gr.inputs.Image(shape=(192, 192))\n",
227
  "label = gr.outputs.Label()\n",
228
  "examples = ['baby_african_elephant.jpg', 'baby_asian_elephant.jpg', 'mammoth_plushie.jpg']"
 
230
  },
231
  {
232
  "cell_type": "code",
233
+ "execution_count": 16,
234
  "id": "4f463e23",
235
  "metadata": {
236
  "scrolled": true
 
240
  "name": "stdout",
241
  "output_type": "stream",
242
  "text": [
243
+ "Running on local URL: http://127.0.0.1:7861\n",
 
244
  "\n",
245
+ "To create a public link, set `share=True` in `launch()`.\n"
246
  ]
247
  },
248
  {
249
  "data": {
250
  "text/plain": []
251
  },
252
+ "execution_count": 16,
253
  "metadata": {},
254
  "output_type": "execute_result"
255
+ },
256
+ {
257
+ "data": {
258
+ "text/html": [
259
+ "\n",
260
+ "<style>\n",
261
+ " /* Turns off some styling */\n",
262
+ " progress {\n",
263
+ " /* gets rid of default border in Firefox and Opera. */\n",
264
+ " border: none;\n",
265
+ " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
266
+ " background-size: auto;\n",
267
+ " }\n",
268
+ " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
269
+ " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
270
+ " }\n",
271
+ " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
272
+ " background: #F44336;\n",
273
+ " }\n",
274
+ "</style>\n"
275
+ ],
276
+ "text/plain": [
277
+ "<IPython.core.display.HTML object>"
278
+ ]
279
+ },
280
+ "metadata": {},
281
+ "output_type": "display_data"
282
+ },
283
+ {
284
+ "data": {
285
+ "text/html": [],
286
+ "text/plain": [
287
+ "<IPython.core.display.HTML object>"
288
+ ]
289
+ },
290
+ "metadata": {},
291
+ "output_type": "display_data"
292
+ },
293
+ {
294
+ "data": {
295
+ "text/html": [
296
+ "\n",
297
+ "<style>\n",
298
+ " /* Turns off some styling */\n",
299
+ " progress {\n",
300
+ " /* gets rid of default border in Firefox and Opera. */\n",
301
+ " border: none;\n",
302
+ " /* Needs to be in here for Safari polyfill so background images work as expected. */\n",
303
+ " background-size: auto;\n",
304
+ " }\n",
305
+ " progress:not([value]), progress:not([value])::-webkit-progress-bar {\n",
306
+ " background: repeating-linear-gradient(45deg, #7e7e7e, #7e7e7e 10px, #5c5c5c 10px, #5c5c5c 20px);\n",
307
+ " }\n",
308
+ " .progress-bar-interrupted, .progress-bar-interrupted::-webkit-progress-bar {\n",
309
+ " background: #F44336;\n",
310
+ " }\n",
311
+ "</style>\n"
312
+ ],
313
+ "text/plain": [
314
+ "<IPython.core.display.HTML object>"
315
+ ]
316
+ },
317
+ "metadata": {},
318
+ "output_type": "display_data"
319
+ },
320
+ {
321
+ "data": {
322
+ "text/html": [],
323
+ "text/plain": [
324
+ "<IPython.core.display.HTML object>"
325
+ ]
326
+ },
327
+ "metadata": {},
328
+ "output_type": "display_data"
329
  }
330
  ],
331
  "source": [
332
+ "#|export\n",
333
  "intf = gr.Interface(fn=classify_image, inputs=image, outputs=label, examples=examples)\n",
334
+ "intf.launch(inline=False)"
335
  ]
336
  },
337
  {
338
  "cell_type": "code",
339
+ "execution_count": 17,
340
  "id": "ff9d5c73",
341
  "metadata": {},
342
  "outputs": [],
 
346
  },
347
  {
348
  "cell_type": "code",
349
+ "execution_count": 21,
350
  "id": "eb95104b",
351
  "metadata": {},
352
  "outputs": [
 
359
  }
360
  ],
361
  "source": [
362
+ "nbdev.export.nb_export('elephants.ipynb', './app')\n",
363
  "print('Export successful')"
364
  ]
365
  },