Spaces:
Runtime error
Runtime error
Commit
·
fecd672
0
Parent(s):
save
Browse files- .github/workflows/deploy.yaml +9 -0
- .github/workflows/test.yaml +7 -0
- .gitignore +151 -0
- LICENSE +201 -0
- MANIFEST.in +5 -0
- README.md +27 -0
- app.ipynb +410 -0
- create_space.png +0 -0
- hfspace_demo +1 -0
- nbdev_spaces_demo/__init__.py +3 -0
- nbdev_spaces_demo/_modidx.py +8 -0
- nbdev_spaces_demo/size.py +15 -0
- nbs/_quarto.yml +20 -0
- nbs/index.ipynb +75 -0
- nbs/nbdev.yml +9 -0
- nbs/size.ipynb +94 -0
- nbs/styles.css +37 -0
- settings.ini +42 -0
- setup.py +57 -0
.github/workflows/deploy.yaml
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: Deploy to GitHub Pages
|
2 |
+
on:
|
3 |
+
push:
|
4 |
+
branches: [ "main", "master" ]
|
5 |
+
workflow_dispatch:
|
6 |
+
jobs:
|
7 |
+
deploy:
|
8 |
+
runs-on: ubuntu-latest
|
9 |
+
steps: [uses: fastai/workflows/quarto-ghp@master]
|
.github/workflows/test.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
name: CI
|
2 |
+
on: [workflow_dispatch, pull_request, push]
|
3 |
+
|
4 |
+
jobs:
|
5 |
+
test:
|
6 |
+
runs-on: ubuntu-latest
|
7 |
+
steps: [uses: fastai/workflows/nbdev-ci@master]
|
.gitignore
ADDED
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
_docs/
|
2 |
+
_proc/
|
3 |
+
|
4 |
+
*.bak
|
5 |
+
.gitattributes
|
6 |
+
.last_checked
|
7 |
+
.gitconfig
|
8 |
+
*.bak
|
9 |
+
*.log
|
10 |
+
*~
|
11 |
+
~*
|
12 |
+
_tmp*
|
13 |
+
tmp*
|
14 |
+
tags
|
15 |
+
*.pkg
|
16 |
+
|
17 |
+
# Byte-compiled / optimized / DLL files
|
18 |
+
__pycache__/
|
19 |
+
*.py[cod]
|
20 |
+
*$py.class
|
21 |
+
|
22 |
+
# C extensions
|
23 |
+
*.so
|
24 |
+
|
25 |
+
# Distribution / packaging
|
26 |
+
.Python
|
27 |
+
env/
|
28 |
+
build/
|
29 |
+
develop-eggs/
|
30 |
+
dist/
|
31 |
+
downloads/
|
32 |
+
eggs/
|
33 |
+
.eggs/
|
34 |
+
lib/
|
35 |
+
lib64/
|
36 |
+
parts/
|
37 |
+
sdist/
|
38 |
+
var/
|
39 |
+
wheels/
|
40 |
+
*.egg-info/
|
41 |
+
.installed.cfg
|
42 |
+
*.egg
|
43 |
+
|
44 |
+
# PyInstaller
|
45 |
+
# Usually these files are written by a python script from a template
|
46 |
+
# before PyInstaller builds the exe, so as to inject date/other infos into it.
|
47 |
+
*.manifest
|
48 |
+
*.spec
|
49 |
+
|
50 |
+
# Installer logs
|
51 |
+
pip-log.txt
|
52 |
+
pip-delete-this-directory.txt
|
53 |
+
|
54 |
+
# Unit test / coverage reports
|
55 |
+
htmlcov/
|
56 |
+
.tox/
|
57 |
+
.coverage
|
58 |
+
.coverage.*
|
59 |
+
.cache
|
60 |
+
nosetests.xml
|
61 |
+
coverage.xml
|
62 |
+
*.cover
|
63 |
+
.hypothesis/
|
64 |
+
|
65 |
+
# Translations
|
66 |
+
*.mo
|
67 |
+
*.pot
|
68 |
+
|
69 |
+
# Django stuff:
|
70 |
+
*.log
|
71 |
+
local_settings.py
|
72 |
+
|
73 |
+
# Flask stuff:
|
74 |
+
instance/
|
75 |
+
.webassets-cache
|
76 |
+
|
77 |
+
# Scrapy stuff:
|
78 |
+
.scrapy
|
79 |
+
|
80 |
+
# Sphinx documentation
|
81 |
+
docs/_build/
|
82 |
+
|
83 |
+
# PyBuilder
|
84 |
+
target/
|
85 |
+
|
86 |
+
# Jupyter Notebook
|
87 |
+
.ipynb_checkpoints
|
88 |
+
|
89 |
+
# pyenv
|
90 |
+
.python-version
|
91 |
+
|
92 |
+
# celery beat schedule file
|
93 |
+
celerybeat-schedule
|
94 |
+
|
95 |
+
# SageMath parsed files
|
96 |
+
*.sage.py
|
97 |
+
|
98 |
+
# dotenv
|
99 |
+
.env
|
100 |
+
|
101 |
+
# virtualenv
|
102 |
+
.venv
|
103 |
+
venv/
|
104 |
+
ENV/
|
105 |
+
|
106 |
+
# Spyder project settings
|
107 |
+
.spyderproject
|
108 |
+
.spyproject
|
109 |
+
|
110 |
+
# Rope project settings
|
111 |
+
.ropeproject
|
112 |
+
|
113 |
+
# mkdocs documentation
|
114 |
+
/site
|
115 |
+
|
116 |
+
# mypy
|
117 |
+
.mypy_cache/
|
118 |
+
|
119 |
+
.vscode
|
120 |
+
*.swp
|
121 |
+
|
122 |
+
# osx generated files
|
123 |
+
.DS_Store
|
124 |
+
.DS_Store?
|
125 |
+
.Trashes
|
126 |
+
ehthumbs.db
|
127 |
+
Thumbs.db
|
128 |
+
.idea
|
129 |
+
|
130 |
+
# pytest
|
131 |
+
.pytest_cache
|
132 |
+
|
133 |
+
# tools/trust-doc-nbs
|
134 |
+
docs_src/.last_checked
|
135 |
+
|
136 |
+
# symlinks to fastai
|
137 |
+
docs_src/fastai
|
138 |
+
tools/fastai
|
139 |
+
|
140 |
+
# link checker
|
141 |
+
checklink/cookies.txt
|
142 |
+
|
143 |
+
# .gitconfig is now autogenerated
|
144 |
+
.gitconfig
|
145 |
+
|
146 |
+
# Quarto installer
|
147 |
+
.deb
|
148 |
+
.pkg
|
149 |
+
|
150 |
+
# Quarto
|
151 |
+
.quarto
|
LICENSE
ADDED
@@ -0,0 +1,201 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
Apache License
|
2 |
+
Version 2.0, January 2004
|
3 |
+
http://www.apache.org/licenses/
|
4 |
+
|
5 |
+
TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
|
6 |
+
|
7 |
+
1. Definitions.
|
8 |
+
|
9 |
+
"License" shall mean the terms and conditions for use, reproduction,
|
10 |
+
and distribution as defined by Sections 1 through 9 of this document.
|
11 |
+
|
12 |
+
"Licensor" shall mean the copyright owner or entity authorized by
|
13 |
+
the copyright owner that is granting the License.
|
14 |
+
|
15 |
+
"Legal Entity" shall mean the union of the acting entity and all
|
16 |
+
other entities that control, are controlled by, or are under common
|
17 |
+
control with that entity. For the purposes of this definition,
|
18 |
+
"control" means (i) the power, direct or indirect, to cause the
|
19 |
+
direction or management of such entity, whether by contract or
|
20 |
+
otherwise, or (ii) ownership of fifty percent (50%) or more of the
|
21 |
+
outstanding shares, or (iii) beneficial ownership of such entity.
|
22 |
+
|
23 |
+
"You" (or "Your") shall mean an individual or Legal Entity
|
24 |
+
exercising permissions granted by this License.
|
25 |
+
|
26 |
+
"Source" form shall mean the preferred form for making modifications,
|
27 |
+
including but not limited to software source code, documentation
|
28 |
+
source, and configuration files.
|
29 |
+
|
30 |
+
"Object" form shall mean any form resulting from mechanical
|
31 |
+
transformation or translation of a Source form, including but
|
32 |
+
not limited to compiled object code, generated documentation,
|
33 |
+
and conversions to other media types.
|
34 |
+
|
35 |
+
"Work" shall mean the work of authorship, whether in Source or
|
36 |
+
Object form, made available under the License, as indicated by a
|
37 |
+
copyright notice that is included in or attached to the work
|
38 |
+
(an example is provided in the Appendix below).
|
39 |
+
|
40 |
+
"Derivative Works" shall mean any work, whether in Source or Object
|
41 |
+
form, that is based on (or derived from) the Work and for which the
|
42 |
+
editorial revisions, annotations, elaborations, or other modifications
|
43 |
+
represent, as a whole, an original work of authorship. For the purposes
|
44 |
+
of this License, Derivative Works shall not include works that remain
|
45 |
+
separable from, or merely link (or bind by name) to the interfaces of,
|
46 |
+
the Work and Derivative Works thereof.
|
47 |
+
|
48 |
+
"Contribution" shall mean any work of authorship, including
|
49 |
+
the original version of the Work and any modifications or additions
|
50 |
+
to that Work or Derivative Works thereof, that is intentionally
|
51 |
+
submitted to Licensor for inclusion in the Work by the copyright owner
|
52 |
+
or by an individual or Legal Entity authorized to submit on behalf of
|
53 |
+
the copyright owner. For the purposes of this definition, "submitted"
|
54 |
+
means any form of electronic, verbal, or written communication sent
|
55 |
+
to the Licensor or its representatives, including but not limited to
|
56 |
+
communication on electronic mailing lists, source code control systems,
|
57 |
+
and issue tracking systems that are managed by, or on behalf of, the
|
58 |
+
Licensor for the purpose of discussing and improving the Work, but
|
59 |
+
excluding communication that is conspicuously marked or otherwise
|
60 |
+
designated in writing by the copyright owner as "Not a Contribution."
|
61 |
+
|
62 |
+
"Contributor" shall mean Licensor and any individual or Legal Entity
|
63 |
+
on behalf of whom a Contribution has been received by Licensor and
|
64 |
+
subsequently incorporated within the Work.
|
65 |
+
|
66 |
+
2. Grant of Copyright License. Subject to the terms and conditions of
|
67 |
+
this License, each Contributor hereby grants to You a perpetual,
|
68 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
69 |
+
copyright license to reproduce, prepare Derivative Works of,
|
70 |
+
publicly display, publicly perform, sublicense, and distribute the
|
71 |
+
Work and such Derivative Works in Source or Object form.
|
72 |
+
|
73 |
+
3. Grant of Patent License. Subject to the terms and conditions of
|
74 |
+
this License, each Contributor hereby grants to You a perpetual,
|
75 |
+
worldwide, non-exclusive, no-charge, royalty-free, irrevocable
|
76 |
+
(except as stated in this section) patent license to make, have made,
|
77 |
+
use, offer to sell, sell, import, and otherwise transfer the Work,
|
78 |
+
where such license applies only to those patent claims licensable
|
79 |
+
by such Contributor that are necessarily infringed by their
|
80 |
+
Contribution(s) alone or by combination of their Contribution(s)
|
81 |
+
with the Work to which such Contribution(s) was submitted. If You
|
82 |
+
institute patent litigation against any entity (including a
|
83 |
+
cross-claim or counterclaim in a lawsuit) alleging that the Work
|
84 |
+
or a Contribution incorporated within the Work constitutes direct
|
85 |
+
or contributory patent infringement, then any patent licenses
|
86 |
+
granted to You under this License for that Work shall terminate
|
87 |
+
as of the date such litigation is filed.
|
88 |
+
|
89 |
+
4. Redistribution. You may reproduce and distribute copies of the
|
90 |
+
Work or Derivative Works thereof in any medium, with or without
|
91 |
+
modifications, and in Source or Object form, provided that You
|
92 |
+
meet the following conditions:
|
93 |
+
|
94 |
+
(a) You must give any other recipients of the Work or
|
95 |
+
Derivative Works a copy of this License; and
|
96 |
+
|
97 |
+
(b) You must cause any modified files to carry prominent notices
|
98 |
+
stating that You changed the files; and
|
99 |
+
|
100 |
+
(c) You must retain, in the Source form of any Derivative Works
|
101 |
+
that You distribute, all copyright, patent, trademark, and
|
102 |
+
attribution notices from the Source form of the Work,
|
103 |
+
excluding those notices that do not pertain to any part of
|
104 |
+
the Derivative Works; and
|
105 |
+
|
106 |
+
(d) If the Work includes a "NOTICE" text file as part of its
|
107 |
+
distribution, then any Derivative Works that You distribute must
|
108 |
+
include a readable copy of the attribution notices contained
|
109 |
+
within such NOTICE file, excluding those notices that do not
|
110 |
+
pertain to any part of the Derivative Works, in at least one
|
111 |
+
of the following places: within a NOTICE text file distributed
|
112 |
+
as part of the Derivative Works; within the Source form or
|
113 |
+
documentation, if provided along with the Derivative Works; or,
|
114 |
+
within a display generated by the Derivative Works, if and
|
115 |
+
wherever such third-party notices normally appear. The contents
|
116 |
+
of the NOTICE file are for informational purposes only and
|
117 |
+
do not modify the License. You may add Your own attribution
|
118 |
+
notices within Derivative Works that You distribute, alongside
|
119 |
+
or as an addendum to the NOTICE text from the Work, provided
|
120 |
+
that such additional attribution notices cannot be construed
|
121 |
+
as modifying the License.
|
122 |
+
|
123 |
+
You may add Your own copyright statement to Your modifications and
|
124 |
+
may provide additional or different license terms and conditions
|
125 |
+
for use, reproduction, or distribution of Your modifications, or
|
126 |
+
for any such Derivative Works as a whole, provided Your use,
|
127 |
+
reproduction, and distribution of the Work otherwise complies with
|
128 |
+
the conditions stated in this License.
|
129 |
+
|
130 |
+
5. Submission of Contributions. Unless You explicitly state otherwise,
|
131 |
+
any Contribution intentionally submitted for inclusion in the Work
|
132 |
+
by You to the Licensor shall be under the terms and conditions of
|
133 |
+
this License, without any additional terms or conditions.
|
134 |
+
Notwithstanding the above, nothing herein shall supersede or modify
|
135 |
+
the terms of any separate license agreement you may have executed
|
136 |
+
with Licensor regarding such Contributions.
|
137 |
+
|
138 |
+
6. Trademarks. This License does not grant permission to use the trade
|
139 |
+
names, trademarks, service marks, or product names of the Licensor,
|
140 |
+
except as required for reasonable and customary use in describing the
|
141 |
+
origin of the Work and reproducing the content of the NOTICE file.
|
142 |
+
|
143 |
+
7. Disclaimer of Warranty. Unless required by applicable law or
|
144 |
+
agreed to in writing, Licensor provides the Work (and each
|
145 |
+
Contributor provides its Contributions) on an "AS IS" BASIS,
|
146 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
|
147 |
+
implied, including, without limitation, any warranties or conditions
|
148 |
+
of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
|
149 |
+
PARTICULAR PURPOSE. You are solely responsible for determining the
|
150 |
+
appropriateness of using or redistributing the Work and assume any
|
151 |
+
risks associated with Your exercise of permissions under this License.
|
152 |
+
|
153 |
+
8. Limitation of Liability. In no event and under no legal theory,
|
154 |
+
whether in tort (including negligence), contract, or otherwise,
|
155 |
+
unless required by applicable law (such as deliberate and grossly
|
156 |
+
negligent acts) or agreed to in writing, shall any Contributor be
|
157 |
+
liable to You for damages, including any direct, indirect, special,
|
158 |
+
incidental, or consequential damages of any character arising as a
|
159 |
+
result of this License or out of the use or inability to use the
|
160 |
+
Work (including but not limited to damages for loss of goodwill,
|
161 |
+
work stoppage, computer failure or malfunction, or any and all
|
162 |
+
other commercial damages or losses), even if such Contributor
|
163 |
+
has been advised of the possibility of such damages.
|
164 |
+
|
165 |
+
9. Accepting Warranty or Additional Liability. While redistributing
|
166 |
+
the Work or Derivative Works thereof, You may choose to offer,
|
167 |
+
and charge a fee for, acceptance of support, warranty, indemnity,
|
168 |
+
or other liability obligations and/or rights consistent with this
|
169 |
+
License. However, in accepting such obligations, You may act only
|
170 |
+
on Your own behalf and on Your sole responsibility, not on behalf
|
171 |
+
of any other Contributor, and only if You agree to indemnify,
|
172 |
+
defend, and hold each Contributor harmless for any liability
|
173 |
+
incurred by, or claims asserted against, such Contributor by reason
|
174 |
+
of your accepting any such warranty or additional liability.
|
175 |
+
|
176 |
+
END OF TERMS AND CONDITIONS
|
177 |
+
|
178 |
+
APPENDIX: How to apply the Apache License to your work.
|
179 |
+
|
180 |
+
To apply the Apache License to your work, attach the following
|
181 |
+
boilerplate notice, with the fields enclosed by brackets "[]"
|
182 |
+
replaced with your own identifying information. (Don't include
|
183 |
+
the brackets!) The text should be enclosed in the appropriate
|
184 |
+
comment syntax for the file format. We also recommend that a
|
185 |
+
file or class name and description of purpose be included on the
|
186 |
+
same "printed page" as the copyright notice for easier
|
187 |
+
identification within third-party archives.
|
188 |
+
|
189 |
+
Copyright 2022, fastai
|
190 |
+
|
191 |
+
Licensed under the Apache License, Version 2.0 (the "License");
|
192 |
+
you may not use this file except in compliance with the License.
|
193 |
+
You may obtain a copy of the License at
|
194 |
+
|
195 |
+
http://www.apache.org/licenses/LICENSE-2.0
|
196 |
+
|
197 |
+
Unless required by applicable law or agreed to in writing, software
|
198 |
+
distributed under the License is distributed on an "AS IS" BASIS,
|
199 |
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
200 |
+
See the License for the specific language governing permissions and
|
201 |
+
limitations under the License.
|
MANIFEST.in
ADDED
@@ -0,0 +1,5 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
include settings.ini
|
2 |
+
include LICENSE
|
3 |
+
include CONTRIBUTING.md
|
4 |
+
include README.md
|
5 |
+
recursive-exclude * __pycache__
|
README.md
ADDED
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
nbdev-spaces-demo
|
2 |
+
================
|
3 |
+
|
4 |
+
<!-- WARNING: THIS FILE WAS AUTOGENERATED! DO NOT EDIT! -->
|
5 |
+
|
6 |
+
This is a toy python library that lets you obtain the size of any
|
7 |
+
Hugging Face dataset. For example, we can check the size of
|
8 |
+
[tglcourse/CelebA-faces-cropped-128](https://huggingface.co/datasets/tglcourse/CelebA-faces-cropped-128)
|
9 |
+
like so:
|
10 |
+
|
11 |
+
``` python
|
12 |
+
from nbdev_spaces_demo import hfsize
|
13 |
+
hfsize("tglcourse/CelebA-faces-cropped-128")
|
14 |
+
```
|
15 |
+
|
16 |
+
'5.49 GB'
|
17 |
+
|
18 |
+
We deploy this function using Gradio and Hugging Face spaces.
|
19 |
+
|
20 |
+
## Using nbdev with Gradio
|
21 |
+
|
22 |
+
Gradio and Hugging Face spaces is one of the easiest way to create and
|
23 |
+
host apps. Gradio also allows you to prototype these apps in notebooks,
|
24 |
+
which is excellent!
|
25 |
+
|
26 |
+
We show you step-by-step instructions on how to deploy a Hugging Face
|
27 |
+
gradio app from a notebook in this example.
|
app.ipynb
ADDED
@@ -0,0 +1,410 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"id": "8c68f03e-620c-46a9-a7ec-a6cde27043cd",
|
6 |
+
"metadata": {},
|
7 |
+
"source": [
|
8 |
+
"# Hugging Face Spaces\n",
|
9 |
+
"\n",
|
10 |
+
"> A demo of using nbdev with Hugging Face Spaces\n",
|
11 |
+
"\n",
|
12 |
+
"Hugging Face spaces require that your python script is named `app.py`, so your first cell should be this, which will make sure code is exported to a file named `app.py`:"
|
13 |
+
]
|
14 |
+
},
|
15 |
+
{
|
16 |
+
"cell_type": "code",
|
17 |
+
"execution_count": null,
|
18 |
+
"id": "c3463e8e-454a-48b8-ae21-8308703d2275",
|
19 |
+
"metadata": {},
|
20 |
+
"outputs": [],
|
21 |
+
"source": [
|
22 |
+
"#|default_exp app"
|
23 |
+
]
|
24 |
+
},
|
25 |
+
{
|
26 |
+
"cell_type": "markdown",
|
27 |
+
"id": "96483373-4ae1-49b2-85ed-ceee8456df19",
|
28 |
+
"metadata": {},
|
29 |
+
"source": [
|
30 |
+
"# Create a Gradio-enabled Space on Hugging Face\n",
|
31 |
+
"\n",
|
32 |
+
"The first step is to create a space and select the appropriate sdk (which is Gradio in this example), per [these instructions](https://huggingface.co/docs/hub/spaces-overview#creating-a-new-space):"
|
33 |
+
]
|
34 |
+
},
|
35 |
+
{
|
36 |
+
"cell_type": "markdown",
|
37 |
+
"id": "b34d7ec6-69b8-48c4-a68b-fad6db3c2fab",
|
38 |
+
"metadata": {},
|
39 |
+
"source": [
|
40 |
+
""
|
41 |
+
]
|
42 |
+
},
|
43 |
+
{
|
44 |
+
"cell_type": "markdown",
|
45 |
+
"id": "c25e8e7a-52d9-4305-a107-ba03e3d6a5f3",
|
46 |
+
"metadata": {},
|
47 |
+
"source": [
|
48 |
+
"After you are done creating the space, **clone the repo to the root of your nbdev project.** In this example, I ran the command `git clone https://huggingface.co/spaces/hamel/hfspace_demo` from the root of this repository."
|
49 |
+
]
|
50 |
+
},
|
51 |
+
{
|
52 |
+
"cell_type": "markdown",
|
53 |
+
"id": "ff26114c-329b-4a97-98b5-c652554b0114",
|
54 |
+
"metadata": {},
|
55 |
+
"source": [
|
56 |
+
"## Make an app with Gradio"
|
57 |
+
]
|
58 |
+
},
|
59 |
+
{
|
60 |
+
"cell_type": "markdown",
|
61 |
+
"id": "14a884fc-36e2-43ec-8e42-ca2903aaa4de",
|
62 |
+
"metadata": {},
|
63 |
+
"source": [
|
64 |
+
"Below, we will create a [gradio](https://gradio.app/) in a notebook and show you how to deploy it to [Hugging Face Spaces](https://huggingface.co/docs/hub/spaces).\n",
|
65 |
+
"\n",
|
66 |
+
"First, lets specify the libraries we need, which in this case are gradio, and the nbdev project which in this case is `nbdev_spaces_demo`:"
|
67 |
+
]
|
68 |
+
},
|
69 |
+
{
|
70 |
+
"cell_type": "code",
|
71 |
+
"execution_count": null,
|
72 |
+
"id": "e5e5d597-19ad-46e5-81ad-8f646d8a1c21",
|
73 |
+
"metadata": {},
|
74 |
+
"outputs": [],
|
75 |
+
"source": [
|
76 |
+
"#|export\n",
|
77 |
+
"import gradio as gr\n",
|
78 |
+
"from nbdev_spaces_demo import hfsize "
|
79 |
+
]
|
80 |
+
},
|
81 |
+
{
|
82 |
+
"cell_type": "markdown",
|
83 |
+
"id": "9ff9f84d-7744-46ad-80ed-2cf1fa6d0643",
|
84 |
+
"metadata": {},
|
85 |
+
"source": [
|
86 |
+
"As a reminder, `hfsize` can be used to check the size of a Hugging Face Dataset. For example, we can check the size of [tglcourse/CelebA-faces-cropped-128](https://huggingface.co/datasets/tglcourse/CelebA-faces-cropped-128) like so:"
|
87 |
+
]
|
88 |
+
},
|
89 |
+
{
|
90 |
+
"cell_type": "code",
|
91 |
+
"execution_count": null,
|
92 |
+
"id": "95bc32b8-d8ff-4761-a2d7-0880c51d0a42",
|
93 |
+
"metadata": {},
|
94 |
+
"outputs": [
|
95 |
+
{
|
96 |
+
"data": {
|
97 |
+
"text/plain": [
|
98 |
+
"'5.49 GB'"
|
99 |
+
]
|
100 |
+
},
|
101 |
+
"execution_count": null,
|
102 |
+
"metadata": {},
|
103 |
+
"output_type": "execute_result"
|
104 |
+
}
|
105 |
+
],
|
106 |
+
"source": [
|
107 |
+
"hfsize(\"tglcourse/CelebA-faces-cropped-128\")"
|
108 |
+
]
|
109 |
+
},
|
110 |
+
{
|
111 |
+
"cell_type": "markdown",
|
112 |
+
"id": "cb13747b-ea48-4146-846d-deb9e855d32d",
|
113 |
+
"metadata": {},
|
114 |
+
"source": [
|
115 |
+
"You can construct a simple UI with the `gradio.interface` and then call the `launch` method of that interface to display a preview in a notebook. This is a great way to test your app to see if it works"
|
116 |
+
]
|
117 |
+
},
|
118 |
+
{
|
119 |
+
"cell_type": "code",
|
120 |
+
"execution_count": null,
|
121 |
+
"id": "7b20e2a1-b622-4970-9069-0202ce10a2ce",
|
122 |
+
"metadata": {},
|
123 |
+
"outputs": [
|
124 |
+
{
|
125 |
+
"name": "stdout",
|
126 |
+
"output_type": "stream",
|
127 |
+
"text": [
|
128 |
+
"Running on local URL: http://127.0.0.1:7860\n",
|
129 |
+
"\n",
|
130 |
+
"To create a public link, set `share=True` in `launch()`.\n"
|
131 |
+
]
|
132 |
+
},
|
133 |
+
{
|
134 |
+
"data": {
|
135 |
+
"text/html": [
|
136 |
+
"<div><iframe src=\"http://127.0.0.1:7860/\" width=\"500\" height=\"500\" allow=\"autoplay; camera; microphone; clipboard-read; clipboard-write;\" frameborder=\"0\" allowfullscreen></iframe></div>"
|
137 |
+
],
|
138 |
+
"text/plain": [
|
139 |
+
"<IPython.core.display.HTML object>"
|
140 |
+
]
|
141 |
+
},
|
142 |
+
"metadata": {},
|
143 |
+
"output_type": "display_data"
|
144 |
+
},
|
145 |
+
{
|
146 |
+
"data": {
|
147 |
+
"text/plain": [
|
148 |
+
"(<gradio.routes.App>, 'http://127.0.0.1:7860/', None)"
|
149 |
+
]
|
150 |
+
},
|
151 |
+
"execution_count": null,
|
152 |
+
"metadata": {},
|
153 |
+
"output_type": "execute_result"
|
154 |
+
}
|
155 |
+
],
|
156 |
+
"source": [
|
157 |
+
"#|export\n",
|
158 |
+
"iface = gr.Interface(fn=hfsize, inputs=gr.Text(value=\"tglcourse/CelebA-faces-cropped-128\"), outputs=\"text\")\n",
|
159 |
+
"iface.launch(width=500)"
|
160 |
+
]
|
161 |
+
},
|
162 |
+
{
|
163 |
+
"cell_type": "markdown",
|
164 |
+
"id": "59926b18-a9af-4387-9fcc-f88e588da577",
|
165 |
+
"metadata": {},
|
166 |
+
"source": [
|
167 |
+
"Note how running the `launch()` method in a notebook runs a webserver in the background. Below, we call the `close()` method to close the webserver."
|
168 |
+
]
|
169 |
+
},
|
170 |
+
{
|
171 |
+
"cell_type": "code",
|
172 |
+
"execution_count": null,
|
173 |
+
"id": "39d7be72-9389-42cf-91b1-78e8f4bbd083",
|
174 |
+
"metadata": {},
|
175 |
+
"outputs": [
|
176 |
+
{
|
177 |
+
"name": "stdout",
|
178 |
+
"output_type": "stream",
|
179 |
+
"text": [
|
180 |
+
"Closing server running on port: 7860\n"
|
181 |
+
]
|
182 |
+
}
|
183 |
+
],
|
184 |
+
"source": [
|
185 |
+
"# this is only necessary in a notebook\n",
|
186 |
+
"iface.close()"
|
187 |
+
]
|
188 |
+
},
|
189 |
+
{
|
190 |
+
"cell_type": "markdown",
|
191 |
+
"id": "249b2cd7-3123-45bf-945f-882b8a964cf5",
|
192 |
+
"metadata": {},
|
193 |
+
"source": [
|
194 |
+
"## Converting This Notebook Into A Gradio App"
|
195 |
+
]
|
196 |
+
},
|
197 |
+
{
|
198 |
+
"cell_type": "markdown",
|
199 |
+
"id": "5c18ca6e-8de8-49e1-b95a-304070bbc171",
|
200 |
+
"metadata": {},
|
201 |
+
"source": [
|
202 |
+
"In order to host this code on Hugging Faces spaces, we need to do the following:\n",
|
203 |
+
"\n",
|
204 |
+
"1. Export parts of this notebook to a script named `app.py`\n",
|
205 |
+
"2. Create a `requirements.txt` file specifying all the dependencies of the gradio app which is inferred from `settings.ini`"
|
206 |
+
]
|
207 |
+
},
|
208 |
+
{
|
209 |
+
"cell_type": "markdown",
|
210 |
+
"id": "1971847f-8d70-429b-8dd2-292d3e329266",
|
211 |
+
"metadata": {},
|
212 |
+
"source": [
|
213 |
+
"We can achieve this with the below code, note how we are exporting the code to the `hfspace_demo/` directory, which is the repo we cloned in the first step."
|
214 |
+
]
|
215 |
+
},
|
216 |
+
{
|
217 |
+
"cell_type": "code",
|
218 |
+
"execution_count": null,
|
219 |
+
"id": "6706d92c-5785-4f09-9773-b9a944c493a5",
|
220 |
+
"metadata": {},
|
221 |
+
"outputs": [],
|
222 |
+
"source": [
|
223 |
+
"from nbdev.export import nb_export\n",
|
224 |
+
"from nbdev.release import write_requirements\n",
|
225 |
+
"\n",
|
226 |
+
"app_dir = 'hfspace_demo/'\n",
|
227 |
+
"nb_export('app.ipynb', app_dir)\n",
|
228 |
+
"write_requirements(app_dir)"
|
229 |
+
]
|
230 |
+
},
|
231 |
+
{
|
232 |
+
"cell_type": "markdown",
|
233 |
+
"id": "662a58d6-a907-4a7f-962e-35859687a1e4",
|
234 |
+
"metadata": {},
|
235 |
+
"source": [
|
236 |
+
"We can vendor in our python library generated with nbdev (named `nbdev_spaces_demo`) into the directory as well like so:"
|
237 |
+
]
|
238 |
+
},
|
239 |
+
{
|
240 |
+
"cell_type": "code",
|
241 |
+
"execution_count": null,
|
242 |
+
"id": "128b042e-7a8a-4485-94af-46e7efee8557",
|
243 |
+
"metadata": {},
|
244 |
+
"outputs": [],
|
245 |
+
"source": [
|
246 |
+
"from nbdev.config import get_config\n",
|
247 |
+
"!cp -r {str(get_config().lib_path)} {app_dir}"
|
248 |
+
]
|
249 |
+
},
|
250 |
+
{
|
251 |
+
"cell_type": "markdown",
|
252 |
+
"id": "0182403f-d1d6-48c0-8e66-46aefb23a9ab",
|
253 |
+
"metadata": {},
|
254 |
+
"source": [
|
255 |
+
"<div>\n",
|
256 |
+
"<link rel=\"stylesheet\" href=\"https://gradio.s3-us-west-2.amazonaws.com/2.6.5/static/bundle.css\">\n",
|
257 |
+
"<div id=\"target\"></div>\n",
|
258 |
+
"<script src=\"https://gradio.s3-us-west-2.amazonaws.com/2.6.5/static/bundle.js\"></script>\n",
|
259 |
+
"<script>\n",
|
260 |
+
"launchGradioFromSpaces(\"abidlabs/question-answering\", \"#target\")\n",
|
261 |
+
"</script>\n",
|
262 |
+
"</div>"
|
263 |
+
]
|
264 |
+
},
|
265 |
+
{
|
266 |
+
"cell_type": "markdown",
|
267 |
+
"id": "84d5fd19-7880-459c-8382-b3574ed11141",
|
268 |
+
"metadata": {},
|
269 |
+
"source": [
|
270 |
+
"### Understanding what is generated"
|
271 |
+
]
|
272 |
+
},
|
273 |
+
{
|
274 |
+
"cell_type": "markdown",
|
275 |
+
"id": "f0b783f0-cd5a-4092-b19c-8d05a978ce3c",
|
276 |
+
"metadata": {},
|
277 |
+
"source": [
|
278 |
+
"The contents of the hfspace_demo/ folder will contain these assets:"
|
279 |
+
]
|
280 |
+
},
|
281 |
+
{
|
282 |
+
"cell_type": "code",
|
283 |
+
"execution_count": null,
|
284 |
+
"id": "a8d6b05f-1d17-4000-b82a-7fd4eb3092c5",
|
285 |
+
"metadata": {},
|
286 |
+
"outputs": [
|
287 |
+
{
|
288 |
+
"name": "stdout",
|
289 |
+
"output_type": "stream",
|
290 |
+
"text": [
|
291 |
+
"README.md app.py \u001b[1m\u001b[36mnbdev_spaces_demo\u001b[m\u001b[m requirements.txt\n"
|
292 |
+
]
|
293 |
+
}
|
294 |
+
],
|
295 |
+
"source": [
|
296 |
+
"!ls hfspace_demo/"
|
297 |
+
]
|
298 |
+
},
|
299 |
+
{
|
300 |
+
"cell_type": "markdown",
|
301 |
+
"id": "9ea562e7-b67a-45df-b822-2f4528a307c2",
|
302 |
+
"metadata": {},
|
303 |
+
"source": [
|
304 |
+
"Notice how the contents of app.py only contains the exported cells from this notebook:"
|
305 |
+
]
|
306 |
+
},
|
307 |
+
{
|
308 |
+
"cell_type": "code",
|
309 |
+
"execution_count": null,
|
310 |
+
"id": "4bae6a5c-58bc-4a0f-9aac-34c092150fdc",
|
311 |
+
"metadata": {},
|
312 |
+
"outputs": [
|
313 |
+
{
|
314 |
+
"data": {
|
315 |
+
"text/plain": [
|
316 |
+
"\u001b[0;31m# AUTOGENERATED! DO NOT EDIT! File to edit: ../app.ipynb.\u001b[0m\u001b[0;34m\u001b[0m\n",
|
317 |
+
"\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\n",
|
318 |
+
"\u001b[0;34m\u001b[0m\u001b[0;31m# %% auto 0\u001b[0m\u001b[0;34m\u001b[0m\n",
|
319 |
+
"\u001b[0;34m\u001b[0m\u001b[0m__all__\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0;34m[\u001b[0m\u001b[0;34m'iface'\u001b[0m\u001b[0;34m]\u001b[0m\u001b[0;34m\u001b[0m\n",
|
320 |
+
"\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\n",
|
321 |
+
"\u001b[0;34m\u001b[0m\u001b[0;31m# %% ../app.ipynb 7\u001b[0m\u001b[0;34m\u001b[0m\n",
|
322 |
+
"\u001b[0;34m\u001b[0m\u001b[0;32mimport\u001b[0m \u001b[0mgradio\u001b[0m \u001b[0;32mas\u001b[0m \u001b[0mgr\u001b[0m\u001b[0;34m\u001b[0m\n",
|
323 |
+
"\u001b[0;34m\u001b[0m\u001b[0;32mfrom\u001b[0m \u001b[0mnbdev_spaces_demo\u001b[0m \u001b[0;32mimport\u001b[0m \u001b[0mhfsize\u001b[0m \u001b[0;34m\u001b[0m\n",
|
324 |
+
"\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\n",
|
325 |
+
"\u001b[0;34m\u001b[0m\u001b[0;31m# %% ../app.ipynb 11\u001b[0m\u001b[0;34m\u001b[0m\n",
|
326 |
+
"\u001b[0;34m\u001b[0m\u001b[0miface\u001b[0m \u001b[0;34m=\u001b[0m \u001b[0mgr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mInterface\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mfn\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mhfsize\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0minputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0mgr\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mText\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mvalue\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"tglcourse/CelebA-faces-cropped-128\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m,\u001b[0m \u001b[0moutputs\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;34m\"text\"\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\n",
|
327 |
+
"\u001b[0;34m\u001b[0m\u001b[0miface\u001b[0m\u001b[0;34m.\u001b[0m\u001b[0mlaunch\u001b[0m\u001b[0;34m(\u001b[0m\u001b[0mwidth\u001b[0m\u001b[0;34m=\u001b[0m\u001b[0;36m500\u001b[0m\u001b[0;34m)\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n"
|
328 |
+
]
|
329 |
+
},
|
330 |
+
"metadata": {},
|
331 |
+
"output_type": "display_data"
|
332 |
+
}
|
333 |
+
],
|
334 |
+
"source": [
|
335 |
+
"%pycat hfspace_demo/app.py"
|
336 |
+
]
|
337 |
+
},
|
338 |
+
{
|
339 |
+
"cell_type": "markdown",
|
340 |
+
"id": "aadb4817-0671-4d05-9abc-d16776e2bec7",
|
341 |
+
"metadata": {},
|
342 |
+
"source": [
|
343 |
+
"Similarly, the contents of requirements.txt contain all dependencies listed in `settings.ini` from the `requirements` and `pip_requirements` fields (which in this case is just `fastcore`):"
|
344 |
+
]
|
345 |
+
},
|
346 |
+
{
|
347 |
+
"cell_type": "code",
|
348 |
+
"execution_count": null,
|
349 |
+
"id": "831333c4-5e67-46fd-bd73-81a61cbcbd86",
|
350 |
+
"metadata": {},
|
351 |
+
"outputs": [
|
352 |
+
{
|
353 |
+
"name": "stdout",
|
354 |
+
"output_type": "stream",
|
355 |
+
"text": [
|
356 |
+
"fastcore\n"
|
357 |
+
]
|
358 |
+
}
|
359 |
+
],
|
360 |
+
"source": [
|
361 |
+
"!cat hfspace_demo/requirements.txt"
|
362 |
+
]
|
363 |
+
},
|
364 |
+
{
|
365 |
+
"cell_type": "markdown",
|
366 |
+
"id": "f15d9c78-1f55-449e-8058-9af1832367a0",
|
367 |
+
"metadata": {},
|
368 |
+
"source": [
|
369 |
+
"## Launching Your Gradio App\n",
|
370 |
+
"\n",
|
371 |
+
"To launch your gradio app, you need to commit the changes in the Hugging Face repo. \n",
|
372 |
+
"\n",
|
373 |
+
"First, change directories to your Hugging Face repo (in this case it's a directory called `hfspace_demo/`):\n",
|
374 |
+
"\n",
|
375 |
+
"`cd hfspace_demo`\n",
|
376 |
+
"\n",
|
377 |
+
"Then commit all changes\n",
|
378 |
+
"\n",
|
379 |
+
"```\n",
|
380 |
+
"git add -A; git commit -m \"Add application files\"; git push\n",
|
381 |
+
"```"
|
382 |
+
]
|
383 |
+
},
|
384 |
+
{
|
385 |
+
"cell_type": "markdown",
|
386 |
+
"id": "fa661f93-73b4-465a-9c22-cc38197505cb",
|
387 |
+
"metadata": {},
|
388 |
+
"source": [
|
389 |
+
"## Voilà! Enjoy your Gradio App"
|
390 |
+
]
|
391 |
+
},
|
392 |
+
{
|
393 |
+
"cell_type": "markdown",
|
394 |
+
"id": "9b20ff94-6842-4078-9ec1-be740944e721",
|
395 |
+
"metadata": {},
|
396 |
+
"source": [
|
397 |
+
"After a couple of minutes, you will see your app published! "
|
398 |
+
]
|
399 |
+
}
|
400 |
+
],
|
401 |
+
"metadata": {
|
402 |
+
"kernelspec": {
|
403 |
+
"display_name": "Python 3 (ipykernel)",
|
404 |
+
"language": "python",
|
405 |
+
"name": "python3"
|
406 |
+
}
|
407 |
+
},
|
408 |
+
"nbformat": 4,
|
409 |
+
"nbformat_minor": 5
|
410 |
+
}
|
create_space.png
ADDED
![]() |
hfspace_demo
ADDED
@@ -0,0 +1 @@
|
|
|
|
|
1 |
+
Subproject commit 4b24ff04a9705aaaf5a62a209ab654394e665b20
|
nbdev_spaces_demo/__init__.py
ADDED
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
1 |
+
__version__ = "0.0.1"
|
2 |
+
|
3 |
+
from .size import hfsize
|
nbdev_spaces_demo/_modidx.py
ADDED
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# Autogenerated by nbdev
|
2 |
+
|
3 |
+
d = { 'settings': { 'branch': 'master',
|
4 |
+
'doc_baseurl': '/nbdev-spaces-demo',
|
5 |
+
'doc_host': 'https://fastai.github.io',
|
6 |
+
'git_url': 'https://github.com/fastai/nbdev-spaces-demo',
|
7 |
+
'lib_path': 'nbdev_spaces_demo'},
|
8 |
+
'syms': {'nbdev_spaces_demo.size': {'nbdev_spaces_demo.size.hfsize': ('size.html#hfsize', 'nbdev_spaces_demo/size.py')}}}
|
nbdev_spaces_demo/size.py
ADDED
@@ -0,0 +1,15 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
# AUTOGENERATED! DO NOT EDIT! File to edit: ../nbs/size.ipynb.
|
2 |
+
|
3 |
+
# %% auto 0
|
4 |
+
__all__ = ['hfsize']
|
5 |
+
|
6 |
+
# %% ../nbs/size.ipynb 2
|
7 |
+
from fastcore.net import urljson, HTTPError
|
8 |
+
|
9 |
+
def hfsize(repo:str):
    "Returns the size in GB of a HuggingFace Dataset."
    # Build the Hub API endpoint for this dataset repo, then ask the
    # `treesize` endpoint for the total byte count of the `main` revision.
    api_url = f'https://huggingface.co/api/datasets/{repo}'
    try:
        payload = urljson(f'{api_url}/treesize/main')
    except HTTPError:
        # Repo doesn't exist (or isn't reachable): report it instead of raising.
        return f'Did not find repo: {api_url}'
    size_gb = payload['size'] / 1e9  # bytes -> gigabytes (decimal GB)
    return f'{size_gb:.2f} GB'
|
nbs/_quarto.yml
ADDED
@@ -0,0 +1,20 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
project:
|
2 |
+
type: website
|
3 |
+
|
4 |
+
format:
|
5 |
+
html:
|
6 |
+
theme: cosmo
|
7 |
+
css: styles.css
|
8 |
+
toc: true
|
9 |
+
|
10 |
+
website:
|
11 |
+
twitter-card: true
|
12 |
+
open-graph: true
|
13 |
+
repo-actions: [issue]
|
14 |
+
navbar:
|
15 |
+
background: primary
|
16 |
+
search: true
|
17 |
+
sidebar:
|
18 |
+
style: floating
|
19 |
+
|
20 |
+
metadata-files: [nbdev.yml, sidebar.yml]
|
nbs/index.ipynb
ADDED
@@ -0,0 +1,75 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"metadata": {},
|
6 |
+
"source": [
|
7 |
+
"# nbdev-spaces-demo\n",
|
8 |
+
"\n",
|
9 |
+
"> A demo of how to create a Hugging Face Space with gradio within a nbdev project."
|
10 |
+
]
|
11 |
+
},
|
12 |
+
{
|
13 |
+
"cell_type": "markdown",
|
14 |
+
"metadata": {},
|
15 |
+
"source": [
|
16 |
+
"This is a toy python library that lets you obtain the size of any Hugging Face dataset. For example, we can check the size of [tglcourse/CelebA-faces-cropped-128](https://huggingface.co/datasets/tglcourse/CelebA-faces-cropped-128) like so:"
|
17 |
+
]
|
18 |
+
},
|
19 |
+
{
|
20 |
+
"cell_type": "code",
|
21 |
+
"execution_count": null,
|
22 |
+
"metadata": {},
|
23 |
+
"outputs": [
|
24 |
+
{
|
25 |
+
"data": {
|
26 |
+
"text/plain": [
|
27 |
+
"'5.49 GB'"
|
28 |
+
]
|
29 |
+
},
|
30 |
+
"execution_count": null,
|
31 |
+
"metadata": {},
|
32 |
+
"output_type": "execute_result"
|
33 |
+
}
|
34 |
+
],
|
35 |
+
"source": [
|
36 |
+
"from nbdev_spaces_demo import hfsize \n",
|
37 |
+
"hfsize(\"tglcourse/CelebA-faces-cropped-128\")"
|
38 |
+
]
|
39 |
+
},
|
40 |
+
{
|
41 |
+
"cell_type": "markdown",
|
42 |
+
"metadata": {},
|
43 |
+
"source": [
|
44 |
+
"We deploy this function using Gradio and Hugging Face spaces."
|
45 |
+
]
|
46 |
+
},
|
47 |
+
{
|
48 |
+
"cell_type": "markdown",
|
49 |
+
"metadata": {},
|
50 |
+
"source": [
|
51 |
+
"## Using nbdev with Gradio\n",
|
52 |
+
"\n",
|
53 |
+
"Gradio and Hugging Face Spaces are among the easiest ways to create and host apps. Gradio also allows you to prototype these apps in notebooks, which is excellent! \n",
|
54 |
+
"\n",
|
55 |
+
"We show you step-by-step instructions on how to deploy a Hugging Face gradio app from a notebook in this example."
|
56 |
+
]
|
57 |
+
},
|
58 |
+
{
|
59 |
+
"cell_type": "code",
|
60 |
+
"execution_count": null,
|
61 |
+
"metadata": {},
|
62 |
+
"outputs": [],
|
63 |
+
"source": []
|
64 |
+
}
|
65 |
+
],
|
66 |
+
"metadata": {
|
67 |
+
"kernelspec": {
|
68 |
+
"display_name": "Python 3 (ipykernel)",
|
69 |
+
"language": "python",
|
70 |
+
"name": "python3"
|
71 |
+
}
|
72 |
+
},
|
73 |
+
"nbformat": 4,
|
74 |
+
"nbformat_minor": 4
|
75 |
+
}
|
nbs/nbdev.yml
ADDED
@@ -0,0 +1,9 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
project:
|
2 |
+
output-dir: _docs
|
3 |
+
|
4 |
+
website:
|
5 |
+
title: "nbdev-spaces-demo"
|
6 |
+
site-url: "https://fastai.github.io/nbdev-spaces-demo"
|
7 |
+
description: "A demo of how to create a Hugging Face Space with gradio within a nbdev project."
|
8 |
+
repo-branch: master
|
9 |
+
repo-url: "https://github.com/fastai/nbdev-spaces-demo"
|
nbs/size.ipynb
ADDED
@@ -0,0 +1,94 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
{
|
2 |
+
"cells": [
|
3 |
+
{
|
4 |
+
"cell_type": "markdown",
|
5 |
+
"metadata": {},
|
6 |
+
"source": [
|
7 |
+
"# size\n",
|
8 |
+
"> Determine the size of a Hugging Face Dataset"
|
9 |
+
]
|
10 |
+
},
|
11 |
+
{
|
12 |
+
"cell_type": "code",
|
13 |
+
"execution_count": null,
|
14 |
+
"metadata": {},
|
15 |
+
"outputs": [],
|
16 |
+
"source": [
|
17 |
+
"#| default_exp size"
|
18 |
+
]
|
19 |
+
},
|
20 |
+
{
|
21 |
+
"cell_type": "code",
|
22 |
+
"execution_count": null,
|
23 |
+
"metadata": {},
|
24 |
+
"outputs": [],
|
25 |
+
"source": [
|
26 |
+
"#|export\n",
|
27 |
+
"from fastcore.net import urljson, HTTPError\n",
|
28 |
+
"\n",
|
29 |
+
"def hfsize(repo:str):\n",
|
30 |
+
" \"Returns the size in GB of a HuggingFace Dataset.\"\n",
|
31 |
+
" url = f'https://huggingface.co/api/datasets/{repo}'\n",
|
32 |
+
" try: resp = urljson(f'{url}/treesize/main')\n",
|
33 |
+
" except HTTPError: return f'Did not find repo: {url}'\n",
|
34 |
+
" gb = resp['size'] / 1e9\n",
|
35 |
+
" return f'{gb:.2f} GB'"
|
36 |
+
]
|
37 |
+
},
|
38 |
+
{
|
39 |
+
"cell_type": "markdown",
|
40 |
+
"metadata": {},
|
41 |
+
"source": [
|
42 |
+
"`hfsize` takes as input a [Hugging Face Dataset](https://huggingface.co/docs/datasets/index) repo and returns the total size in GB of the data.\n",
|
43 |
+
"\n",
|
44 |
+
"For example, we can check the size of [tglcourse/CelebA-faces-cropped-128](https://huggingface.co/datasets/tglcourse/CelebA-faces-cropped-128) like so:"
|
45 |
+
]
|
46 |
+
},
|
47 |
+
{
|
48 |
+
"cell_type": "code",
|
49 |
+
"execution_count": null,
|
50 |
+
"metadata": {},
|
51 |
+
"outputs": [
|
52 |
+
{
|
53 |
+
"data": {
|
54 |
+
"text/plain": [
|
55 |
+
"'5.49 GB'"
|
56 |
+
]
|
57 |
+
},
|
58 |
+
"execution_count": null,
|
59 |
+
"metadata": {},
|
60 |
+
"output_type": "execute_result"
|
61 |
+
}
|
62 |
+
],
|
63 |
+
"source": [
|
64 |
+
"hfsize(\"tglcourse/CelebA-faces-cropped-128\")"
|
65 |
+
]
|
66 |
+
},
|
67 |
+
{
|
68 |
+
"cell_type": "code",
|
69 |
+
"execution_count": null,
|
70 |
+
"metadata": {},
|
71 |
+
"outputs": [],
|
72 |
+
"source": [
|
73 |
+
"#| hide\n",
|
74 |
+
"import nbdev; nbdev.nbdev_export()"
|
75 |
+
]
|
76 |
+
},
|
77 |
+
{
|
78 |
+
"cell_type": "code",
|
79 |
+
"execution_count": null,
|
80 |
+
"metadata": {},
|
81 |
+
"outputs": [],
|
82 |
+
"source": []
|
83 |
+
}
|
84 |
+
],
|
85 |
+
"metadata": {
|
86 |
+
"kernelspec": {
|
87 |
+
"display_name": "Python 3 (ipykernel)",
|
88 |
+
"language": "python",
|
89 |
+
"name": "python3"
|
90 |
+
}
|
91 |
+
},
|
92 |
+
"nbformat": 4,
|
93 |
+
"nbformat_minor": 4
|
94 |
+
}
|
nbs/styles.css
ADDED
@@ -0,0 +1,37 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
.cell {
|
2 |
+
margin-bottom: 1rem;
|
3 |
+
}
|
4 |
+
|
5 |
+
.cell > .sourceCode {
|
6 |
+
margin-bottom: 0;
|
7 |
+
}
|
8 |
+
|
9 |
+
.cell-output > pre {
|
10 |
+
margin-bottom: 0;
|
11 |
+
}
|
12 |
+
|
13 |
+
.cell-output > pre, .cell-output > .sourceCode > pre, .cell-output-stdout > pre {
|
14 |
+
margin-left: 0.8rem;
|
15 |
+
margin-top: 0;
|
16 |
+
background: none;
|
17 |
+
border-left: 2px solid lightsalmon;
|
18 |
+
border-top-left-radius: 0;
|
19 |
+
border-top-right-radius: 0;
|
20 |
+
}
|
21 |
+
|
22 |
+
.cell-output > .sourceCode {
|
23 |
+
border: none;
|
24 |
+
}
|
25 |
+
|
26 |
+
.cell-output > .sourceCode {
|
27 |
+
background: none;
|
28 |
+
margin-top: 0;
|
29 |
+
}
|
30 |
+
|
31 |
+
div.description {
|
32 |
+
padding-left: 2px;
|
33 |
+
padding-top: 5px;
|
34 |
+
font-style: italic;
|
35 |
+
font-size: 135%;
|
36 |
+
opacity: 70%;
|
37 |
+
}
|
settings.ini
ADDED
@@ -0,0 +1,42 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
[DEFAULT]
|
2 |
+
# All sections below are required unless otherwise specified.
|
3 |
+
# See https://github.com/fastai/nbdev/blob/master/settings.ini for examples.
|
4 |
+
|
5 |
+
### Python library ###
|
6 |
+
repo = nbdev-spaces-demo
|
7 |
+
lib_name = %(repo)s
|
8 |
+
version = 0.0.1
|
9 |
+
min_python = 3.7
|
10 |
+
license = apache2
|
11 |
+
|
12 |
+
### nbdev ###
|
13 |
+
doc_path = _docs
|
14 |
+
lib_path = nbdev_spaces_demo
|
15 |
+
nbs_path = nbs
|
16 |
+
recursive = True
|
17 |
+
tst_flags = notest
|
18 |
+
put_version_in_init = True
|
19 |
+
|
20 |
+
### Docs ###
|
21 |
+
branch = master
|
22 |
+
custom_sidebar = False
|
23 |
+
doc_host = https://%(user)s.github.io
|
24 |
+
doc_baseurl = /%(repo)s
|
25 |
+
git_url = https://github.com/%(user)s/%(repo)s
|
26 |
+
title = %(lib_name)s
|
27 |
+
|
28 |
+
### PyPI ###
|
29 |
+
audience = Developers
|
30 |
+
author = Hamel Husain
|
31 |
+
author_email = [email protected]
|
32 |
+
copyright = 2022 onwards, %(author)s
|
33 |
+
description = A demo of how to create a Hugging Face Space with gradio within a nbdev project.
|
34 |
+
keywords = nbdev jupyter notebook python
|
35 |
+
language = English
|
36 |
+
status = 3
|
37 |
+
user = fastai
|
38 |
+
|
39 |
+
### Optional ###
|
40 |
+
requirements = fastcore
|
41 |
+
dev_requirements = gradio
|
42 |
+
# console_scripts =
|
setup.py
ADDED
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
1 |
+
from pkg_resources import parse_version
|
2 |
+
from configparser import ConfigParser
|
3 |
+
import setuptools
|
4 |
+
assert parse_version(setuptools.__version__)>=parse_version('36.2')
|
5 |
+
|
6 |
+
# note: all settings are in settings.ini; edit there, not here
|
7 |
+
config = ConfigParser(delimiters=['='])
|
8 |
+
config.read('settings.ini')
|
9 |
+
cfg = config['DEFAULT']
|
10 |
+
|
11 |
+
cfg_keys = 'version description keywords author author_email'.split()
|
12 |
+
expected = cfg_keys + "lib_name user branch license status min_python audience language".split()
|
13 |
+
for o in expected: assert o in cfg, "missing expected setting: {}".format(o)
|
14 |
+
setup_cfg = {o:cfg[o] for o in cfg_keys}
|
15 |
+
|
16 |
+
licenses = {
|
17 |
+
'apache2': ('Apache Software License 2.0','OSI Approved :: Apache Software License'),
|
18 |
+
'mit': ('MIT License', 'OSI Approved :: MIT License'),
|
19 |
+
'gpl2': ('GNU General Public License v2', 'OSI Approved :: GNU General Public License v2 (GPLv2)'),
|
20 |
+
'gpl3': ('GNU General Public License v3', 'OSI Approved :: GNU General Public License v3 (GPLv3)'),
|
21 |
+
'bsd3': ('BSD License', 'OSI Approved :: BSD License'),
|
22 |
+
}
|
23 |
+
statuses = [ '1 - Planning', '2 - Pre-Alpha', '3 - Alpha',
|
24 |
+
'4 - Beta', '5 - Production/Stable', '6 - Mature', '7 - Inactive' ]
|
25 |
+
py_versions = '3.6 3.7 3.8 3.9 3.10'.split()
|
26 |
+
|
27 |
+
requirements = cfg.get('requirements','').split()
|
28 |
+
if cfg.get('pip_requirements'): requirements += cfg.get('pip_requirements','').split()
|
29 |
+
min_python = cfg['min_python']
|
30 |
+
lic = licenses.get(cfg['license'].lower(), (cfg['license'], None))
|
31 |
+
dev_requirements = (cfg.get('dev_requirements') or '').split()
|
32 |
+
|
33 |
+
setuptools.setup(
|
34 |
+
name = cfg['lib_name'],
|
35 |
+
license = lic[0],
|
36 |
+
classifiers = [
|
37 |
+
'Development Status :: ' + statuses[int(cfg['status'])],
|
38 |
+
'Intended Audience :: ' + cfg['audience'].title(),
|
39 |
+
'Natural Language :: ' + cfg['language'].title(),
|
40 |
+
] + ['Programming Language :: Python :: '+o for o in py_versions[py_versions.index(min_python):]] + (['License :: ' + lic[1] ] if lic[1] else []),
|
41 |
+
url = cfg['git_url'],
|
42 |
+
packages = setuptools.find_packages(),
|
43 |
+
include_package_data = True,
|
44 |
+
install_requires = requirements,
|
45 |
+
extras_require={ 'dev': dev_requirements },
|
46 |
+
dependency_links = cfg.get('dep_links','').split(),
|
47 |
+
python_requires = '>=' + cfg['min_python'],
|
48 |
+
long_description = open('README.md').read(),
|
49 |
+
long_description_content_type = 'text/markdown',
|
50 |
+
zip_safe = False,
|
51 |
+
entry_points = {
|
52 |
+
'console_scripts': cfg.get('console_scripts','').split(),
|
53 |
+
'nbdev': [f'{cfg.get("lib_path")}={cfg.get("lib_path")}._modidx:d']
|
54 |
+
},
|
55 |
+
**setup_cfg)
|
56 |
+
|
57 |
+
|