hysts HF Staff committed on
Commit
9c6e4e4
·
1 Parent(s): 24a36dc
.pre-commit-config.yaml CHANGED
@@ -1,6 +1,6 @@
1
  repos:
2
  - repo: https://github.com/pre-commit/pre-commit-hooks
3
- rev: v4.5.0
4
  hooks:
5
  - id: check-executables-have-shebangs
6
  - id: check-json
@@ -13,48 +13,21 @@ repos:
13
  args: ["--fix=lf"]
14
  - id: requirements-txt-fixer
15
  - id: trailing-whitespace
16
- - repo: https://github.com/myint/docformatter
17
- rev: v1.7.5
18
  hooks:
19
- - id: docformatter
20
- args: ["--in-place"]
21
- - repo: https://github.com/pycqa/isort
22
- rev: 5.13.2
23
- hooks:
24
- - id: isort
25
- args: ["--profile", "black"]
26
  - repo: https://github.com/pre-commit/mirrors-mypy
27
- rev: v1.8.0
28
  hooks:
29
  - id: mypy
30
  args: ["--ignore-missing-imports"]
31
  additional_dependencies:
32
  [
33
  "types-python-slugify",
34
- "types-requests",
35
- "types-PyYAML",
36
  "types-pytz",
 
 
37
  ]
38
- - repo: https://github.com/psf/black
39
- rev: 24.2.0
40
- hooks:
41
- - id: black
42
- language_version: python3.10
43
- args: ["--line-length", "119"]
44
- - repo: https://github.com/kynan/nbstripout
45
- rev: 0.7.1
46
- hooks:
47
- - id: nbstripout
48
- args:
49
- [
50
- "--extra-keys",
51
- "metadata.interpreter metadata.kernelspec cell.metadata.pycharm",
52
- ]
53
- - repo: https://github.com/nbQA-dev/nbQA
54
- rev: 1.7.1
55
- hooks:
56
- - id: nbqa-black
57
- - id: nbqa-pyupgrade
58
- args: ["--py37-plus"]
59
- - id: nbqa-isort
60
- args: ["--float-to-top"]
 
1
  repos:
2
  - repo: https://github.com/pre-commit/pre-commit-hooks
3
+ rev: v5.0.0
4
  hooks:
5
  - id: check-executables-have-shebangs
6
  - id: check-json
 
13
  args: ["--fix=lf"]
14
  - id: requirements-txt-fixer
15
  - id: trailing-whitespace
16
+ - repo: https://github.com/astral-sh/ruff-pre-commit
17
+ rev: v0.9.9
18
  hooks:
19
+ - id: ruff
20
+ args: ["--fix"]
21
+ - id: ruff-format
 
 
 
 
22
  - repo: https://github.com/pre-commit/mirrors-mypy
23
+ rev: v1.15.0
24
  hooks:
25
  - id: mypy
26
  args: ["--ignore-missing-imports"]
27
  additional_dependencies:
28
  [
29
  "types-python-slugify",
 
 
30
  "types-pytz",
31
+ "types-PyYAML",
32
+ "types-requests",
33
  ]
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
.python-version ADDED
@@ -0,0 +1 @@
 
 
1
+ 3.10
.vscode/extensions.json ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "recommendations": [
3
+ "ms-python.python",
4
+ "charliermarsh.ruff",
5
+ "streetsidesoftware.code-spell-checker",
6
+ "tamasfe.even-better-toml"
7
+ ]
8
+ }
.vscode/settings.json CHANGED
@@ -2,29 +2,16 @@
2
  "editor.formatOnSave": true,
3
  "files.insertFinalNewline": false,
4
  "[python]": {
5
- "editor.defaultFormatter": "ms-python.black-formatter",
6
  "editor.formatOnType": true,
7
  "editor.codeActionsOnSave": {
 
8
  "source.organizeImports": "explicit"
9
  }
10
  },
11
  "[jupyter]": {
12
  "files.insertFinalNewline": false
13
  },
14
- "black-formatter.args": [
15
- "--line-length=119"
16
- ],
17
- "isort.args": ["--profile", "black"],
18
- "flake8.args": [
19
- "--max-line-length=119"
20
- ],
21
- "ruff.lint.args": [
22
- "--line-length=119"
23
- ],
24
  "notebook.output.scrolling": true,
25
- "notebook.formatOnCellExecution": true,
26
- "notebook.formatOnSave.enabled": true,
27
- "notebook.codeActionsOnSave": {
28
- "source.organizeImports": "explicit"
29
- }
30
  }
 
2
  "editor.formatOnSave": true,
3
  "files.insertFinalNewline": false,
4
  "[python]": {
5
+ "editor.defaultFormatter": "charliermarsh.ruff",
6
  "editor.formatOnType": true,
7
  "editor.codeActionsOnSave": {
8
+ "source.fixAll.ruff": "explicit",
9
  "source.organizeImports": "explicit"
10
  }
11
  },
12
  "[jupyter]": {
13
  "files.insertFinalNewline": false
14
  },
 
 
 
 
 
 
 
 
 
 
15
  "notebook.output.scrolling": true,
16
+ "notebook.formatOnSave.enabled": true
 
 
 
 
17
  }
README.md CHANGED
@@ -4,7 +4,7 @@ emoji: 🏃
4
  colorFrom: gray
5
  colorTo: purple
6
  sdk: gradio
7
- sdk_version: 4.36.1
8
  app_file: app.py
9
  pinned: false
10
  ---
 
4
  colorFrom: gray
5
  colorTo: purple
6
  sdk: gradio
7
+ sdk_version: 5.20.0
8
  app_file: app.py
9
  pinned: false
10
  ---
app.py CHANGED
@@ -1,7 +1,5 @@
1
  #!/usr/bin/env python
2
 
3
- from __future__ import annotations
4
-
5
  import os
6
  import pathlib
7
  import tarfile
@@ -21,21 +19,19 @@ def load_sample_image_paths() -> list[pathlib.Path]:
21
  if not image_dir.exists():
22
  path = huggingface_hub.hf_hub_download("public-data/sample-images-TADNE", "images.tar.gz", repo_type="dataset")
23
  with tarfile.open(path) as f:
24
- f.extractall()
25
  return sorted(image_dir.glob("*"))
26
 
27
 
28
  def load_model() -> tf.keras.Model:
29
  path = huggingface_hub.hf_hub_download("public-data/DeepDanbooru", "model-resnet_custom_v3.h5")
30
- model = tf.keras.models.load_model(path)
31
- return model
32
 
33
 
34
  def load_labels() -> list[str]:
35
  path = huggingface_hub.hf_hub_download("public-data/DeepDanbooru", "tags.txt")
36
- with open(path) as f:
37
- labels = [line.strip() for line in f.readlines()]
38
- return labels
39
 
40
 
41
  model = load_model()
@@ -53,8 +49,8 @@ def predict(image: PIL.Image.Image, score_threshold: float) -> tuple[dict[str, f
53
  probs = probs.astype(float)
54
 
55
  indices = np.argsort(probs)[::-1]
56
- result_all = dict()
57
- result_threshold = dict()
58
  for index in indices:
59
  label = labels[index]
60
  prob = probs[index]
@@ -67,15 +63,15 @@ def predict(image: PIL.Image.Image, score_threshold: float) -> tuple[dict[str, f
67
 
68
 
69
  image_paths = load_sample_image_paths()
70
- examples = [[path.as_posix(), 0.5] for path in image_paths]
71
 
72
- with gr.Blocks(css="style.css") as demo:
73
  gr.Markdown(DESCRIPTION)
74
  with gr.Row():
75
  with gr.Column():
76
  image = gr.Image(label="Input", type="pil")
77
  score_threshold = gr.Slider(label="Score threshold", minimum=0, maximum=1, step=0.05, value=0.5)
78
- run_button = gr.Button("Run")
79
  with gr.Column():
80
  with gr.Tabs():
81
  with gr.Tab(label="Output"):
 
1
  #!/usr/bin/env python
2
 
 
 
3
  import os
4
  import pathlib
5
  import tarfile
 
19
  if not image_dir.exists():
20
  path = huggingface_hub.hf_hub_download("public-data/sample-images-TADNE", "images.tar.gz", repo_type="dataset")
21
  with tarfile.open(path) as f:
22
+ f.extractall() # noqa: S202
23
  return sorted(image_dir.glob("*"))
24
 
25
 
26
  def load_model() -> tf.keras.Model:
27
  path = huggingface_hub.hf_hub_download("public-data/DeepDanbooru", "model-resnet_custom_v3.h5")
28
+ return tf.keras.models.load_model(path)
 
29
 
30
 
31
  def load_labels() -> list[str]:
32
  path = huggingface_hub.hf_hub_download("public-data/DeepDanbooru", "tags.txt")
33
+ with pathlib.Path(path).open() as f:
34
+ return [line.strip() for line in f]
 
35
 
36
 
37
  model = load_model()
 
49
  probs = probs.astype(float)
50
 
51
  indices = np.argsort(probs)[::-1]
52
+ result_all = {}
53
+ result_threshold = {}
54
  for index in indices:
55
  label = labels[index]
56
  prob = probs[index]
 
63
 
64
 
65
  image_paths = load_sample_image_paths()
66
+ examples = [[path, 0.5] for path in image_paths]
67
 
68
+ with gr.Blocks(css_paths="style.css") as demo:
69
  gr.Markdown(DESCRIPTION)
70
  with gr.Row():
71
  with gr.Column():
72
  image = gr.Image(label="Input", type="pil")
73
  score_threshold = gr.Slider(label="Score threshold", minimum=0, maximum=1, step=0.05, value=0.5)
74
+ run_button = gr.Button()
75
  with gr.Column():
76
  with gr.Tabs():
77
  with gr.Tab(label="Output"):
pyproject.toml ADDED
@@ -0,0 +1,57 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "deepdanbooru-app"
3
+ version = "0.1.0"
4
+ description = "Add your description here"
5
+ readme = "README.md"
6
+ requires-python = ">=3.10"
7
+ dependencies = [
8
+ "deepdanbooru",
9
+ "gradio>=5.20.0",
10
+ "hf-transfer>=0.1.9",
11
+ "tensorflow>=2.18.0",
12
+ ]
13
+
14
+ [tool.uv.sources]
15
+ deepdanbooru = { git = "https://github.com/KichangKim/DeepDanbooru", rev = "v3-20200915-sgd-e30" }
16
+
17
+ [tool.ruff]
18
+ line-length = 119
19
+
20
+ [tool.ruff.lint]
21
+ select = ["ALL"]
22
+ ignore = [
23
+ "COM812", # missing-trailing-comma
24
+ "D203", # one-blank-line-before-class
25
+ "D213", # multi-line-summary-second-line
26
+ "E501", # line-too-long
27
+ "SIM117", # multiple-with-statements
28
+ ]
29
+ extend-ignore = [
30
+ "D100", # undocumented-public-module
31
+ "D101", # undocumented-public-class
32
+ "D102", # undocumented-public-method
33
+ "D103", # undocumented-public-function
34
+ "D104", # undocumented-public-package
35
+ "D105", # undocumented-magic-method
36
+ "D107", # undocumented-public-init
37
+ "EM101", # raw-string-in-exception
38
+ "FBT001", # boolean-type-hint-positional-argument
39
+ "FBT002", # boolean-default-value-positional-argument
40
+ "PD901", # pandas-df-variable-name
41
+ "PGH003", # blanket-type-ignore
42
+ "PLR0913", # too-many-arguments
43
+ "PLR0915", # too-many-statements
44
+ "TRY003", # raise-vanilla-args
45
+ ]
46
+ unfixable = [
47
+ "F401", # unused-import
48
+ ]
49
+
50
+ [tool.ruff.lint.pydocstyle]
51
+ convention = "google"
52
+
53
+ [tool.ruff.lint.per-file-ignores]
54
+ "*.ipynb" = ["T201", "T203"]
55
+
56
+ [tool.ruff.format]
57
+ docstring-code-format = true
requirements.txt CHANGED
@@ -1,4 +1,261 @@
1
- git+https://github.com/KichangKim/DeepDanbooru@v3-20200915-sgd-e30#egg=deepdanbooru
2
- gradio==4.36.1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  pillow==10.3.0
4
- tensorflow==2.15.0.post1
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # This file was autogenerated by uv via the following command:
2
+ # uv pip compile pyproject.toml -o requirements.txt
3
+ absl-py==2.1.0
4
+ # via
5
+ # keras
6
+ # tensorboard
7
+ # tensorflow
8
+ aiofiles==23.2.1
9
+ # via gradio
10
+ annotated-types==0.7.0
11
+ # via pydantic
12
+ anyio==4.8.0
13
+ # via
14
+ # gradio
15
+ # httpx
16
+ # starlette
17
+ astunparse==1.6.3
18
+ # via tensorflow
19
+ certifi==2025.1.31
20
+ # via
21
+ # httpcore
22
+ # httpx
23
+ # requests
24
+ charset-normalizer==3.4.1
25
+ # via requests
26
+ click==8.1.8
27
+ # via
28
+ # deepdanbooru
29
+ # typer
30
+ # uvicorn
31
+ deepdanbooru @ git+https://github.com/KichangKim/DeepDanbooru@55e8ed7e5e6f24641d9d646a2f34035813b536d4
32
+ # via deepdanbooru-app (pyproject.toml)
33
+ exceptiongroup==1.2.2
34
+ # via anyio
35
+ fastapi==0.115.11
36
+ # via gradio
37
+ ffmpy==0.5.0
38
+ # via gradio
39
+ filelock==3.17.0
40
+ # via huggingface-hub
41
+ flatbuffers==25.2.10
42
+ # via tensorflow
43
+ fsspec==2025.2.0
44
+ # via
45
+ # gradio-client
46
+ # huggingface-hub
47
+ gast==0.6.0
48
+ # via tensorflow
49
+ google-pasta==0.2.0
50
+ # via tensorflow
51
+ gradio==5.20.0
52
+ # via deepdanbooru-app (pyproject.toml)
53
+ gradio-client==1.7.2
54
+ # via gradio
55
+ groovy==0.1.2
56
+ # via gradio
57
+ grpcio==1.70.0
58
+ # via
59
+ # tensorboard
60
+ # tensorflow
61
+ h11==0.14.0
62
+ # via
63
+ # httpcore
64
+ # uvicorn
65
+ h5py==3.13.0
66
+ # via
67
+ # keras
68
+ # tensorflow
69
+ hf-transfer==0.1.9
70
+ # via deepdanbooru-app (pyproject.toml)
71
+ httpcore==1.0.7
72
+ # via httpx
73
+ httpx==0.28.1
74
+ # via
75
+ # gradio
76
+ # gradio-client
77
+ # safehttpx
78
+ huggingface-hub==0.29.1
79
+ # via
80
+ # gradio
81
+ # gradio-client
82
+ idna==3.10
83
+ # via
84
+ # anyio
85
+ # httpx
86
+ # requests
87
+ imageio==2.37.0
88
+ # via scikit-image
89
+ jinja2==3.1.5
90
+ # via gradio
91
+ keras==3.8.0
92
+ # via tensorflow
93
+ lazy-loader==0.4
94
+ # via scikit-image
95
+ libclang==18.1.1
96
+ # via tensorflow
97
+ markdown==3.7
98
+ # via tensorboard
99
+ markdown-it-py==3.0.0
100
+ # via rich
101
+ markupsafe==2.1.5
102
+ # via
103
+ # gradio
104
+ # jinja2
105
+ # werkzeug
106
+ mdurl==0.1.2
107
+ # via markdown-it-py
108
+ ml-dtypes==0.4.1
109
+ # via
110
+ # keras
111
+ # tensorflow
112
+ namex==0.0.8
113
+ # via keras
114
+ networkx==3.4.2
115
+ # via scikit-image
116
+ numpy==2.0.2
117
+ # via
118
+ # deepdanbooru
119
+ # gradio
120
+ # h5py
121
+ # imageio
122
+ # keras
123
+ # ml-dtypes
124
+ # pandas
125
+ # scikit-image
126
+ # scipy
127
+ # tensorboard
128
+ # tensorflow
129
+ # tifffile
130
+ opt-einsum==3.4.0
131
+ # via tensorflow
132
+ optree==0.14.1
133
+ # via keras
134
+ orjson==3.10.15
135
+ # via gradio
136
+ packaging==24.2
137
+ # via
138
+ # gradio
139
+ # gradio-client
140
+ # huggingface-hub
141
+ # keras
142
+ # lazy-loader
143
+ # scikit-image
144
+ # tensorboard
145
+ # tensorflow
146
+ pandas==2.2.3
147
+ # via gradio
148
  pillow==10.3.0
149
+ # via
150
+ # gradio
151
+ # imageio
152
+ # scikit-image
153
+ protobuf==5.29.3
154
+ # via
155
+ # tensorboard
156
+ # tensorflow
157
+ pydantic==2.10.6
158
+ # via
159
+ # fastapi
160
+ # gradio
161
+ pydantic-core==2.27.2
162
+ # via pydantic
163
+ pydub==0.25.1
164
+ # via gradio
165
+ pygments==2.19.1
166
+ # via rich
167
+ python-dateutil==2.9.0.post0
168
+ # via pandas
169
+ python-multipart==0.0.20
170
+ # via gradio
171
+ pytz==2025.1
172
+ # via pandas
173
+ pyyaml==6.0.2
174
+ # via
175
+ # gradio
176
+ # huggingface-hub
177
+ requests==2.32.3
178
+ # via
179
+ # deepdanbooru
180
+ # huggingface-hub
181
+ # tensorflow
182
+ rich==13.9.4
183
+ # via
184
+ # keras
185
+ # typer
186
+ ruff==0.9.9
187
+ # via gradio
188
+ safehttpx==0.1.6
189
+ # via gradio
190
+ scikit-image==0.25.2
191
+ # via deepdanbooru
192
+ scipy==1.15.2
193
+ # via scikit-image
194
+ semantic-version==2.10.0
195
+ # via gradio
196
+ setuptools==75.8.2
197
+ # via
198
+ # tensorboard
199
+ # tensorflow
200
+ shellingham==1.5.4
201
+ # via typer
202
+ six==1.17.0
203
+ # via
204
+ # astunparse
205
+ # deepdanbooru
206
+ # google-pasta
207
+ # python-dateutil
208
+ # tensorboard
209
+ # tensorflow
210
+ sniffio==1.3.1
211
+ # via anyio
212
+ starlette==0.46.0
213
+ # via
214
+ # fastapi
215
+ # gradio
216
+ tensorboard==2.18.0
217
+ # via tensorflow
218
+ tensorboard-data-server==0.7.2
219
+ # via tensorboard
220
+ tensorflow==2.18.0
221
+ # via deepdanbooru-app (pyproject.toml)
222
+ tensorflow-io-gcs-filesystem==0.37.1
223
+ # via tensorflow
224
+ termcolor==2.5.0
225
+ # via tensorflow
226
+ tifffile==2025.2.18
227
+ # via scikit-image
228
+ tomlkit==0.13.2
229
+ # via gradio
230
+ tqdm==4.67.1
231
+ # via huggingface-hub
232
+ typer==0.15.2
233
+ # via gradio
234
+ typing-extensions==4.12.2
235
+ # via
236
+ # anyio
237
+ # fastapi
238
+ # gradio
239
+ # gradio-client
240
+ # huggingface-hub
241
+ # optree
242
+ # pydantic
243
+ # pydantic-core
244
+ # rich
245
+ # tensorflow
246
+ # typer
247
+ # uvicorn
248
+ tzdata==2025.1
249
+ # via pandas
250
+ urllib3==2.3.0
251
+ # via requests
252
+ uvicorn==0.34.0
253
+ # via gradio
254
+ websockets==15.0
255
+ # via gradio-client
256
+ werkzeug==3.1.3
257
+ # via tensorboard
258
+ wheel==0.45.1
259
+ # via astunparse
260
+ wrapt==1.17.2
261
+ # via tensorflow
uv.lock ADDED
The diff for this file is too large to render. See raw diff