victorisgeek committed on
Commit 31f2e02
1 Parent(s): 61dbd85

Delete nsfw_checker/opennsfw.py

Files changed (1)
  1. nsfw_checker/opennsfw.py +0 -37
nsfw_checker/opennsfw.py DELETED
@@ -1,37 +0,0 @@
-import cv2
-import torch
-import onnx
-import onnxruntime
-import numpy as np
-from tqdm import tqdm
-
-# https://github.com/chemistzombie/stable-diffusion-unfiltered.git
-
-class NSFWChecker:
-    def __init__(self, model_path=None, providers=["CPUExecutionProvider"]):
-        model = onnx.load(model_path)
-        self.input_name = model.graph.input[0].name
-        session_options = onnxruntime.SessionOptions()
-        self.session = onnxruntime.InferenceSession(model_path, sess_options=session_options, providers=providers)
-
-    def is_nsfw(self, img_paths, threshold = 0.85):
-        skip_step = 1
-        total_len = len(img_paths)
-        if total_len < 100: skip_step = 1
-        if total_len > 100 and total_len < 500: skip_step = 10
-        if total_len > 500 and total_len < 1000: skip_step = 20
-        if total_len > 1000 and total_len < 10000: skip_step = 50
-        if total_len > 10000: skip_step = 100
-
-        for idx in tqdm(range(0, total_len, skip_step), total=int(total_len // skip_step), desc="Checking for NSFW contents"):
-            img = cv2.imread(img_paths[idx])
-            img = cv2.resize(img, (224,224)).astype('float32')
-            img -= np.array([104, 117, 123], dtype=np.float32)
-            img = np.expand_dims(img, axis=0)
-
-            score = self.session.run(None, {self.input_name:img})[0][0][1]
-
-            if score > threshold:
-                print(f"Detected nsfw score:{score}")
-                return False
-        return False
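
For reference, a minimal usage sketch of the class removed by this commit, assuming an OpenNSFW-style ONNX model file is available locally. The model filename and frame paths below are placeholders, not files shipped in this repository, and the import only works on revisions prior to this deletion.

# Hypothetical usage of the deleted NSFWChecker class (pre-deletion revisions only).
# "open_nsfw.onnx" and the frame paths are placeholder names for illustration.
from nsfw_checker.opennsfw import NSFWChecker

checker = NSFWChecker(model_path="open_nsfw.onnx", providers=["CPUExecutionProvider"])
frames = ["frames/frame_0001.png", "frames/frame_0002.png"]
# Note: as written, is_nsfw logs frames whose score exceeds the threshold but
# returns False on both branches, consistent with the referenced "unfiltered" patch.
print(checker.is_nsfw(frames, threshold=0.85))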