fastsdcpu / backend / safety_check.py
from transformers import pipeline


def is_safe_image(
    classifier,
    image,
):
    """Return True when the classifier scores the image as more "normal" than "nsfw"."""
    pred = classifier(image)
    nsfw_score = 0
    normal_score = 0
    # Pull the scores for the "nsfw" and "normal" labels out of the prediction list.
    for label in pred:
        if label["label"] == "nsfw":
            nsfw_score = label["score"]
        elif label["label"] == "normal":
            normal_score = label["score"]
    print(f"nsfw_score: {nsfw_score}, normal_score: {normal_score}")
    return normal_score > nsfw_score
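

# Example usage (a minimal sketch, not part of the original module). It assumes
# a Hugging Face image-classification pipeline whose output labels include
# "nsfw" and "normal"; the model name "Falconsai/nsfw_image_detection" is one
# such classifier and is an assumption here, not necessarily what fastsdcpu uses.
if __name__ == "__main__":
    from PIL import Image

    classifier = pipeline(
        "image-classification",
        model="Falconsai/nsfw_image_detection",  # assumed nsfw/normal classifier
    )
    image = Image.open("generated.png")  # hypothetical path to a generated image
    if is_safe_image(classifier, image):
        print("Image passed the safety check")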