Commit a577ee6 · 1 parent: 6434d07 · updates
vouchervision/OCR_google_cloud_vision.py
CHANGED
@@ -815,10 +815,28 @@ class SafetyCheck():
         response = self.client.safe_search_detection(image=image)
         safe = response.safe_search_annotation
 
+        likelihood_name = (
+            "UNKNOWN",
+            "VERY_UNLIKELY",
+            "UNLIKELY",
+            "POSSIBLE",
+            "LIKELY",
+            "VERY_LIKELY",
+        )
+        print("Safe search:")
+
+        print(f"adult: {likelihood_name[safe.adult]}")
+        print(f"medical: {likelihood_name[safe.medical]}")
+        print(f"spoofed: {likelihood_name[safe.spoof]}")
+        print(f"violence: {likelihood_name[safe.violence]}")
+        print(f"racy: {likelihood_name[safe.racy]}")
+
         # Check the levels of adult, violence, racy, etc. content.
         if (safe.adult > vision.Likelihood.POSSIBLE or
             safe.violence > vision.Likelihood.POSSIBLE or
             safe.racy > vision.Likelihood.POSSIBLE):
+            print("Found violation")
             return True # The image violates safe search guidelines.
-
+
+        print("Found NO violation")
         return False # The image is considered safe.
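For reference, below is a minimal, self-contained sketch of the same Cloud Vision safe-search call outside the SafetyCheck class, assuming the google-cloud-vision client library (v2+) is installed and application credentials are configured; the is_image_safe helper and the specimen.jpg path are illustrative, not part of this commit.

from google.cloud import vision


def is_image_safe(path: str) -> bool:
    """Return False if Cloud Vision rates the image above POSSIBLE for adult, violence, or racy content."""
    client = vision.ImageAnnotatorClient()

    # Read the image bytes and wrap them in a Vision Image message.
    with open(path, "rb") as f:
        image = vision.Image(content=f.read())

    response = client.safe_search_detection(image=image)
    safe = response.safe_search_annotation

    # Likelihood values are small integers (UNKNOWN=0 ... VERY_LIKELY=5),
    # which is why the diff above can index a plain tuple with them and
    # compare them directly against vision.Likelihood.POSSIBLE.
    return not (
        safe.adult > vision.Likelihood.POSSIBLE
        or safe.violence > vision.Likelihood.POSSIBLE
        or safe.racy > vision.Likelihood.POSSIBLE
    )


if __name__ == "__main__":
    print(is_image_safe("specimen.jpg"))  # hypothetical test image

The likelihood_name tuple added in the diff serves the same purpose for logging; the enum's own name attribute (e.g. vision.Likelihood(safe.adult).name) is an equivalent way to get the readable label.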