Spaces:
Running
Running
Commit
·
61d4fcb
1
Parent(s):
c375a6f
updates
Browse files
vouchervision/OCR_google_cloud_vision.py
CHANGED
|
@@ -810,6 +810,7 @@ class SafetyCheck():
|
|
| 810 |
return credentials
|
| 811 |
|
| 812 |
def check_for_inappropriate_content(self, file_stream):
|
|
|
|
| 813 |
content = file_stream.read()
|
| 814 |
image = vision.Image(content=content)
|
| 815 |
response = self.client.safe_search_detection(image=image)
|
|
@@ -832,11 +833,11 @@ class SafetyCheck():
|
|
| 832 |
print(f"racy: {likelihood_name[safe.racy]}")
|
| 833 |
|
| 834 |
# Check the levels of adult, violence, racy, etc. content.
|
| 835 |
-
if (safe.adult >
|
| 836 |
-
safe.medical >
|
| 837 |
-
safe.spoof >
|
| 838 |
-
safe.violence >
|
| 839 |
-
safe.racy >
|
| 840 |
print("Found violation")
|
| 841 |
return True # The image violates safe search guidelines.
|
| 842 |
|
|
|
|
| 810 |
return credentials
|
| 811 |
|
| 812 |
def check_for_inappropriate_content(self, file_stream):
|
| 813 |
+
LEVEL = 2
|
| 814 |
content = file_stream.read()
|
| 815 |
image = vision.Image(content=content)
|
| 816 |
response = self.client.safe_search_detection(image=image)
|
|
|
|
| 833 |
print(f"racy: {likelihood_name[safe.racy]}")
|
| 834 |
|
| 835 |
# Check the levels of adult, violence, racy, etc. content.
|
| 836 |
+
if (safe.adult > LEVEL or
|
| 837 |
+
safe.medical > LEVEL or
|
| 838 |
+
safe.spoof > LEVEL or
|
| 839 |
+
safe.violence > LEVEL or
|
| 840 |
+
safe.racy > LEVEL):
|
| 841 |
print("Found violation")
|
| 842 |
return True # The image violates safe search guidelines.
|
| 843 |
|