nixaut-codelabs committed · verified
Commit b8ddba8 · 1 Parent(s): 8cc880c

Update app.py

Files changed (1):
  1. app.py +5 -9
app.py CHANGED
@@ -90,10 +90,9 @@ model.eval()
 
 detoxify_model = Detoxify('multilingual')
 
-# Use a more accurate NSFW image detection model
+# Use the Falconsai NSFW image detection model
 print("Loading NSFW image classification model...")
-# Use a smaller, faster model for NSFW detection
-nsfw_classifier = pipeline("image-classification", model="cafeai/nsfw-detector")
+nsfw_classifier = pipeline("image-classification", model="Falconsai/nsfw_image_detection")
 print("NSFW image classification model loaded.")
 
 MODERATION_SYSTEM_PROMPT = (
@@ -306,10 +305,7 @@ def classify_text_with_detoxify(text):
 def classify_image(image_data):
     try:
         img = Image.open(io.BytesIO(image_data)).convert("RGB")
-        # Resize for faster processing
-        img = img.resize((224, 224))
-
-        # Use the NSFW detector
+        # Use the Falconsai NSFW detector
         results = nsfw_classifier(img)
 
         # Extract the NSFW score
@@ -319,8 +315,8 @@ def classify_image(image_data):
                 nsfw_score = result['score']
                 break
 
-        # Use a threshold of 0.7 for stricter detection
-        classification = 'u' if nsfw_score > 0.7 else 's'
+        # Use a threshold of 0.5 for NSFW detection
+        classification = 'u' if nsfw_score > 0.5 else 's'
 
         return {
             "classification": classification,