ccabrerafreepik committed on
Commit
638ef29
·
verified ·
1 Parent(s): 0272246

Update app.py

Browse files

Remove pipeline to increase speed

Files changed (1) hide show
  1. app.py +4 -12
app.py CHANGED
@@ -1,11 +1,9 @@
1
- from transformers import pipeline
2
  from PIL import Image
3
  import gradio as gr
4
  from nsfw_image_detector import NSFWDetector
5
  import torch
6
 
7
- # Load your model from the Hub
8
- classifier_pipe = pipeline("image-classification", model="Freepik/nsfw_image_detector", torch_dtype=torch.bfloat16, device="cpu")
9
  classifier_nsfw = NSFWDetector(dtype=torch.bfloat16, device="cpu")
10
 
11
  # Define the inference function
@@ -13,7 +11,6 @@ def classify_image(image, confidence_level):
13
  # Get predictions from both models
14
  result_nsfw_proba = classifier_nsfw.predict_proba(image)
15
  is_nsfw_method = result_nsfw_proba[0][confidence_level] >= 0.5
16
- result_pipe = classifier_pipe(image)
17
 
18
  # Format NSFW probability scores
19
  proba_dict = result_nsfw_proba[0]
@@ -25,12 +22,8 @@ def classify_image(image, confidence_level):
25
  is_nsfw_str = f"NSFW Classification ({confidence_level.title()}):\n"
26
  is_nsfw_str += "🔴 True" if is_nsfw_method else "🟢 False"
27
 
28
- # Format pipeline results
29
- pipe_str = "Pipeline Results:\n"
30
- for result in result_pipe:
31
- pipe_str += f"{result['label']}: {result['score']:.4f}\n"
32
 
33
- return nsfw_proba_str, is_nsfw_str, pipe_str
34
 
35
  # Create Gradio interface
36
  demo = gr.Interface(
@@ -44,9 +37,8 @@ demo = gr.Interface(
44
  )
45
  ],
46
  outputs=[
47
- gr.Textbox(label="NSFW Probability Scores (recommended)", lines=3),
48
- gr.Textbox(label="NSFW Classification (recommended)", lines=2),
49
- gr.Textbox(label="Pipeline Results (not recommended, specially in production)", lines=3)
50
  ],
51
  title="NSFW Image Classifier",
52
  description="Upload an image and select a confidence level to get a prediction using the Freepik/nsfw_image_detector model."
 
 
1
  from PIL import Image
2
  import gradio as gr
3
  from nsfw_image_detector import NSFWDetector
4
  import torch
5
 
6
+
 
7
  classifier_nsfw = NSFWDetector(dtype=torch.bfloat16, device="cpu")
8
 
9
  # Define the inference function
 
11
  # Get predictions from both models
12
  result_nsfw_proba = classifier_nsfw.predict_proba(image)
13
  is_nsfw_method = result_nsfw_proba[0][confidence_level] >= 0.5
 
14
 
15
  # Format NSFW probability scores
16
  proba_dict = result_nsfw_proba[0]
 
22
  is_nsfw_str = f"NSFW Classification ({confidence_level.title()}):\n"
23
  is_nsfw_str += "🔴 True" if is_nsfw_method else "🟢 False"
24
 
 
 
 
 
25
 
26
+ return nsfw_proba_str, is_nsfw_str
27
 
28
  # Create Gradio interface
29
  demo = gr.Interface(
 
37
  )
38
  ],
39
  outputs=[
40
+ gr.Textbox(label="NSFW Categories Scores", lines=3),
41
+ gr.Textbox(label="NSFW Classification", lines=2),
 
42
  ],
43
  title="NSFW Image Classifier",
44
  description="Upload an image and select a confidence level to get a prediction using the Freepik/nsfw_image_detector model."