thrimurthi2025 committed
Commit 20f32b6 · verified · parent: 825f4e1

Updated app.py with new model

Files changed (1):
  app.py  +10 -13
app.py CHANGED
@@ -1,4 +1,3 @@
-# app.py
 import gradio as gr
 from transformers import pipeline
 from PIL import Image
@@ -8,7 +7,6 @@ import traceback
 MODEL_ID = "umm-maybe/AI-image-detector"
 
 # Load the model pipeline (image-classification)
-# device = -1 forces CPU; Spaces manages hardware, so omit device for default
 pipe = pipeline("image-classification", model=MODEL_ID)
 
 def predict_image(image: Image.Image):
@@ -21,37 +19,36 @@ def predict_image(image: Image.Image):
         res = pipe(image)
 
         # typical pipeline returns list of dicts: [{"label": "...", "score": ...}, ...]
-        # But some wrappers return richer JSON. Try to detect structure:
-        # If res is a list of simple label/score dicts, use the top result.
         if isinstance(res, list) and isinstance(res[0], dict) and "label" in res[0]:
             top = res[0]
             label = top.get("label", "")
             score = float(top.get("score", 0.0))
-            # Map label to friendly text if needed
+
             if label.lower().startswith("artificial") or "ai" in label.lower():
                 verdict = "AI-generated"
             elif label.lower().startswith("human") or "real" in label.lower():
                 verdict = "Human-made"
             else:
-                # fallback: if score high and label looks like 'artificial'
                 verdict = label
+
             return f"{verdict} — {score:.2f}"
-        # If pipeline returns nested structure (some wrappers do), attempt to use it:
+
+        # Handle nested response structures
         if isinstance(res, dict):
-            # check nested fields used by composite wrappers
             imr = res.get("individual_model_results") or res.get("models") or {}
-            ai_det = imr.get("ai_detector") or imr.get("umm-maybe") or None
+            ai_det = imr.get("ai_detector") or None
             if ai_det:
                 ai_prob = ai_det.get("ai_probability") or ai_det.get("score") or ai_det.get("confidence")
                 ai_prob = float(ai_prob)
                 if ai_prob >= 0.6:
                     return f"AI-generated — {ai_prob:.2f}"
                 elif ai_prob <= 0.4:
-                    return f"Human-made — {(1-ai_prob):.2f}"
+                    return f"Human-made — {(1 - ai_prob):.2f}"
                 else:
                     return f"Uncertain — {ai_prob:.2f}"
-        # Last fallback: stringify result
+
         return str(res)
+
     except Exception as e:
         traceback.print_exc()
         return f"Error: {e}"
@@ -61,8 +58,8 @@ demo = gr.Interface(
     fn=predict_image,
     inputs=gr.Image(type="pil"),
     outputs="text",
-    title="AI Image Detector (umm-maybe)",
-    description="Upload an image. The model returns whether it's AI-generated or human-made."
+    title="AI Image Detector",
+    description="Upload an image to detect whether it is AI-generated or human-made."
 )
 
 if __name__ == "__main__":
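
Note: the list-of-dicts shape that the simplified predict_image branch relies on can be checked locally with a short script like the one below. This is a minimal sketch, not part of the commit: the image path is a placeholder, and the "artificial"/"human" label names are inferred from the string checks in predict_image rather than from model documentation.

# check_pipeline_output.py: sketch of a local sanity check (hypothetical helper, not in the repo)
from PIL import Image
from transformers import pipeline

pipe = pipeline("image-classification", model="umm-maybe/AI-image-detector")

# "sample.jpg" is a placeholder; replace with any local image file.
result = pipe(Image.open("sample.jpg"))
print(result)
# Expected shape (label names inferred from predict_image's checks):
# [{'label': 'artificial', 'score': 0.97}, {'label': 'human', 'score': 0.03}]

The top entry of that list (res[0]) is what the updated code maps to "AI-generated" or "Human-made"; the nested individual_model_results branch remains only as a fallback for wrapper-style responses.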