# moderators / utils.py
# (Hugging Face Space file header: uploaded by fcakyon, "Update utils.py",
# commit 804963d, verified)
import gradio as gr
from typing import Any, Dict
from urllib.request import urlopen, Request
from io import BytesIO
from pathlib import Path
from PIL import Image
from functools import lru_cache
from moderators import AutoModerator
# Process-wide cache of loaded moderation models, keyed by model id.
# Populated lazily by load_model().
_MODEL_CACHE: Dict[str, Any] = {}

# (image URL, model id) pairs used to seed the Gradio Examples component.
# The images are downloaded once into EXAMPLES_DIR by prepare_examples().
EXAMPLE_ITEMS = [
("https://assets.clevelandclinic.org/transform/LargeFeatureImage/cd71f4bd-81d4-45d8-a450-74df78e4477a/Apples-184940975-770x533-1_jpg", "viddexa/nsfw-detection-2-mini"),
("https://encrypted-tbn0.gstatic.com/images?q=tbn:ANd9GcSbRwt56NYsiHwrT8oS-igzgeEzp7p3Jbe2dw&s", "viddexa/nsfw-detection-2-mini"),
("https://img.freepik.com/premium-photo/portrait-beautiful-young-woman_1048944-5548042.jpg", "viddexa/nsfw-detection-2-mini"),
]

# CSS injected into the Gradio Blocks app: styled header banner, hidden
# built-in footer, and a custom footer class.
CUSTOM_CSS = """
.header {
text-align: center;
padding: 2rem 1rem;
background: linear-gradient(135deg, #14b8a6 0%, #0d9488 100%);
color: white;
border-radius: 8px;
margin-bottom: 2rem;
}
.header h1 { margin: 0; font-size: 2.5em; }
.header p { margin: 0.5rem 0; opacity: 0.9; }
.header a { color: white; text-decoration: underline; }
/* Hide built-in Gradio footer */
footer {
display: none !important;
}
/* Custom footer styling */
.custom-footer {
text-align: center;
padding: 1rem;
color: #888;
margin-top: 2rem;
}
"""

# On-disk cache directory for the example images; created at import time.
EXAMPLES_DIR = Path("examples")
EXAMPLES_DIR.mkdir(exist_ok=True)
@lru_cache(maxsize=32)
def download_image(url: str) -> Image.Image:
    """Fetch *url* and return its contents as an RGB PIL image.

    Up to 32 URLs are memoized via lru_cache, so repeated requests for
    the same URL are served from memory instead of the network.
    """
    request = Request(url, headers={"User-Agent": "viddexa-gradio-demo/1.0"})
    with urlopen(request, timeout=20) as response:
        payload = response.read()
    return Image.open(BytesIO(payload)).convert("RGB")
def prepare_examples():
    """Materialize example images on disk and return Gradio example rows.

    Each EXAMPLE_ITEMS entry is downloaded once into EXAMPLES_DIR; entries
    whose download fails are skipped with a warning instead of aborting
    the whole list. Returns a list of [local_path, model_id] rows.
    """
    rows = []
    for i, (url, model) in enumerate(EXAMPLE_ITEMS):
        target = EXAMPLES_DIR / f"example_{i}.jpg"
        if not target.exists():
            try:
                download_image(url).save(target, "JPEG", quality=95)
            except Exception as e:
                print(f"Warning: Could not download example {i}: {e}")
                continue
        rows.append([str(target), model])
    return rows
def load_model(model_id: str, token: str | None = None) -> Any:
    """Return a cached AutoModerator for *model_id*, loading it on first use.

    NOTE(review): the cache is keyed on model_id only — a later call with a
    different token reuses the instance loaded with the first token; confirm
    this is acceptable for gated models.
    """
    try:
        return _MODEL_CACHE[model_id]
    except KeyError:
        model = AutoModerator.from_pretrained(model_id, token=token, use_fast=True)
        _MODEL_CACHE[model_id] = model
        return model
def _extract_scores(result: Any) -> Dict[str, float]:
    """Normalize one moderation result into a {label: score} mapping.

    Accepts either a result object exposing a ``classifications`` attribute
    or a plain dict with a ``"classifications"`` key; the payload itself may
    be a mapping of label -> score or a list of {"label", "score"} dicts.
    """
    classifications = (
        result.classifications
        if hasattr(result, "classifications")
        else result["classifications"]
    )
    if isinstance(classifications, dict):
        return {str(label): float(score) for label, score in classifications.items()}
    return {str(item["label"]): float(item["score"]) for item in classifications}


def analyze(image_path: str | None, image_url: str | None, model_id: str, token: str | None = None):
    """Run inference and return classification scores.

    Args:
        image_path: Local path to an uploaded image; takes precedence over
            ``image_url`` when both are given.
        image_url: URL to download the image from when no path is provided.
        model_id: Model id passed to AutoModerator.from_pretrained.
        token: Optional access token forwarded to the model loader.

    Returns:
        Mapping of label -> score for the first result.

    Raises:
        gr.Error: If neither an image path nor a URL is provided.
    """
    if not image_path and not image_url:
        raise gr.Error("Provide an image or URL")
    img = Image.open(image_path).convert("RGB") if image_path else download_image(image_url)
    model = load_model(model_id, token)
    results = model(img)
    return _extract_scores(results[0])