from io import BytesIO
import tempfile

import requests
from fastapi import FastAPI, HTTPException, Query
from nsfw_detector import predict
from PIL import Image

app = FastAPI()

# Load the pre-trained MobileNetV2 NSFW model once, at startup.
model = predict.load_model("nsfw_mobilenet2.224x224.h5")


@app.get("/check-nsfw/")
def check_nsfw(image_url: str = Query(..., description="URL of the image to analyze")):
    """
    Download an image from a URL and classify it as NSFW or SFW.
    """
    try:
        response = requests.get(image_url, timeout=10)
        response.raise_for_status()
        image = Image.open(BytesIO(response.content))

        # predict.classify expects file paths rather than PIL images, so write
        # the downloaded image to a temporary file before classifying it.
        with tempfile.NamedTemporaryFile(suffix=".jpg") as tmp:
            image.convert("RGB").save(tmp, format="JPEG")
            tmp.flush()
            predictions = predict.classify(model, tmp.name)

        # The result is keyed by the file path; take the single entry. Scores
        # cover the categories drawings, hentai, neutral, porn and sexy.
        scores = next(iter(predictions.values()))

        nsfw_score = scores["porn"] + scores["sexy"]
        sfw_score = scores["neutral"]
        classification = "NSFW" if nsfw_score > sfw_score else "SFW"

        return {
            "image_url": image_url,
            "classification": classification,
            "scores": scores,
        }
    except Exception as e:
        raise HTTPException(status_code=400, detail=str(e))