# ai_detector / inference.py
# (Hub page chrome, kept as comments: ivanm151 — "first fix2", commit 00886d1)
# inference.py
from models import TextModels, ImageModel
from preprocessing import clean_text, load_image_from_url
# Instantiate the detector models once at module import time, so the FastAPI
# process loads weights a single time and reuses them across all requests.
text_model = TextModels()
image_model = ImageModel()
def detect_text(text):
    """Score a single piece of text with the AI-text detector.

    Args:
        text: raw input string; it is normalized via clean_text before scoring.

    Returns:
        dict with the detection verdict:
            type: always "text"
            ai_probability: detector score as a plain float
            generation_type: always "LLM"
            possible_sources: always None (source attribution not implemented)
    """
    text = clean_text(text)
    score = text_model.predict(text)
    return {
        "type": "text",
        # float() matches detect_image and keeps non-JSON-serializable model
        # scalar types (e.g. numpy floats — TODO confirm predict's return type)
        # out of the API response.
        "ai_probability": float(score),
        "generation_type": "LLM",
        "possible_sources": None,
    }
def detect_image(url: str):
    """Score a single image URL with the AI-image detector.

    On success returns a verdict dict (url, ai_probability, generation_type,
    possible_sources); on any failure returns {"url": ..., "error": ...}
    instead of raising, so one bad URL cannot abort a batch run.
    """
    try:
        picture = load_image_from_url(url)
        probability = float(image_model.predict(picture))
    except Exception as exc:  # best-effort boundary: report the error, don't raise
        return {"url": url, "error": str(exc)}
    return {
        "url": url,
        "ai_probability": probability,
        "generation_type": "diffusion",
        "possible_sources": None,
    }
# batch detect
def detect_page(texts, images):
    """Batch-score a page's content.

    Args:
        texts: iterable of strings, each scored via detect_text.
        images: iterable of image URLs, each scored via detect_image.

    Returns:
        {"texts": [...], "images": [...]} holding the per-item result dicts
        in input order.
    """
    return {
        "texts": list(map(detect_text, texts)),
        "images": list(map(detect_image, images)),
    }