Toxic-comment / app / models.py
asahwells's picture
Update threshold to 0.70 and fix notebook logic
048d981
raw
history blame contribute delete
545 Bytes
"""
Pydantic models shared across the API.
"""
from typing import Literal
from pydantic import BaseModel, Field
class IncomingMessage(BaseModel):
    """Inbound request payload: a single raw text message to be classified."""
    # Required, non-empty string; min_length=1 rejects "" at validation time.
    text: str = Field(..., min_length=1)
class ToxicityResult(BaseModel):
    """Normalized output from the toxicity classification model."""
    # Binary verdict; the Literal restricts values to exactly these two strings.
    label: Literal["TOXIC", "NON-TOXIC"]
    # Classifier confidence, validated to lie within [0.0, 1.0].
    confidence: float = Field(..., ge=0.0, le=1.0)
class FilterResult(BaseModel):
    """Outcome of running a message through the toxicity filter.

    Combines the accept/reject decision with the normalized classifier
    output (same label/confidence shape as ToxicityResult).
    """
    # Filter decision for the message.
    status: Literal["accepted", "rejected"]
    # NOTE(review): presumably the original or echoed message text — confirm with caller.
    message: str
    # Binary classifier verdict, mirroring ToxicityResult.label.
    label: Literal["TOXIC", "NON-TOXIC"]
    # Bounded to [0.0, 1.0] for consistency with ToxicityResult.confidence;
    # previously unconstrained, which allowed out-of-range values through.
    confidence: float = Field(..., ge=0.0, le=1.0)