| import torch
|
| import torch.nn.functional as F
|
| from transformers import RobertaTokenizerFast, RobertaForSequenceClassification
|
| import gradio as gr
|
|
|
|
|
# Hugging Face Hub repo id of the fine-tuned RoBERTa detector.
MODEL_NAME = "Clement1290/261_DetectionAI_GeminiPlusGPT"

# Human-readable names for the classifier's two integer class ids.
LABEL_MAP = {0: "Human", 1: "AI"}
|
|
|
|
|
# Prefer GPU when one is available; otherwise fall back to CPU.
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")


# Load the tokenizer/model pair once at import time so every request
# served by the Gradio app reuses the same weights.
tokenizer = RobertaTokenizerFast.from_pretrained(MODEL_NAME)

model = RobertaForSequenceClassification.from_pretrained(MODEL_NAME)

# Move weights to the selected device before any inference call.
model.to(device)

# Inference only: disables dropout and other training-mode behavior.
model.eval()
|
|
|
|
|
def classify_text(text: str):
    """Classify *text* as human-written or AI-generated.

    Args:
        text: The passage to classify.

    Returns:
        A 3-tuple of:
          - raw prediction string ``"<id> (<label>)"``,
          - the label string ("Human" / "AI"),
          - the softmax confidence rounded to 4 decimal places.
        For blank/whitespace-only input, returns
        ``("N/A", "Please input some text.", 0.0)`` without running the model.
    """
    # Guard: don't run the model on empty input.
    if not text.strip():
        return "N/A", "Please input some text.", 0.0

    # Tokenize; truncate to RoBERTa's 512-token context window.
    inputs = tokenizer(
        text,
        return_tensors="pt",
        truncation=True,
        max_length=512,
        padding=True,
    ).to(device)

    # Forward pass without gradient tracking (inference only).
    with torch.no_grad():
        logits = model(**inputs).logits
        probs = F.softmax(logits, dim=-1)[0]

    pred_label = torch.argmax(probs).item()
    confidence = probs[pred_label].item()  # .item() already yields a Python float

    label_str = LABEL_MAP.get(pred_label, "Unknown")
    pred_str = f"{pred_label} ({label_str})"

    return pred_str, label_str, round(confidence, 4)
|
|
|
|
|
|
|
# Page copy shown at the top of the Gradio interface.
title = "Human vs AI Text Classifier"
description = "RoBERTa-based binary classifier. Prediction: 0 = Human, 1 = AI."
|
|
|
# Gradio UI: one text input mapped to the three return values of
# classify_text (raw prediction, label, confidence).
demo = gr.Interface(

    fn=classify_text,

    inputs=gr.Textbox(

        lines=8,

        placeholder="Paste a paragraph here...",

        label="Input text",

    ),

    # One output component per element of classify_text's return tuple.
    outputs=[

        gr.Textbox(label="Raw Prediction (id + label)"),

        gr.Textbox(label="Label"),

        gr.Number(label="Confidence score")

    ],

    title=title,

    description=description,

    # Clickable sample inputs shown below the interface.
    examples=[

        ["This is a short note I wrote myself about my day and my thoughts."],

        ["As an advanced AI model, I can generate human-like responses across a wide range of tasks."]

    ]

)
|
|
|
# Start the Gradio server only when run as a script (not on import).
if __name__ == "__main__":

    demo.launch()
|
|
|
|
|