| #!/usr/bin/env python3 | |
| """AI Detector Example - Python Inference""" | |
from functools import lru_cache

import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification
def detect_ai(text, model_id="darwinkernelpanic/ai-detector-pgx"):
    """Score *text* for the probability that it was AI-generated.

    Args:
        text: The passage to classify.
        model_id: Hugging Face model identifier of a sequence-classification
            checkpoint (default: "darwinkernelpanic/ai-detector-pgx").

    Returns:
        dict with:
            "ai_prob": float — softmax probability of class index 1
                (assumes label index 1 means "AI" — TODO confirm against
                model.config.id2label).
            "is_ai": bool — True when ai_prob > 0.5.
    """
    tokenizer, model = _load_detector(model_id)
    # Truncate to the model's typical 512-token context window.
    inputs = tokenizer(
        text, return_tensors="pt", truncation=True, max_length=512, padding=True
    )
    with torch.no_grad():  # inference only — no autograd bookkeeping
        outputs = model(**inputs)
    probs = torch.softmax(outputs.logits, dim=1)
    ai_prob = probs[0][1].item()
    return {"ai_prob": ai_prob, "is_ai": ai_prob > 0.5}


@lru_cache(maxsize=4)
def _load_detector(model_id):
    """Load and cache the tokenizer/model pair for *model_id*.

    The original code re-instantiated (and potentially re-downloaded) the
    model on every call; caching makes repeated detect_ai() calls cheap.
    """
    tokenizer = AutoTokenizer.from_pretrained(model_id)
    model = AutoModelForSequenceClassification.from_pretrained(model_id)
    model.eval()  # disable dropout etc. for deterministic inference
    return tokenizer, model
if __name__ == "__main__":
    # Demo: classify a sample passage and report the verdict.
    sample = "The mitochondria is the powerhouse of the cell..."
    report = detect_ai(sample)
    print(f"AI Probability: {report['ai_prob']:.2%}")
    print(f"Verdict: {'AI' if report['is_ai'] else 'Human'}")