Hugging Face Spaces — runtime status: Sleeping
import gradio as gr
from transformers import pipeline

# Load the classifier once at import time so every request reuses the model.
# "roberta-base-openai-detector" is a stable, supported AI-text detector
# (RoBERTa fine-tuned on GPT-2 output detection).
clf = pipeline("text-classification", model="roberta-base-openai-detector")
def detect_ai(text):
    """Classify *text* as AI-generated or human-written.

    Returns a human-readable string: the predicted label with its
    confidence percentage, a notice for empty input, or an error message
    if inference fails.
    """
    # Reject empty / whitespace-only input before invoking the model.
    if not text.strip():
        return "No text provided."
    try:
        # The pipeline returns a list of {"label", "score"} dicts;
        # take the top-ranked prediction.
        result = clf(text)[0]
        label = result['label']
        score = round(result['score'] * 100, 2)
        return f"{label} ({score}%)"
    except Exception as e:
        # Surface inference failures to the UI instead of crashing the app.
        return f"Error: {str(e)}"
# Simple Interface; Gradio also exposes the function as an API at /api/predict/.
demo = gr.Interface(
    fn=detect_ai,
    inputs=gr.Textbox(lines=4, label="Enter text"),
    outputs="text",
    title="AI Text Detector",
)

# Launch at module level so the Space serves the app on startup.
demo.launch()