# Hugging Face Spaces page residue (status header) — kept as a comment:
# Spaces: Sleeping
| import gradio as gr | |
| from transformers import AutoTokenizer, AutoModelForSequenceClassification | |
| import numpy as np | |
| from utils import model_predict | |
# Load the three fine-tuned AI-text detectors and their matching tokenizers
# from local checkpoint directories. Each model casts one vote in
# classify_text() below. Loading happens once at import time, so the app
# pays the startup cost only on launch.
# NOTE(review): paths are relative to the working directory — presumably the
# Space's repository root; verify before running elsewhere.
roberta_base_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/roberta-base-openai-detector-model")
roberta_base_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/roberta-base-openai-detector-tokenizer")
chatgpt_lli_hc3_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/chatgpt-detector-lli-hc3-model")
chatgpt_lli_hc3_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/chatgpt-detector-lli-hc3-tokenizer")
chatgpt_roberta_detector = AutoModelForSequenceClassification.from_pretrained("Models/fine_tuned/chatgpt-detector-roberta-model")
chatgpt_roberta_detector_tknz = AutoTokenizer.from_pretrained("Models/fine_tuned/chatgpt-detector-roberta-tokenizer")
def classify_text(text):
    """Label *text* as "AI" or "Human" by majority vote of three detectors.

    Each detector is expected to return 1 (AI) or 0 (Human); any other
    value indicates a wiring bug and raises AssertionError. With three
    voters a tie is impossible, so strict majority decides.
    """
    predictions = [
        model_predict(roberta_base_detector, roberta_base_detector_tknz, text),
        model_predict(chatgpt_lli_hc3_detector, chatgpt_lli_hc3_detector_tknz, text),
        model_predict(chatgpt_roberta_detector, chatgpt_roberta_detector_tknz, text),
    ]
    # Guard against unexpected labels before tallying.
    if any(pred not in (0, 1) for pred in predictions):
        raise AssertionError("A problem exists with the code.")
    # Each 1 is an "AI" vote; the rest are "Human" votes.
    ai_votes = sum(predictions)
    human_votes = len(predictions) - ai_votes
    return "AI" if ai_votes > human_votes else "Human"
# Wire the classifier into a Gradio UI and start the server.
text_input = gr.Textbox(lines=2, placeholder="Enter a sentence to classify...")
iface = gr.Interface(fn=classify_text, inputs=text_input, outputs="text")
iface.launch()