import gradio as gr
from transformers import pipeline


def _build_error_app(message):
    """Return a minimal Gradio app that reports *message* for any input.

    The message is bound as a lambda default argument so it remains
    available after the ``except`` block exits (Python unbinds the
    exception variable ``e`` when the handler finishes, so a closure
    over ``e`` could raise ``NameError`` on a later request).
    """
    return gr.Interface(
        lambda x, msg=message: msg,
        inputs="text",
        outputs="text",
        title="Error Loading Model",
        description="There was an issue loading the model. Please check your files and dependencies.",
    )


try:
    # The pipeline will automatically load the model and tokenizer
    # from the directory where the app is running.
    classifier = pipeline("text-classification", model="./", tokenizer="./")
except Exception as e:
    # Guard ONLY the model load: a failure inside launch() or the UI
    # construction should surface normally, not be masqueraded as a
    # model-loading error. Format the message eagerly (see helper note).
    iface = _build_error_app(f"An error occurred: {e}")
else:

    def classify_text(text):
        """Classify *text* and return a human-readable prediction string.

        Returns a prompt message for empty input; otherwise the mapped
        label ("Hate Speech" / "Not Hate Speech") and the model's
        confidence, formatted to four decimal places.
        """
        if not text:
            return "Please enter some text to classify."
        result = classifier(text)[0]
        # Map the default labels to more descriptive ones.
        label = "Hate Speech" if result['label'] == 'LABEL_1' else "Not Hate Speech"
        score = result['score']
        return f"Prediction: {label}\nConfidence: {score:.4f}"

    iface = gr.Interface(
        fn=classify_text,
        inputs=gr.Textbox(lines=5, placeholder="Enter a comment in English or Hindi..."),
        outputs=gr.Textbox(label="Result"),
        title="Multilingual Hate Speech Classifier",
        description="A model to classify comments as hate speech or not.",
    )

# Launch whichever interface was built (classifier app or error fallback).
iface.launch()