MUSKAN17 committed on
Commit
6e0d498
·
verified ·
1 Parent(s): bd8aeaa

Upload app.py

Browse files
Files changed (1) hide show
  1. app.py +35 -0
app.py ADDED
@@ -0,0 +1,35 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import gradio as gr
2
+ from transformers import AutoModelForSequenceClassification, AutoTokenizer
3
+ import torch
4
+ import torch.nn.functional as F
5
+
6
# Pull the AI-content classifier checkpoint and its matching tokenizer
# from the Hugging Face Hub (downloads on first run, then cached).
model_name = "vai0511/ai-content-classifier"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
10
+
11
# Synchronous classification entry point used by the Gradio interface.
def classify_text(text: str):
    """Classify *text* as human-written, AI-generated, or paraphrased.

    Returns a tuple of (top_label, breakdown) where breakdown maps each
    label to its softmax probability expressed as a percentage rounded
    to two decimal places.
    """
    encoded = tokenizer(
        text, return_tensors="pt", truncation=True, padding=True, max_length=512
    )
    # Inference only — no gradient bookkeeping needed.
    with torch.no_grad():
        logits = model(**encoded).logits

    label_map = {0: "Human-Written", 1: "AI-Generated", 2: "Paraphrased"}
    probs = F.softmax(logits, dim=1)[0].tolist()
    top_label = label_map[torch.argmax(logits, dim=1).item()]
    breakdown = {label_map[i]: round(p * 100, 2) for i, p in enumerate(probs)}
    return top_label, breakdown
25
+
26
# Wire the classifier into a Gradio UI: one text input, two outputs
# (the winning label plus the full percentage breakdown).
result_box = gr.Textbox(label="Classification Result")
percentages_view = gr.JSON(label="Classification Percentages")

iface = gr.Interface(
    fn=classify_text,
    inputs=gr.Textbox(label="Enter Text to Classify"),
    outputs=[result_box, percentages_view],
    live=True,  # re-run classification as the user types
)

# Start the web app.
iface.launch()