KenLumod commited on
Commit
74ba4a7
·
verified ·
1 Parent(s): 5cc26f5

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +10 -28
app.py CHANGED
@@ -1,31 +1,13 @@
1
- from transformers import AutoModel, AutoTokenizer, AutoConfig
2
  import torch
3
- import torch.nn as nn
4
 
5
- # 1. Load base components
6
- model_name = "KenLumod/ML-Fake-Real-News-Detector-Final"
7
- tokenizer = AutoTokenizer.from_pretrained(model_name)
8
- config = AutoConfig.from_pretrained(model_name)
9
 
10
- # 2. Load base BERT (without classification head)
11
- bert = AutoModel.from_pretrained(model_name)
12
-
13
- # 3. Add your custom classifier (must match training architecture)
14
- class FakeNewsClassifier(nn.Module):
15
- def __init__(self, bert_model):
16
- super().__init__()
17
- self.bert = bert_model
18
- self.classifier = nn.Sequential(
19
- nn.Dropout(0.1),
20
- nn.Linear(768, 512), # Match your hidden layer size
21
- nn.ReLU(),
22
- nn.Linear(512, config.num_labels), # Uses config's label count
23
- nn.LogSoftmax(dim=1)
24
- )
25
-
26
- def forward(self, input_ids, attention_mask):
27
- outputs = self.bert(input_ids, attention_mask=attention_mask)
28
- return self.classifier(outputs.last_hidden_state[:, 0, :]) # CLS token
29
-
30
- # 4. Create complete model
31
- model = FakeNewsClassifier(bert).eval()
 
1
from transformers import AutoModelForSequenceClassification, AutoTokenizer
import torch

# 1. Load your already-working model directly
# Repo id on the Hugging Face Hub; both the fine-tuned classifier weights
# and the matching tokenizer come from the same repository.
model = AutoModelForSequenceClassification.from_pretrained(
    "KenLumod/ML-Fake-Real-News-Detector-Final"
)
tokenizer = AutoTokenizer.from_pretrained(
    "KenLumod/ML-Fake-Real-News-Detector-Final"
)
7
 
8
# 2. Prediction function (unchanged from your working version)
def predict(text):
    """Classify *text* as fake or real news.

    Tokenizes the input (truncated to 256 tokens), runs the sequence
    classifier with gradient tracking disabled, and maps the argmax
    logit index to a label: index 1 -> "FAKE", otherwise "REAL".
    """
    encoded = tokenizer(text, return_tensors="pt", truncation=True, max_length=256)
    with torch.no_grad():
        result = model(**encoded)
    # Label index 1 is the "fake" class in this model's head
    # (assumed from the original author's mapping — TODO confirm via config.id2label).
    if result.logits.argmax() == 1:
        return "FAKE"
    return "REAL"