Spaces:
Sleeping
Sleeping
Update app.py
Browse files
app.py
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
|
| 2 |
import torch
|
| 3 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
| 4 |
import numpy as np
|
|
@@ -45,20 +45,20 @@ class FallacyFinder:
|
|
| 45 |
'no_fallacy': "The statement appears to be logically sound and free of common fallacies."
|
| 46 |
}
|
| 47 |
|
| 48 |
-
#
|
| 49 |
self.model = None
|
| 50 |
self.tokenizer = None
|
| 51 |
self.use_model = False
|
| 52 |
|
| 53 |
try:
|
| 54 |
-
|
| 55 |
-
|
| 56 |
-
|
| 57 |
-
|
| 58 |
-
|
| 59 |
-
|
| 60 |
-
|
| 61 |
-
print("
|
| 62 |
|
| 63 |
def predict_with_rules(self, text):
|
| 64 |
"""Rule-based fallacy detection for when model isn't available"""
|
|
|
|
| 1 |
+
import gradio as gr
|
| 2 |
import torch
|
| 3 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
| 4 |
import numpy as np
|
|
|
|
| 45 |
'no_fallacy': "The statement appears to be logically sound and free of common fallacies."
|
| 46 |
}
|
| 47 |
|
| 48 |
+
# Load your trained model from Hugging Face
|
| 49 |
self.model = None
|
| 50 |
self.tokenizer = None
|
| 51 |
self.use_model = False
|
| 52 |
|
| 53 |
try:
|
| 54 |
+
print("Loading model: SamanthaStorm/fallacyfinder")
|
| 55 |
+
self.tokenizer = AutoTokenizer.from_pretrained("SamanthaStorm/fallacyfinder")
|
| 56 |
+
self.model = AutoModelForSequenceClassification.from_pretrained("SamanthaStorm/fallacyfinder")
|
| 57 |
+
self.use_model = True
|
| 58 |
+
print("✅ Model loaded successfully!")
|
| 59 |
+
except Exception as e:
|
| 60 |
+
print(f"❌ Error loading model: {e}")
|
| 61 |
+
print("Falling back to rule-based approach")
|
| 62 |
|
| 63 |
def predict_with_rules(self, text):
|
| 64 |
"""Rule-based fallacy detection for when model isn't available"""
|