| import os
|
| import joblib
|
| from transformers import AutoTokenizer, TFAutoModelForSequenceClassification
|
| from config import Config
|
|
|
class ModelManager:
    """Load and hold the ML models (BERT and Naive Bayes) used by the app.

    Models are loaded eagerly at construction time. A failed load is logged
    and leaves the corresponding attribute as ``None`` rather than raising,
    so callers should check ``models_available`` before use.
    """

    def __init__(self):
        # Each attribute stays None if its model fails (or was never) loaded.
        self.bert_model = None
        self.bert_tokenizer = None
        self.naive_bayes_model = None
        self._load_models()

    def _load_models(self):
        """Attempt to load every supported model; failures are logged, not raised."""
        self._load_bert_model()
        self._load_naive_bayes_model()

    def _load_bert_model(self):
        """Load the BERT tokenizer and TF sequence-classification model from the hub.

        Best-effort: any exception is caught and reported so the app can keep
        running with whatever other models did load.
        """
        try:
            print(f"Loading BERT model from {Config.BERT_MODEL_REPO_ID}...")
            self.bert_tokenizer = AutoTokenizer.from_pretrained(Config.BERT_MODEL_REPO_ID)
            self.bert_model = TFAutoModelForSequenceClassification.from_pretrained(Config.BERT_MODEL_REPO_ID)
            print("✅ BERT model loaded successfully!")
        except Exception as e:
            # Deliberate broad catch: a missing/unreachable hub model must not
            # crash startup; bert_model/bert_tokenizer simply remain None.
            print(f"❌ Error loading BERT model: {e}")

    def _load_naive_bayes_model(self):
        """Load the serialized Naive Bayes model from disk, if the file exists."""
        try:
            if os.path.exists(Config.NAIVE_BAYES_MODEL_PATH):
                self.naive_bayes_model = joblib.load(Config.NAIVE_BAYES_MODEL_PATH)
                print("✅ Naive Bayes model loaded successfully")
            else:
                print(f"⚠️ Naive Bayes model not found at {Config.NAIVE_BAYES_MODEL_PATH}")
        except Exception as e:
            # Best-effort load, mirroring _load_bert_model's error handling.
            print(f"❌ Error loading Naive Bayes model: {e}")

    @property
    def models_available(self) -> bool:
        """True if at least one model loaded successfully.

        Fix: the original returned ``self.bert_model or self.naive_bayes_model``,
        which leaked a model object (or ``None``) instead of a bool. Truthiness
        is preserved for existing callers, but the value is now always a bool.
        """
        return self.bert_model is not None or self.naive_bayes_model is not None

    @property
    def default_model(self) -> str:
        """Name of the preferred model: Naive Bayes when loaded, else BERT."""
        return "Naive Bayes" if self.naive_bayes_model is not None else "BERT"
|
|
|