import os

import joblib
import tensorflow as tf
from huggingface_hub import snapshot_download
from transformers import DistilBertTokenizer, TFDistilBertForSequenceClassification
# --- Artifact loading (runs at import time) ---------------------------------
# Diagnostic pass: report the working directory and try loading a local copy
# of the fine-tuned model, then load the authoritative model, tokenizer, and
# label encoder from the published Hugging Face Hub snapshot.
# NOTE(review): the local load below is purely diagnostic — even when it
# succeeds, `model` is unconditionally rebound from the Hub snapshot so that
# model, tokenizer, and label encoder always come from the same snapshot.

LOCAL_MODEL_DIR = os.path.join("nlp_intent", "intent_model")

print("Current working directory:", os.getcwd())
print(
    "Checking model path:",
    os.listdir(LOCAL_MODEL_DIR) if os.path.exists(LOCAL_MODEL_DIR) else "not found",
)

try:
    model = TFDistilBertForSequenceClassification.from_pretrained(LOCAL_MODEL_DIR)
    print("Model loaded successfully!")
except Exception as e:  # broad on purpose: any local-load failure is non-fatal
    print("Model load failed:", e)

# Download (or reuse the cached copy of) the full model repository snapshot.
repo_path = snapshot_download(
    repo_id="samithcs/nlp_intent_model",
    repo_type="model",
)

# Artifact locations inside the snapshot.
MODEL_PATH = os.path.join(repo_path, "nlp_intent", "intent_model")
TOKENIZER_PATH = os.path.join(repo_path, "nlp_intent", "intent_tokenizer")
LABEL_PATH = os.path.join(repo_path, "nlp_intent", "label_encoder.joblib")

# Module-level artifacts used by predict_intent().
model = TFDistilBertForSequenceClassification.from_pretrained(MODEL_PATH)
tokenizer = DistilBertTokenizer.from_pretrained(TOKENIZER_PATH)
label_encoder = joblib.load(LABEL_PATH)
print("Model loaded successfully!")
def predict_intent(text: str) -> dict:
    """Classify the intent of a single utterance.

    Args:
        text: Raw user input to classify.

    Returns:
        Dict with keys "intent" (label decoded by the module-level
        ``label_encoder``) and "confidence" (softmax probability of the
        predicted class, a float in [0, 1]).
    """
    inputs = tokenizer(
        text, return_tensors="tf", truncation=True, padding=True, max_length=128
    )
    outputs = model(inputs)
    # Softmax once over the single-example batch, then reuse the
    # probabilities for both the class pick and the confidence score
    # (argmax over probabilities equals argmax over logits).
    probs = tf.nn.softmax(outputs.logits, axis=-1)[0]
    predicted_class = int(tf.argmax(probs).numpy())
    intent = label_encoder.inverse_transform([predicted_class])[0]
    confidence = float(probs[predicted_class].numpy())
    return {"intent": intent, "confidence": confidence}