# BERT Text Classification Model
This is a simple model for text classification using BERT.
## Usage
To use the model, you can call the classify_text function with a text input, and it will return the predicted class label.
text = "This is a positive review."
predicted_class = classify_text(text)
print("Predicted class:", predicted_class)
from transformers import BertTokenizer, BertForSequenceClassification
# Load pre-trained BERT tokenizer and model
# NOTE(review): `bert-base-uncased` is the base checkpoint with no fine-tuned
# classification head — BertForSequenceClassification will attach a randomly
# initialized head, so predictions are untrained. Confirm the intended
# fine-tuned checkpoint for real use.
tokenizer = BertTokenizer.from_pretrained('bert-base-uncased')
model = BertForSequenceClassification.from_pretrained('bert-base-uncased')
# Classify a single text with the globally loaded BERT model.
def classify_text(text):
    """Return the index of the highest-probability class for *text*.

    Parameters
    ----------
    text : str
        Raw input text; tokenized with padding and truncation to the
        model's maximum length.

    Returns
    -------
    int
        Predicted class index (argmax over the softmax probabilities).

    NOTE(review): relies on module-level ``tokenizer`` and ``model``;
    with the bare ``bert-base-uncased`` checkpoint the classification
    head is untrained, so outputs are effectively random — confirm a
    fine-tuned checkpoint is loaded before trusting results.
    """
    import torch  # local import: only needed at inference time

    inputs = tokenizer(text, return_tensors='pt', padding=True, truncation=True)
    # Inference only — disable autograd to avoid building a gradient graph.
    with torch.no_grad():
        outputs = model(**inputs)
    logits = outputs.logits
    probabilities = logits.softmax(dim=1)
    # dim=1 is the class dimension; .item() unwraps the 1-element tensor.
    return probabilities.argmax(dim=1).item()
# Example: classify one review and report the predicted label index.
sample_text = "This is a positive review."
sample_prediction = classify_text(sample_text)
print("Predicted class:", sample_prediction)
- Downloads last month
- -
from huggingface_hub import hf_hub_download
import joblib

# Only load pickle/joblib files from sources you trust — joblib.load
# deserializes arbitrary code. See
# https://skops.readthedocs.io/en/stable/persistence.html
model = joblib.load(
    hf_hub_download("EngrSamad/BERT-Text-Classification-Model", "sklearn_model.joblib")
)