AurelPx committed on
Commit
3591fcc
·
verified ·
1 Parent(s): f696084

Upload README.md with huggingface_hub

Browse files
Files changed (1) hide show
  1. README.md +10 -5
README.md CHANGED
@@ -81,23 +81,28 @@ import torch.nn.functional as F
81
 
82
  # Load model and tokenizer
83
  model_name = "AurelPx/FinRoBERTa-Mendeley"
 
 
84
  tokenizer = AutoTokenizer.from_pretrained(model_name)
85
- model = AutoModelForSequenceClassification.from_pretrained(model_name)
86
 
87
- # Example text
88
  text = "Tesla shares rally after strong earnings report"
89
 
90
- # Tokenize and predict
91
- inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True)
 
 
92
  with torch.no_grad():
93
  logits = model(**inputs).logits
 
 
94
  probs = F.softmax(logits, dim=-1)
95
  pred = probs.argmax().item()
96
 
97
  labels = ["Negative", "Neutral", "Positive"]
98
  print(f"Sentence: {text}")
99
  print(f"Predicted Sentiment: {labels[pred]}")
100
- print(f"Probabilities: {probs}")
101
  ```
102
 
103
  ---
 
81
 
82
  # Load model and tokenizer
83
  model_name = "AurelPx/FinRoBERTa-Mendeley"
84
+ device = "cuda" if torch.cuda.is_available() else "cpu"
85
+
86
  tokenizer = AutoTokenizer.from_pretrained(model_name)
87
+ model = AutoModelForSequenceClassification.from_pretrained(model_name).to(device)
88
 
 
89
  text = "Tesla shares rally after strong earnings report"
90
 
91
+ # Tokenize and move to correct device
92
+ inputs = tokenizer(text, return_tensors="pt", truncation=True, padding=True).to(device)
93
+
94
+ # Predict
95
  with torch.no_grad():
96
  logits = model(**inputs).logits
97
+
98
+ # Compute probabilities and predicted label
99
  probs = F.softmax(logits, dim=-1)
100
  pred = probs.argmax().item()
101
 
102
  labels = ["Negative", "Neutral", "Positive"]
103
  print(f"Sentence: {text}")
104
  print(f"Predicted Sentiment: {labels[pred]}")
105
+ print(f"Probabilities: {probs.cpu().numpy()}")
106
  ```
107
 
108
  ---