Spaces:
Sleeping
Sleeping
Add attribute "name" to model.py
Browse files
category_classification/models/allenai__scibert_sci_vocab_uncased/model.py
CHANGED
|
@@ -1,45 +1,46 @@
|
|
| 1 |
-
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
| 2 |
-
import torch
|
| 3 |
-
|
| 4 |
-
|
| 5 |
-
|
| 6 |
-
|
| 7 |
-
|
| 8 |
-
self.
|
| 9 |
-
self.
|
| 10 |
-
self.
|
| 11 |
-
self.model.
|
| 12 |
-
|
| 13 |
-
|
| 14 |
-
|
| 15 |
-
|
| 16 |
-
f"
|
| 17 |
-
|
| 18 |
-
for
|
| 19 |
-
|
| 20 |
-
|
| 21 |
-
|
| 22 |
-
|
| 23 |
-
|
| 24 |
-
|
| 25 |
-
|
| 26 |
-
|
| 27 |
-
|
| 28 |
-
|
| 29 |
-
|
| 30 |
-
|
| 31 |
-
|
| 32 |
-
|
| 33 |
-
|
| 34 |
-
|
| 35 |
-
|
| 36 |
-
|
| 37 |
-
|
| 38 |
-
|
| 39 |
-
|
| 40 |
-
|
| 41 |
-
self.
|
| 42 |
-
|
| 43 |
-
|
| 44 |
-
|
| 45 |
-
|
|
|
|
|
|
| 1 |
+
from transformers import AutoModelForSequenceClassification, AutoTokenizer
|
| 2 |
+
import torch
|
| 3 |
+
|
| 4 |
+
# Hub identifier of the base checkpoint this Space fine-tunes.
# NOTE(review): exposed as a module-level attribute — presumably so tooling
# can discover the underlying model; confirm against whatever reads it.
name = "allenai/scibert_scivocab_uncased"
|
| 5 |
+
|
| 6 |
+
class SciBertPaperClassifier:
    """Paper classifier built on a fine-tuned SciBERT sequence-classification model.

    Loads the model and tokenizer from ``model_path``, moves the model to GPU
    when one is available, and exposes a pipeline-style ``__call__`` that maps
    a batch of paper dicts to ``[{"label": ..., "score": ...}]`` predictions.
    """

    def __init__(self, model_path="trained_model"):
        """Load model + tokenizer from ``model_path`` and prepare for inference.

        Args:
            model_path: directory or hub id accepted by
                ``from_pretrained`` (default: the locally trained checkpoint).
        """
        self.model = AutoModelForSequenceClassification.from_pretrained(model_path)
        self.tokenizer = AutoTokenizer.from_pretrained(model_path)
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model.to(self.device)
        # Inference only: disable dropout / switch norm layers to eval statistics.
        self.model.eval()

    def __call__(self, inputs):
        """Classify a batch of papers.

        Args:
            inputs: iterable of dicts with ``"title"`` and ``"abstract"`` keys
                and an optional ``"authors"`` entry (list of names or a plain
                string; defaults to ``""`` when absent).

        Returns:
            A list parallel to ``inputs``; each element is a one-item list
            ``[{"label": str, "score": float}]`` — the argmax class name from
            ``model.config.id2label`` and its softmax probability.
        """
        # The inner one-element `for authors in [...]` loop is a comprehension
        # trick to bind `authors` once per paper before formatting.
        texts = [
            f"AUTHORS: {' '.join(authors) if isinstance(authors, list) else authors} "
            f"TITLE: {paper['title']} ABSTRACT: {paper['abstract']}"
            for paper in inputs
            for authors in [paper.get("authors", "")]
        ]

        # Fixed: the original rebound the parameter `inputs` here, shadowing
        # the caller's argument; use a distinct local name instead.
        encoded = self.tokenizer(
            texts, truncation=True, padding=True, max_length=256, return_tensors="pt"
        ).to(self.device)

        with torch.no_grad():  # no autograd bookkeeping needed for inference
            outputs = self.model(**encoded)

        probs = torch.nn.functional.softmax(outputs.logits, dim=-1)
        scores, labels = torch.max(probs, dim=1)

        return [
            [{"label": self.model.config.id2label[label.item()], "score": score.item()}]
            for label, score in zip(labels, scores)
        ]

    def __getstate__(self):
        """Pickle the entire instance dict (model, tokenizer, device)."""
        return self.__dict__

    def __setstate__(self, state):
        """Restore pickled state and re-home the model on an available device.

        Bug fix: the pickled ``device`` may say ``cuda`` while the unpickling
        host is CPU-only (or vice versa); re-resolve the device here instead
        of trusting the stored value, then move the model onto it.
        """
        self.__dict__ = state
        self.device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
        self.model.to(self.device)
|
| 43 |
+
|
| 44 |
+
|
| 45 |
+
def get_model():
    """Factory used by the Space runtime: return a ready-to-use classifier."""
    classifier = SciBertPaperClassifier()
    return classifier
|