Update README.md
Browse files
README.md
CHANGED
|
@@ -35,30 +35,29 @@ pip install transformers
|
|
| 35 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
| 36 |
import torch
|
| 37 |
|
| 38 |
-
# Load the fine-tuned model and tokenizer
|
| 39 |
-
model_name = "sihuapeng/ESM2"
|
| 40 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 41 |
model = AutoModelForSequenceClassification.from_pretrained(model_name)
|
| 42 |
|
| 43 |
-
#
|
| 44 |
-
|
| 45 |
|
| 46 |
-
#
|
| 47 |
-
inputs = tokenizer(
|
| 48 |
|
| 49 |
-
#
|
| 50 |
with torch.no_grad():
|
| 51 |
outputs = model(**inputs)
|
|
|
|
|
|
|
| 52 |
|
| 53 |
-
#
|
| 54 |
-
|
| 55 |
-
|
|
|
|
|
|
|
| 56 |
|
| 57 |
-
# Output the predicted class
|
| 58 |
-
print ("===========================================================================================================================================")
|
| 59 |
-
print ("ID to Label mapping: {0: 'CYtoplasmicMembrane', 1: 'Cellwall', 2: 'Cytoplasmic', 3: 'Extracellular', 4: 'OuterMembrane', 5: 'Periplasmic'}")
|
| 60 |
-
print(f"Predicted class ID: {predicted_class_id}")
|
| 61 |
-
print ("===========================================================================================================================================")
|
| 62 |
```
|
| 63 |
|
| 64 |
## Funding
|
|
|
|
| 35 |
from transformers import AutoTokenizer, AutoModelForSequenceClassification
|
| 36 |
import torch
|
| 37 |
|
| 38 |
+
# Load the fine-tuned model and tokenizer from Hugging Face
|
| 39 |
+
model_name = "sihuapeng/TarPepSubLoc-ESM2"
|
| 40 |
tokenizer = AutoTokenizer.from_pretrained(model_name)
|
| 41 |
model = AutoModelForSequenceClassification.from_pretrained(model_name)
|
| 42 |
|
| 43 |
+
# Define the amino acid sequence
|
| 44 |
+
sequence = "MKFLIVFVALFAMAVARPNLAEIVRQVSDVEPEKWSSDVETSDGTSIKQEGVLKNAGTDNEAAVVHGSFTWVDEKTGEKFTITYVADENGYQPQGAHLPVAPVA"
|
| 45 |
|
| 46 |
+
# Tokenize the sequence
|
| 47 |
+
inputs = tokenizer(sequence, return_tensors="pt")
|
| 48 |
|
| 49 |
+
# Make the prediction
|
| 50 |
with torch.no_grad():
|
| 51 |
outputs = model(**inputs)
|
| 52 |
+
logits = outputs.logits
|
| 53 |
+
predicted_class_id = logits.argmax().item()
|
| 54 |
|
| 55 |
+
# Map the predicted class id to the class label
|
| 56 |
+
labels = ["SP", "MT", "CH", "TH", "Other"]
|
| 57 |
+
predicted_label = labels[predicted_class_id]
|
| 58 |
+
|
| 59 |
+
print(f"The predicted class for the sequence is: {predicted_label}")
|
| 60 |
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 61 |
```
|
| 62 |
|
| 63 |
## Funding
|