Example usage of the ESM2-finetuned-PPSL model:
```
"""Example: predict the class of a protein sequence with ESM2-finetuned-PPSL."""
from transformers import AutoTokenizer, AutoModelForSequenceClassification
import torch

# Load the fine-tuned model and tokenizer from the Hugging Face Hub.
model_name = "sihuapeng/ESM2-finetuned-PPSL"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForSequenceClassification.from_pretrained(model_name)
model.eval()  # switch off dropout/batch-norm updates for deterministic inference

# Example protein sequence (single-letter amino-acid codes).
protein_sequence = "MSKKVLITGGAGYIGSVLTPILLEKGYEVCVIDNLMFDQISLLSCFHNKNFTFINGDAMDENLIRQEVAKADIIIPLAALVGAPLCKRNPKLAKMINYEAVKMISDFASPSQIFIYPNTNSGYGIGEKDAMCTEESPLRPISEYGIDKVHAEQYLLDKGNCVTFRLATVFGISPRMRLDLLVNDFTYRAYRDKFIVLFEEHFRRNYIHVRDVVKGFIHGIENYDKMKGQAYNMGLSSANLTKRQLAETIKKYIPDFYIHSANIGEDPDKRDYLVSNTKLEATGWKPDNTLEDGIKELLRAFKMMKVNRFANFN"

# Encode the sequence as model input (PyTorch tensors).
inputs = tokenizer(protein_sequence, return_tensors="pt")

# Perform inference without tracking gradients.
# NOTE: the forward pass must be indented inside the no_grad() block
# (the original example left it unindented, which is an IndentationError).
with torch.no_grad():
    outputs = model(**inputs)

# Take the highest-scoring logit as the predicted class.
logits = outputs.logits
predicted_class_id = torch.argmax(logits, dim=-1).item()

# Output the predicted class.
print(f"Predicted class ID: {predicted_class_id}")
```