import argparse
import logging
import time
import torch
from transformers import AutoTokenizer, BertForSequenceClassification
import torch_neuronx
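
# torch_neuronx appears unused but is imported for its side effects: importing it is
# what makes the Neuron backend referenced by torch.compile(..., backend="neuron")
# below available (assumption based on the usage in this script)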
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
def main():
    parser = argparse.ArgumentParser(description="Run BERT on Neuron")
    parser.add_argument(
        "--model", type=str, default="google-bert/bert-base-uncased", help="BERT model name"
    )
    # Note: --batch-size is accepted but not used below; the sample input has batch size 1
    parser.add_argument("--batch-size", type=int, default=1, help="Batch size")
    args = parser.parse_args()
    torch.set_default_dtype(torch.float32)
    torch.manual_seed(42)

    model = BertForSequenceClassification.from_pretrained(
        args.model, torch_dtype=torch.float32, attn_implementation="eager"
    )
    model.eval()
    tokenizer = AutoTokenizer.from_pretrained(args.model)
    inputs = tokenizer(
        "Hamilton is considered to be the best musical of human history.",
        return_tensors="pt",
    )
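    # inputs is a BatchEncoding whose tensors (input_ids, token_type_ids, attention_mask)
    # are splatted into the model call as keyword arguments below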

    # Run once to establish shapes before compile
    with torch.no_grad():
        logits = model(**inputs).logits
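
    # fullgraph=True asks TorchDynamo to capture the whole forward as a single graph,
    # erroring out instead of silently falling back to eager execution on a graph break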
    model.forward = torch.compile(model.forward, backend="neuron", fullgraph=True)

    # Warmup: the first call through the compiled forward pays the one-time compilation cost
    warmup_start = time.time()
    with torch.no_grad():
        logits = model(**inputs).logits
    warmup_time = time.time() - warmup_start

    # Run: steady-state latency of the already-compiled forward
    run_start = time.time()
    with torch.no_grad():
        logits = model(**inputs).logits
    run_time = time.time() - run_start

    predicted_class_id = logits.argmax().item()
    predicted_class_label = model.config.id2label[predicted_class_id]
    logger.info(f"Warmup: {warmup_time:.2f}s, Run: {run_time:.4f}s")
    logger.info(f"Output label: {predicted_class_label}")

if __name__ == "__main__":
    main()
"""
Works
"""