import argparse
import logging
import time

import torch
from transformers import AutoTokenizer, BertForSequenceClassification
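
# torch_neuronx is imported for its side effect: it makes the Neuron backend
# available to torch.compile below.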
import torch_neuronx

logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)


def main():
    parser = argparse.ArgumentParser(description="Run BERT on Neuron")
    parser.add_argument(
        "--model", type=str, default="google-bert/bert-base-uncased", help="BERT model name"
    )
    parser.add_argument("--batch-size", type=int, default=1, help="Batch size")
    args = parser.parse_args()
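
    # Fix the default dtype and seed so runs are reproducible.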
    torch.set_default_dtype(torch.float32)
    torch.manual_seed(42)
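
    # attn_implementation="eager" selects the plain PyTorch attention path
    # instead of fused SDPA kernels.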
    model = BertForSequenceClassification.from_pretrained(
        args.model, torch_dtype=torch.float32, attn_implementation="eager"
    )
    model.eval()

    tokenizer = AutoTokenizer.from_pretrained(args.model)
    # Duplicate the sample sentence to fill the requested batch size,
    # which was previously parsed but never used.
    inputs = tokenizer(
        ["Hamilton is considered to be the best musical of human history."] * args.batch_size,
        return_tensors="pt",
    )
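
    # Eager-mode reference pass before compilation.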
    with torch.no_grad():
        logits = model(**inputs).logits
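
    # Compile only the forward pass with the Neuron backend; fullgraph=True
    # makes torch.compile raise on graph breaks instead of silently splitting.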
    model.forward = torch.compile(model.forward, backend="neuron", fullgraph=True)
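
    # The first compiled call triggers compilation, so time it separately
    # as warmup.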
    warmup_start = time.time()
    with torch.no_grad():
        logits = model(**inputs).logits
    warmup_time = time.time() - warmup_start
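
    # Steady-state timing: this call reuses the compiled graph.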
    run_start = time.time()
    with torch.no_grad():
        logits = model(**inputs).logits
    run_time = time.time() - run_start
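
    # logits has shape [batch_size, num_labels]; report the label for the
    # first example.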
    predicted_class_id = logits[0].argmax().item()
    predicted_class_label = model.config.id2label[predicted_class_id]

    logger.info(f"Warmup: {warmup_time:.2f}s, Run: {run_time:.4f}s")
    logger.info(f"Output label: {predicted_class_label}")


if __name__ == "__main__":
    main()