|
|
--- |
|
|
library_name: transformers |
|
|
tags: [] |
|
|
--- |
|
|
|
|
|
```python
|
|
import torch
from transformers import EsmTokenizer  # was missing: EsmTokenizer is used below

from udev.models.amplify.modeling_amplify import AMPLIFY

# Only CUDA is supported by this model implementation.
device = torch.device("cuda")

# NOTE: for gated/private repos, pass `token="hf_..."` to both
# `from_pretrained` calls below (the original snippet referenced an
# undefined `token` variable, which raised NameError).
model = AMPLIFY.from_pretrained('GleghornLab/AMPLIFY_120M').to(device)
tokenizer = EsmTokenizer.from_pretrained('GleghornLab/AMPLIFY_120M')

sequences = ['SEQWENCE', 'MEAEGAVE']  # list of protein sequences (str)

# Batch-tokenize; pad to a multiple of 8 for efficient tensor-core execution.
tokens = tokenizer(sequences, return_tensors='pt', padding=True, pad_to_multiple_of=8)
tokens = {k: v.to(device) for k, v in tokens.items()}

# Forward pass; pad_mask is the float attention mask expected by AMPLIFY.
out = model(
    src=tokens['input_ids'],
    pad_mask=tokens['attention_mask'].float(),
    output_hidden_states=True,
    output_attentions=True,
)
|
|
``` |