import torch
import warnings
warnings.filterwarnings("ignore")
from transformers import WhisperForConditionalGeneration, WhisperProcessor
def main() -> None:
    """Smoke-test Whisper text generation on a dummy all-zeros feature batch.

    Loads ``openai/whisper-large-v3``, runs ``generate`` on a zero tensor of
    shape (batch=1, n_mels=128, frames=3000) — the log-mel layout large-v3
    expects — and prints the shape of the returned token-id tensor.
    """
    model_name_or_path = "openai/whisper-large-v3"
    # Detect the device at runtime instead of hard-coding "cpu".
    device = "cuda" if torch.cuda.is_available() else "cpu"
    # fp16 kernel coverage on CPU is incomplete in PyTorch (some ops raise
    # "not implemented for 'Half'"), so only use half precision on GPU.
    dtype = torch.float16 if device == "cuda" else torch.float32
    # NOTE(review): processor is loaded but unused below — presumably kept to
    # verify the preprocessing assets resolve; confirm before removing.
    processor = WhisperProcessor.from_pretrained(model_name_or_path)
    model = WhisperForConditionalGeneration.from_pretrained(
        model_name_or_path, torch_dtype=dtype
    ).to(device)
    # whisper-large-v3 uses 128 mel bins over 3000 frames (30 s of audio).
    inputs = torch.zeros(1, 128, 3000, dtype=dtype, device=device)
    # Pass the features by keyword to make the call unambiguous.
    out = model.generate(input_features=inputs, return_timestamps=True, num_beams=1)
    print("generate output shape:", out.shape)


if __name__ == "__main__":
    main()