File size: 106 Bytes
03a907a
 
 
 
 
1
2
3
4
5
6
import re


def tokenize(text: str) -> list[str]:
    """Split *text* into lowercase word tokens.

    A token is a maximal run of ASCII letters and apostrophes, so
    contractions such as "don't" are kept as a single token. All other
    characters (digits, punctuation, whitespace) act as separators.
    """
    word_pattern = re.compile(r"[A-Za-z']+")
    return word_pattern.findall(text.lower())