Spaces:
Sleeping
Sleeping
File size: 263 Bytes
from __future__ import annotations
from typing import List
def tokenize(text: str) -> List[str]:
    """Tokenize a normalized text into whitespace-delimited tokens.

    Args:
        text: Input string; expected to be pre-normalized, but any run of
            whitespace (spaces, tabs, newlines) is treated as a delimiter.

    Returns:
        The list of non-empty tokens, in order. An empty or
        all-whitespace input yields an empty list.
    """
    # str.split() with no argument splits on any whitespace run and
    # never produces empty tokens, so no explicit empty-string guard or
    # filtering is needed. This also fixes the docstring/code mismatch:
    # the original split only on the literal " " character.
    return text.split()