import tiktoken
def estimate_token_count(prompt: str, model: str) -> int:
    """
    Estimate the token count for a given prompt and model.

    Args:
        prompt (str): The input prompt to tokenize.
        model (str): The name of the model to use for token encoding.

    Returns:
        int: The estimated token count.
    """
    try:
        encoding = tiktoken.encoding_for_model(model)
    except KeyError:
        # encoding_for_model raises KeyError for model names not in
        # tiktoken's registry; fall back to cl100k_base so the estimate
        # still succeeds instead of crashing on unknown/new models.
        encoding = tiktoken.get_encoding("cl100k_base")
    return len(encoding.encode(prompt))