from transformers import AutoTokenizer

# Load the tokenizer that matches the "Isotonic/gpt-human-assistant" checkpoint
# (downloads from the Hugging Face Hub on first use, then reads the local cache).
tokenizer = AutoTokenizer.from_pretrained("Isotonic/gpt-human-assistant")
def tokenize_function(examples):
    """Tokenize a batch of examples for `datasets.Dataset.map(..., batched=True)`.

    Args:
        examples: A batch mapping of column name -> list of values; must contain
            a "prompt" column of strings (assumed schema — confirm against the
            dataset actually being mapped).

    Returns:
        The tokenizer's output mapping (e.g. "input_ids", "attention_mask"),
        with each prompt truncated to the model's maximum sequence length.
    """
    # Relies on the module-level `tokenizer` loaded above.
    return tokenizer(examples["prompt"], truncation=True)
# Apply the tokenizer across the whole dataset in batches; `dataset` is expected
# to be a `datasets.Dataset`/`DatasetDict` defined elsewhere in this file.
tokenized_datasets = dataset.map(tokenize_function, batched=True)