{
  "tokenizer_class": "CustomTokenizer",
  "model_type": "custom",
  "do_lower_case": false,
  "vocab_size": 30000,
  "note": "This model uses custom vocabulary-based tokenization. The tokenizer should either be loaded from the base model or implemented with custom tokenization logic."
}