{
  "architectures": [
    "NGramLM"
  ],
  "bos_token_id": 50256,
  "eos_token_id": 50256,
  "hidden_size": 128,
  "model_type": "ngram",
  "torch_dtype": "bfloat16",
  "transformers_version": "4.51.3",
  "vocab_size": 50257
}