{
  "architectures": [
    "CharacterCausalLMWrapper"
  ],
  "base_model_name_or_path": "sign/utf8-lm-tiny",
  "bos_token_id": 2,
  "dtype": "float32",
  "eos_token_id": 3,
  "model_type": "character_causal_lm",
  "num_bytes": 4,
  "pad_token_id": 0,
  "transformers_version": "4.57.3"
}