{
  "architectures": [
    "TamilTinyStoriesForCausalLM"
  ],
  "auto_map": {
    "AutoConfig": "configuration_tamil_tiny_stories.TamilTinyStoriesConfig",
    "AutoModelForCausalLM": "modeling_tamil_tiny_stories.TamilTinyStoriesForCausalLM"
  },
  "block_size": 128,
  "bos_token_id": 448,
  "dropout": 0.0,
  "dtype": "float32",
  "eos_token_id": 449,
  "hidden_size": 128,
  "is_decoder": true,
  "max_position_embeddings": 128,
  "model_type": "tamil_tiny_stories",
  "n_embd": 128,
  "n_head": 4,
  "n_layer": 4,
  "num_attention_heads": 4,
  "num_hidden_layers": 4,
  "original_vocab_size": 447,
  "pad_token_id": 447,
  "transformers_version": "5.3.0",
  "unk_token_id": 450,
  "use_cache": false,
  "vocab_size": 451
}