{ "architectures": [ "HFHookedTransformer" ], "hidden_size": 128, "n_ctx": 2048, "num_attention_heads": 4, "num_hidden_layers": 6, "torch_dtype": "bfloat16", "transformers_version": "4.45.2", "vocab_size": 50304 }