{ "activation": "relu", "architecture": "transformer_lm_big", "architectures": ["GSLMForCausalLM"], "model_type": "gslm", "auto_map": { "AutoModel": "modeling.GSLMForCausalLM", "AutoConfig": "config.GSLMConfig", "AutoModelForCausalLM": "modeling.GSLMForCausalLM" }, "attention_dropout": 0.1, "d_model": 1024, "dim_feedforward": 4096, "dropout": 0.1, "max_seq_length": 3072, "nhead": 16, "num_layers": 12, "n_layer": 12, "pad_idx": 1, "pad_token_id": 0, "share_input_output_embed": true, "torch_dtype": "float32", "vocab_size": 204 }