{ "hidden_size": 128, "hidden_act": "gelu", "initializer_range": 0.02, "vocab_size": 30522, "hidden_dropout_prob": 0.1, "num_attention_heads": 2, "type_vocab_size": 2, "max_position_embeddings": 1024, "num_hidden_layers": 2, "intermediate_size": 512, "attention_probs_dropout_prob": 0.1, "rope_scaling": { "type": "yarn", "factor": 2.0, "original_max_position_embeddings": 512 } }