{
  "model_type": "text_to_cursor",
  "architecture": "TextToCursorModel",
  "encoder_type": "transformer",
  "decoder_type": "transformer",
  "source": "factorstudios/CURF",
  "vocab_size": 50257,
  "d_model": 2048,
  "d_ff": 8192,
  "num_encoder_layers": 12,
  "num_decoder_layers": 6,
  "num_heads": 32,
  "head_dim": 64,
  "cursor_dim": 2048,
  "attention_dropout": 0.1,
  "feed_forward_dropout": 0.1,
  "activation": "relu",
  "max_position_embeddings": 10000,
  "layer_norm_eps": 1e-06,
  "initializer_range": 0.02,
  "output_hidden_states": false,
  "output_attentions": false,
  "notes": "Downloaded from factorstudios/CURF. vocab_size=50257 matches the GPT-2 tokenizer vocabulary."
}