{
"_unpadding": true,
"actv_fn": "gelu",
"add_timestep_emb": false,
"add_token_type_emb": false,
"architectures": [
"BertBlocksForMaskedLM"
],
"attention_gate": null,
"attn_dropout_prob": 0.1,
"attn_out_bias": false,
"attn_proj_bias": false,
"block_pos_enc_kind": "rope",
"block_pos_enc_kwargs": {
"base_global": 160000,
"base_local": 10000,
"max_seq_len": 1024,
"rope_dim": 64
},
"bos_token_id": 50281,
"classifier_dropout": 0.1,
"dtype": "float32",
"emb_dropout_prob": 0.0,
"emb_pos_enc_kind": "none",
"emb_pos_enc_kwargs": {},
"global_attention_every_n_layers": 3,
"head_type": "proj",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"include_final_norm": true,
"initializer_cutoff_factor": 3.0,
"initializer_gain": 1.0,
"initializer_kind": "trunc_normal",
"initializer_range": 0.02,
"intermediate_size": 1152,
"local_attention": [
128,
128
],
"mask_token_id": 1,
"max_sequence_length": 1024,
"mlp_in_bias": false,
"mlp_out_bias": false,
"mlp_type": "glu",
"model_type": "bertblocks",
"norm_bias": false,
"norm_eps": 1e-06,
"norm_fn": "layer",
"norm_kind": "pre",
"norm_params": {},
"norm_qk": false,
"norm_scaling": false,
"num_attention_heads": 12,
"num_blocks": 22,
"num_classes": 2,
"num_kv_heads": 12,
"pad_token_id": 50283,
"problem_type": "regression",
"relative_attention_max_distance": 128,
"relative_attention_num_buckets": 32,
"residual_first_layer": false,
"transformers_version": "4.57.6",
"type_vocab_size": 1,
"vocab_size": 50368
}