{
"_checkpoint_path": "/home/matteo/Albertone/Matformer/matformer/checkpoints_crazy/crazy_model_20251125_235754_last.ckpt",
"_matformer_config_dict": {
"_checkpoint_path": "/home/matteo/Albertone/Matformer/matformer/checkpoints_crazy/crazy_model_20251125_235754_last.ckpt",
"_model_class": "Autoregressive_Model",
"_tokenizer_name": "mrinaldi/Gettone",
"attention_type": [],
"bias": false,
"block_size_for_attention": 128,
"bos_token_id": 5,
"cloze_probability": null,
"compile_flexattn": false,
"custom_layers": {},
"decoder": null,
"default_layer": {
"attn_impl": "flash",
"ffn_activation": "swiglu",
"hooks": {},
"normalization": "rmsnorm",
"normalization_position": "pre",
"positional_encoding": [
"rope",
"alibi"
],
"sliding_window_size": null
},
"encoder": null,
"entropy": null,
"eos_token_id": 6,
"ffn_factor": 3.0,
"has_entropy_model": null,
"has_text_autoencoder": null,
"hidden_size": 768,
"is_causal": true,
"loss_type": "normal",
"mask_token_id": 4,
"masked_substitution_rate": 0.15,
"max_position_embeddings": 1024,
"model_class": null,
"name": "CrazyModel",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"num_labels": 2,
"pad_token_id": 0,
"random_probability": null,
"rms_norm_eps": 1e-06,
"rope_theta": 10000.0,
"same_probability": null,
"sliding_type": null,
"tie_word_embeddings": false,
"tokenizer_name": null,
"tokenizer_type": null,
"training_objective": "crazy",
"vocab_size": 32768
},
"_model_class": "Autoregressive_Model",
"_tokenizer_name": "mrinaldi/Gettone",
"attention_type": [],
"auto_map": {
"AutoConfig": "modeling_matformer.MatformerConfig",
"AutoModel": "modeling_matformer.MatformerModel",
"AutoModelForCausalLM": "modeling_matformer.MatformerForCausalLM"
},
"bias": false,
"block_size_for_attention": 128,
"bos_token_id": 5,
"cloze_probability": null,
"compile_flexattn": false,
"custom_layers": {},
"decoder": null,
"default_layer": {
"attn_impl": "flash",
"ffn_activation": "swiglu",
"hooks": {},
"normalization": "rmsnorm",
"normalization_position": "pre",
"positional_encoding": [
"rope",
"alibi"
],
"sliding_window_size": null
},
"encoder": null,
"entropy": null,
"eos_token_id": 6,
"ffn_factor": 3.0,
"has_entropy_model": null,
"has_text_autoencoder": null,
"hidden_size": 768,
"is_causal": true,
"loss_type": "normal",
"mask_token_id": 4,
"masked_substitution_rate": 0.15,
"max_position_embeddings": 1024,
"model_class": null,
"model_type": "matformer",
"name": "CrazyModel",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"random_probability": null,
"rms_norm_eps": 1e-06,
"rope_theta": 10000.0,
"same_probability": null,
"sliding_type": null,
"tokenizer_name": null,
"tokenizer_type": null,
"training_objective": "crazy",
"transformers_version": "4.57.1",
"use_cache": true,
"vocab_size": 32768
}
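For reference, below is a minimal sketch of how a checkpoint carrying this config would typically be loaded with Hugging Face transformers. Because `auto_map` points at the custom `modeling_matformer` module (`MatformerConfig`, `MatformerModel`, `MatformerForCausalLM`), loading requires `trust_remote_code=True`. The repo id used here is a placeholder assumption; only the tokenizer name `mrinaldi/Gettone` appears in the config itself.

```python
from transformers import AutoConfig, AutoModelForCausalLM, AutoTokenizer

# Placeholder assumption: the actual model repo id is not given in the config.
repo_id = "path/to/matformer-checkpoint"

# `trust_remote_code=True` is required because `auto_map` resolves the config
# and model classes to the custom `modeling_matformer` module shipped with
# the checkpoint, not to classes built into transformers.
config = AutoConfig.from_pretrained(repo_id, trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, trust_remote_code=True)

# The tokenizer is named explicitly in the config under `_tokenizer_name`.
tokenizer = AutoTokenizer.from_pretrained("mrinaldi/Gettone")

print(config.model_type)         # "matformer"
print(config.num_hidden_layers)  # 12
print(config.hidden_size)        # 768
```

Note that the config was serialized with transformers 4.57.1 (`transformers_version`) and embeds the original training-time settings verbatim under `_matformer_config_dict`, alongside the flattened top-level copies that transformers reads at load time.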