mrinaldi commited on
Commit
ea6268f
·
verified ·
1 Parent(s): 486bf2b

Upload folder using huggingface_hub

Browse files
Files changed (5) hide show
  1. README.md +38 -0
  2. checkpoint.ckpt +3 -0
  3. config.json +112 -0
  4. matformer_config.json +70 -0
  5. modeling_matformer.py +47 -0
README.md ADDED
@@ -0,0 +1,38 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - matformer
4
+ - custom-model
5
+ library_name: transformers
6
+ ---
7
+
8
+ # Matformer Model
9
+
10
+ Trained using [Matformer](https://github.com/mrinaldi97/matformer).
11
+
12
+ ## Installation
13
+
14
+ ```bash
15
+ pip install git+https://github.com/mrinaldi97/matformer.git
16
+ ```
17
+
18
+ ## Usage
19
+
20
+ ```python
21
+ import torch
22
+ from transformers import AutoModelForCausalLM, AutoTokenizer
23
+
24
+ model = AutoModelForCausalLM.from_pretrained(
25
+ "mrinaldi/albertina-micro-crazy-model",
26
+ trust_remote_code=True
27
+ )
28
+ tokenizer = AutoTokenizer.from_pretrained(model.config._tokenizer_name)
29
+ text = "The transformer model is a"
30
+ inputs = tokenizer(text, return_tensors='pt')['input_ids'].to(model.device)
31
+
32
+ with torch.no_grad():
33
+ outputs = model.generate(inputs, max_new_tokens=50)
34
+
35
+ generated = tokenizer.decode(outputs[0].tolist())
36
+
37
+ print(generated)
38
+ ```
checkpoint.ckpt ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:4db9e86aa5d50359b43f6801701c549790b8dd22366b14c09216a712652cb012
3
+ size 1340791721
config.json ADDED
@@ -0,0 +1,112 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "_checkpoint_path": "/home/matteo/Albertone/Matformer/matformer/checkpoints_crazy/crazy_model_20251125_235754_last.ckpt",
3
+ "_matformer_config_dict": {
4
+ "_checkpoint_path": "/home/matteo/Albertone/Matformer/matformer/checkpoints_crazy/crazy_model_20251125_235754_last.ckpt",
5
+ "_model_class": "Autoregressive_Model",
6
+ "_tokenizer_name": "mrinaldi/Gettone",
7
+ "attention_type": [],
8
+ "bias": false,
9
+ "block_size_for_attention": 128,
10
+ "bos_token_id": 5,
11
+ "cloze_probability": null,
12
+ "compile_flexattn": false,
13
+ "custom_layers": {},
14
+ "decoder": null,
15
+ "default_layer": {
16
+ "attn_impl": "flash",
17
+ "ffn_activation": "swiglu",
18
+ "hooks": {},
19
+ "normalization": "rmsnorm",
20
+ "normalization_position": "pre",
21
+ "positional_encoding": [
22
+ "rope",
23
+ "alibi"
24
+ ],
25
+ "sliding_window_size": null
26
+ },
27
+ "encoder": null,
28
+ "entropy": null,
29
+ "eos_token_id": 6,
30
+ "ffn_factor": 3.0,
31
+ "has_entropy_model": null,
32
+ "has_text_autoencoder": null,
33
+ "hidden_size": 768,
34
+ "is_causal": true,
35
+ "loss_type": "normal",
36
+ "mask_token_id": 4,
37
+ "masked_substitution_rate": 0.15,
38
+ "max_position_embeddings": 1024,
39
+ "model_class": null,
40
+ "name": "CrazyModel",
41
+ "num_attention_heads": 12,
42
+ "num_hidden_layers": 12,
43
+ "num_labels": 2,
44
+ "pad_token_id": 0,
45
+ "random_probability": null,
46
+ "rms_norm_eps": 1e-06,
47
+ "rope_theta": 10000.0,
48
+ "same_probability": null,
49
+ "sliding_type": null,
50
+ "tie_word_embeddings": false,
51
+ "tokenizer_name": null,
52
+ "tokenizer_type": null,
53
+ "training_objective": "crazy",
54
+ "vocab_size": 32768
55
+ },
56
+ "_model_class": "Autoregressive_Model",
57
+ "_tokenizer_name": "mrinaldi/Gettone",
58
+ "attention_type": [],
59
+ "auto_map": {
60
+ "AutoConfig": "modeling_matformer.MatformerConfig",
61
+ "AutoModel": "modeling_matformer.MatformerModel",
62
+ "AutoModelForCausalLM": "modeling_matformer.MatformerForCausalLM"
63
+ },
64
+ "bias": false,
65
+ "block_size_for_attention": 128,
66
+ "bos_token_id": 5,
67
+ "cloze_probability": null,
68
+ "compile_flexattn": false,
69
+ "custom_layers": {},
70
+ "decoder": null,
71
+ "default_layer": {
72
+ "attn_impl": "flash",
73
+ "ffn_activation": "swiglu",
74
+ "hooks": {},
75
+ "normalization": "rmsnorm",
76
+ "normalization_position": "pre",
77
+ "positional_encoding": [
78
+ "rope",
79
+ "alibi"
80
+ ],
81
+ "sliding_window_size": null
82
+ },
83
+ "encoder": null,
84
+ "entropy": null,
85
+ "eos_token_id": 6,
86
+ "ffn_factor": 3.0,
87
+ "has_entropy_model": null,
88
+ "has_text_autoencoder": null,
89
+ "hidden_size": 768,
90
+ "is_causal": true,
91
+ "loss_type": "normal",
92
+ "mask_token_id": 4,
93
+ "masked_substitution_rate": 0.15,
94
+ "max_position_embeddings": 1024,
95
+ "model_class": null,
96
+ "model_type": "matformer",
97
+ "name": "CrazyModel",
98
+ "num_attention_heads": 12,
99
+ "num_hidden_layers": 12,
100
+ "pad_token_id": 0,
101
+ "random_probability": null,
102
+ "rms_norm_eps": 1e-06,
103
+ "rope_theta": 10000.0,
104
+ "same_probability": null,
105
+ "sliding_type": null,
106
+ "tokenizer_name": null,
107
+ "tokenizer_type": null,
108
+ "training_objective": "crazy",
109
+ "transformers_version": "4.57.1",
110
+ "use_cache": true,
111
+ "vocab_size": 32768
112
+ }
matformer_config.json ADDED
@@ -0,0 +1,70 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "model_class": "Autoregressive_Model",
3
+ "model_config": {
4
+ "name": "CrazyModel",
5
+ "hidden_size": 768,
6
+ "ffn_factor": 3.0,
7
+ "vocab_size": 32768,
8
+ "bos_token_id": 5,
9
+ "eos_token_id": 6,
10
+ "pad_token_id": 0,
11
+ "mask_token_id": 4,
12
+ "masked_substitution_rate": 0.15,
13
+ "num_hidden_layers": 12,
14
+ "num_attention_heads": 12,
15
+ "tie_word_embeddings": false,
16
+ "rms_norm_eps": 1e-06,
17
+ "attention_type": [],
18
+ "max_position_embeddings": 1024,
19
+ "block_size_for_attention": 128,
20
+ "rope_theta": 10000.0,
21
+ "compile_flexattn": false,
22
+ "bias": false,
23
+ "training_objective": "crazy",
24
+ "is_causal": true,
25
+ "default_layer": {
26
+ "attn_impl": "flash",
27
+ "sliding_window_size": null,
28
+ "positional_encoding": [
29
+ "rope",
30
+ "alibi"
31
+ ],
32
+ "normalization": "rmsnorm",
33
+ "normalization_position": "pre",
34
+ "ffn_activation": "swiglu",
35
+ "hooks": {}
36
+ },
37
+ "custom_layers": {}
38
+ },
39
+ "training": {
40
+ "optimizer": "muon",
41
+ "lr_scheduling": true,
42
+ "lr": 0.0008,
43
+ "final_lr": 4e-05,
44
+ "hold_steps": 0.31,
45
+ "weight_decay": 0.01,
46
+ "scheduler": "custom",
47
+ "gradient_clip_val": 1.0,
48
+ "warmup_steps": 0.09,
49
+ "max_epochs": 1,
50
+ "accumulate_grad_batches": 5,
51
+ "seed": 27,
52
+ "save_every_n_steps": 100000,
53
+ "checkpoint_name": "crazy_model"
54
+ },
55
+ "tokenizer": {
56
+ "type": "huggingface",
57
+ "pretrained_name": "mrinaldi/Gettone",
58
+ "varlen_strategy": "unpadding"
59
+ },
60
+ "data": {
61
+ "data_root": "/home/matteo/Albertone/Albertina/micro_albertina",
62
+ "batch_size": 48,
63
+ "num_workers": 1,
64
+ "mdat_strategy": "Gettone_1024",
65
+ "mdat_view": null
66
+ },
67
+ "save_dir": "./checkpoints_crazy",
68
+ "wandb_project": "CrazyModel",
69
+ "wandb_run_name": "CrazyModel_ALIBI"
70
+ }
modeling_matformer.py ADDED
@@ -0,0 +1,47 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# modeling_matformer.py
"""Bootstrap module for Matformer checkpoints loaded with trust_remote_code.

Makes the Matformer integration classes importable, preferring (in order):
  1. a local checkout pointed to by the MATFORMER_ROOT environment variable,
  2. an already-installed `matformer` package,
  3. a best-effort `pip install` from the project's GitHub repository.

Exports: MatformerForCausalLM, MatformerForMaskedLM,
MatformerForSequenceClassification, MatformerModel, MatformerConfig.
Side effect: calls register_matformer() so the Auto* classes resolve.
"""
import os
import sys

# Let a local development checkout shadow any installed copy.
matformer_root = os.getenv("MATFORMER_ROOT")
if matformer_root:
    matformer_root = os.path.abspath(os.path.expanduser(matformer_root))
    if matformer_root not in sys.path:
        sys.path.insert(0, matformer_root)

try:
    from matformer.huggingface_integration import (
        MatformerForCausalLM,
        MatformerForMaskedLM,
        MatformerForSequenceClassification,
        MatformerModel,
        MatformerConfig,
        register_matformer,
    )
    register_matformer()
except ImportError:
    # Package not importable: fall back to installing it from GitHub.
    # NOTE(review): this runs pip at import time, which is intentional for
    # trust_remote_code convenience but fails in offline environments —
    # hence the explicit manual-install instructions in the error below.
    import subprocess

    print("Installing Matformer from GitHub...")
    try:
        subprocess.check_call([
            sys.executable, "-m", "pip", "install",
            "git+https://github.com/mrinaldi97/matformer.git",
        ])

        # Retry the import now that the package should be installed.
        from matformer.huggingface_integration import (
            MatformerForCausalLM,
            MatformerForMaskedLM,
            MatformerForSequenceClassification,
            MatformerModel,
            MatformerConfig,
            register_matformer,
        )
        register_matformer()

    except Exception as install_error:
        # Chain the original failure so the user sees the root cause.
        raise ImportError(
            "Failed to install Matformer. Install manually:\n"
            "  pip install git+https://github.com/mrinaldi97/matformer.git\n"
            "Or set MATFORMER_ROOT environment variable"
        ) from install_error