yzhangcs committed on
Commit
7c18483
·
1 Parent(s): 39d60a1

Remove the `norm_first` option

Browse files
Files changed (3) hide show
  1. config.json +3 -2
  2. generation_config.json +2 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -3,6 +3,7 @@
3
  "architectures": [
4
  "GSAForCausalLM"
5
  ],
 
6
  "bos_token_id": 1,
7
  "clamp_max": null,
8
  "clamp_min": null,
@@ -33,11 +34,11 @@
33
  "share_conv_kernel": true,
34
  "tie_word_embeddings": false,
35
  "torch_dtype": "bfloat16",
36
- "transformers_version": "4.40.2",
37
  "use_cache": true,
38
  "use_norm": true,
39
  "use_output_gate": false,
40
  "use_rope": false,
41
  "use_short_conv": false,
42
  "vocab_size": 32000
43
- }
 
3
  "architectures": [
4
  "GSAForCausalLM"
5
  ],
6
+ "attn": null,
7
  "bos_token_id": 1,
8
  "clamp_max": null,
9
  "clamp_min": null,
 
34
  "share_conv_kernel": true,
35
  "tie_word_embeddings": false,
36
  "torch_dtype": "bfloat16",
37
+ "transformers_version": "4.48.2",
38
  "use_cache": true,
39
  "use_norm": true,
40
  "use_output_gate": false,
41
  "use_rope": false,
42
  "use_short_conv": false,
43
  "vocab_size": 32000
44
+ }
generation_config.json CHANGED
@@ -2,5 +2,6 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
- "transformers_version": "4.40.2"
 
6
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 1,
4
  "eos_token_id": 2,
5
+ "pad_token_id": 2,
6
+ "transformers_version": "4.48.2"
7
  }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:7e1501b1d6a039918f18a9919e5356b1971997fc7665f2cc079d924f68630785
3
  size 2753785520
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bf078acce9ada5103e493a130c9989865e6d6b4b86595402f6ac228de0baed34
3
  size 2753785520