muverqqw committed on
Commit
8a48e52
·
1 Parent(s): 2696785

Step Final_Loss6.35_Tok19.69M

Browse files
Files changed (1) hide show
  1. config.json +5 -1
config.json CHANGED
@@ -1,10 +1,14 @@
1
  {
 
 
 
2
  "attention_dropout": 0.0,
3
  "auto_map": {
4
  "AutoConfig": "configuration_alinlight.AlinlightConfig",
5
  "AutoModelForCausalLM": "modeling_alinlight.AlinlightForCausalLM"
6
  },
7
  "bos_token_id": 1,
 
8
  "eos_token_id": 2,
9
  "hidden_size": 2048,
10
  "initializer_range": 0.02,
@@ -20,6 +24,6 @@
20
  "rope_theta": 10000.0,
21
  "sliding_window": 4096,
22
  "transformers_version": "4.57.3",
23
- "use_cache": true,
24
  "vocab_size": 128000
25
  }
 
1
  {
2
+ "architectures": [
3
+ "AlinlightForCausalLM"
4
+ ],
5
  "attention_dropout": 0.0,
6
  "auto_map": {
7
  "AutoConfig": "configuration_alinlight.AlinlightConfig",
8
  "AutoModelForCausalLM": "modeling_alinlight.AlinlightForCausalLM"
9
  },
10
  "bos_token_id": 1,
11
+ "dtype": "bfloat16",
12
  "eos_token_id": 2,
13
  "hidden_size": 2048,
14
  "initializer_range": 0.02,
 
24
  "rope_theta": 10000.0,
25
  "sliding_window": 4096,
26
  "transformers_version": "4.57.3",
27
+ "use_cache": false,
28
  "vocab_size": 128000
29
  }