clp committed on
Commit
d9e4f3d
·
verified ·
1 Parent(s): 5a436ba

Trained with Unsloth

Browse files

Upload model trained with Unsloth 2x faster

Files changed (2) hide show
  1. config.json +3 -2
  2. generation_config.json +1 -1
config.json CHANGED
@@ -11,6 +11,7 @@
11
  128008,
12
  128009
13
  ],
 
14
  "hidden_act": "silu",
15
  "hidden_size": 4096,
16
  "initializer_range": 0.02,
@@ -46,8 +47,8 @@
46
  "rope_theta": 500000.0,
47
  "tie_word_embeddings": false,
48
  "torch_dtype": "float16",
49
- "transformers_version": "4.44.2",
50
- "unsloth_version": "2024.9.post2",
51
  "use_cache": true,
52
  "vocab_size": 128256
53
  }
 
11
  128008,
12
  128009
13
  ],
14
+ "head_dim": 128,
15
  "hidden_act": "silu",
16
  "hidden_size": 4096,
17
  "initializer_range": 0.02,
 
47
  "rope_theta": 500000.0,
48
  "tie_word_embeddings": false,
49
  "torch_dtype": "float16",
50
+ "transformers_version": "4.45.1",
51
+ "unsloth_version": "2024.9.post3",
52
  "use_cache": true,
53
  "vocab_size": 128256
54
  }
generation_config.json CHANGED
@@ -10,5 +10,5 @@
10
  "pad_token_id": 128004,
11
  "temperature": 0.6,
12
  "top_p": 0.9,
13
- "transformers_version": "4.44.2"
14
  }
 
10
  "pad_token_id": 128004,
11
  "temperature": 0.6,
12
  "top_p": 0.9,
13
+ "transformers_version": "4.45.1"
14
  }