Michael committed on
Commit 4cc27f1 · verified · 1 parent: 0310c6f

Upload StableLmForCausalLM

Files changed (3):
  1. config.json +4 -4
  2. generation_config.json +4 -4
  3. model.safetensors +1 -1
config.json CHANGED
@@ -1,11 +1,11 @@
 {
-  "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/sx14",
+  "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/sc14",
   "architectures": [
     "StableLmForCausalLM"
   ],
   "attention_dropout": 0.0,
-  "bos_token_id": 100289,
-  "eos_token_id": 100290,
+  "bos_token_id": 100257,
+  "eos_token_id": 100257,
   "hidden_act": "silu",
   "hidden_dropout": 0.0,
   "hidden_size": 2048,
@@ -23,7 +23,7 @@
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
   "transformers_version": "4.39.1",
-  "use_cache": false,
+  "use_cache": true,
   "use_qkv_bias": true,
   "vocab_size": 100352
 }
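The substantive change here retargets both special tokens to id 100257 (in cl100k-style vocabularies that id is conventionally <|endoftext|>) and re-enables the KV cache by default. A minimal sketch of checking the updated fields with transformers' AutoConfig; the repo id is a placeholder, not taken from this commit:

from transformers import AutoConfig

# Hypothetical repo id; substitute the repository this commit belongs to.
config = AutoConfig.from_pretrained("org/model-repo")

# After this commit, both special-token ids are 100257 and the KV cache
# is enabled by default.
print(config.bos_token_id, config.eos_token_id)  # expected: 100257 100257
print(config.use_cache)                          # expected: True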
generation_config.json CHANGED
@@ -1,7 +1,7 @@
 {
   "_from_model_config": true,
-  "bos_token_id": 100289,
-  "eos_token_id": 100290,
-  "transformers_version": "4.39.1",
-  "use_cache": false
+  "bos_token_id": 100257,
+  "do_sample": true,
+  "eos_token_id": 100257,
+  "transformers_version": "4.39.1"
 }
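This file also flips the default decoding mode: use_cache drops out of generation_config.json (it now lives in config.json, set to true above) and "do_sample": true makes model.generate() sample rather than greedy-decode by default. A sketch reconstructing the post-commit generation config locally, using only values visible in the diff:

from transformers import GenerationConfig

# Values copied from the "+" side of the diff above.
gen_config = GenerationConfig(
    bos_token_id=100257,
    eos_token_id=100257,
    do_sample=True,
)
print(gen_config.do_sample)  # True: generate() samples unless told otherwise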
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:55a900e9f92f64bcddd973f04fc379cb57cc601ed7ab1a0b87993807b0b8a7b1
+oid sha256:86a4351423a54f2c964a37cb238d769de1618c79aedeb5d7b7afbbba48d7d909
 size 3289069520
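Only the Git LFS pointer's oid changes; the payload size is identical (3,289,069,520 bytes), which is consistent with re-saving the same weights under the updated config. A sketch for verifying a downloaded model.safetensors against the new pointer, assuming the file sits in the working directory:

import hashlib

# Expected digest, taken from the "+" oid line in the pointer above.
EXPECTED = "86a4351423a54f2c964a37cb238d769de1618c79aedeb5d7b7afbbba48d7d909"

# Stream the file in 1 MiB chunks so the ~3 GB of weights never sit in memory.
digest = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        digest.update(chunk)

print("OK" if digest.hexdigest() == EXPECTED else "hash mismatch")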