Michael committed on
Commit
772067b
·
verified ·
1 Parent(s): e92f33e

Upload StableLmForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +3 -3
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/04_01_2024_16_26",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 100289,
8
- "eos_token_id": 100290,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
 
1
  {
2
+ "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/04_02_2024_00_33",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 100257,
8
+ "eos_token_id": 100257,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:64bd0654033ab6410f9e30795fd3b7e1dd89f3738f7704975d8ebefcb503848e
3
  size 3289069520
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:372cc19fbf7c8ebab863e3fc6f29d30b71273d5f4f992c238b1bf3bc481220fc
3
  size 3289069520