Michael committed on
Commit
82d2047
·
verified ·
1 Parent(s): 2355582

Upload StableLmForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +3 -3
  2. model.safetensors +1 -1
config.json CHANGED
@@ -1,11 +1,11 @@
1
  {
2
- "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/04_01_2024_06_05",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
- "bos_token_id": 100289,
8
- "eos_token_id": 100290,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
 
1
  {
2
+ "_name_or_path": "/home/hlw3/ckpt/llmhub/hf/04_01_2024_10_51",
3
  "architectures": [
4
  "StableLmForCausalLM"
5
  ],
6
  "attention_dropout": 0.0,
7
+ "bos_token_id": 100257,
8
+ "eos_token_id": 100257,
9
  "hidden_act": "silu",
10
  "hidden_dropout": 0.0,
11
  "hidden_size": 2048,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:c3be7b05012f83b4ccc8a3bd825908840ddd1e0276ff060f45a845bada6a83d5
3
  size 3289069520
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:923e328ef5b7ff059ddc11d860de7d215474b41b85233a11ce21f5558189f72d
3
  size 3289069520