TriviumLabs committed on
Commit
13b7f87
·
verified ·
1 Parent(s): 3fcc3a4

Upload Qwen3_5ForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -1
  2. model.safetensors +2 -2
config.json CHANGED
@@ -6,7 +6,7 @@
6
  "attention_dropout": 0.0,
7
  "attn_output_gate": true,
8
  "bos_token_id": null,
9
- "dtype": "bfloat16",
10
  "eos_token_id": 248044,
11
  "full_attention_interval": 4,
12
  "head_dim": 256,
 
6
  "attention_dropout": 0.0,
7
  "attn_output_gate": true,
8
  "bos_token_id": null,
9
+ "dtype": "float16",
10
  "eos_token_id": 248044,
11
  "full_attention_interval": 4,
12
  "head_dim": 256,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:4bc74bcab79f6a4a5184269df79ec199c864908cdca2579406cd612bed8642f3
3
- size 3763692048
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:9b4e029995d02e85ca0039fc3c90cf511546193503fac851199e144b9c29d594
3
+ size 3763691728