OmarIDK committed on
Commit
28bba09
·
verified ·
1 Parent(s): 1b384a5

Upload Qwen3ForCausalLM

Browse files
Files changed (3) hide show
  1. config.json +1 -1
  2. generation_config.json +6 -0
  3. model.safetensors +2 -2
config.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "architectures": [
3
- "Qwen3Model"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
 
1
  {
2
  "architectures": [
3
+ "Qwen3ForCausalLM"
4
  ],
5
  "attention_bias": false,
6
  "attention_dropout": 0.0,
generation_config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "bos_token_id": 151643,
3
+ "eos_token_id": 151643,
4
+ "max_new_tokens": 2048,
5
+ "transformers_version": "4.52.2"
6
+ }
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:a8c85fec96e24d580c8f7659ee1cb3e4841d49a934f28b9d072d93bc0d1e3afa
3
- size 2384233112
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:0220d28eaa7fa4a0a5475663b48e5d54f23a829518fe8091c02c4443c623d3d8
3
+ size 2384234968