DidulaThavishaPro committed
Commit 7d54e8f · verified · 1 Parent(s): f039f4b

(Trained with Unsloth)

Files changed (1)
  1. config.json +3 -2
config.json CHANGED
@@ -41,6 +41,7 @@
   ],
   "max_position_embeddings": 32768,
   "max_window_layers": 28,
+  "model_name": "DidulaThavishaPro/exp_11_0_sft_16bit_vllm",
   "model_type": "qwen2",
   "num_attention_heads": 28,
   "num_hidden_layers": 28,
@@ -51,9 +52,9 @@
   "rope_theta": 1000000.0,
   "sliding_window": null,
   "tie_word_embeddings": false,
-  "transformers_version": "4.56.2",
+  "transformers_version": "4.57.1",
   "unsloth_fixed": true,
-  "unsloth_version": "2025.10.10",
+  "unsloth_version": "2025.10.11",
   "use_cache": true,
   "use_sliding_window": false,
   "vocab_size": 152064