mmcarpi committed
Commit 22033fb (verified)
1 Parent(s): 48c03e0

Add files using upload-large-folder tool

This view is limited to 50 files because it contains too many changes.
Files changed (50)
  1. gold-causal/best/config.json +33 -0
  2. gold-causal/best/generation_config.json +7 -0
  3. gold-causal/last/config.json +33 -0
  4. gold-causal/last/generation_config.json +7 -0
  5. gold-causal/step_001180/config.json +33 -0
  6. gold-causal/step_001180/generation_config.json +7 -0
  7. gold-causal/step_002360/config.json +33 -0
  8. gold-causal/step_002360/generation_config.json +7 -0
  9. gold-causal/step_003540/config.json +33 -0
  10. gold-causal/step_003540/generation_config.json +7 -0
  11. gold-causal/step_004720/config.json +33 -0
  12. gold-causal/step_004720/generation_config.json +7 -0
  13. gold-causal/step_005900/config.json +33 -0
  14. gold-causal/step_005900/generation_config.json +7 -0
  15. gold-constant/best/config.json +33 -0
  16. gold-constant/best/generation_config.json +7 -0
  17. gold-constant/last/config.json +33 -0
  18. gold-constant/last/generation_config.json +7 -0
  19. gold-constant/step_001180/config.json +33 -0
  20. gold-constant/step_001180/generation_config.json +7 -0
  21. gold-constant/step_002360/config.json +33 -0
  22. gold-constant/step_002360/generation_config.json +7 -0
  23. gold-constant/step_003540/config.json +33 -0
  24. gold-constant/step_003540/generation_config.json +7 -0
  25. gold-constant/step_004720/config.json +33 -0
  26. gold-constant/step_004720/generation_config.json +7 -0
  27. gold-constant/step_005900/config.json +33 -0
  28. gold-constant/step_005900/generation_config.json +7 -0
  29. gold-cosine/best/config.json +33 -0
  30. gold-cosine/best/generation_config.json +7 -0
  31. gold-cosine/last/config.json +33 -0
  32. gold-cosine/last/generation_config.json +7 -0
  33. gold-cosine/step_001180/config.json +33 -0
  34. gold-cosine/step_001180/generation_config.json +7 -0
  35. gold-cosine/step_002360/config.json +33 -0
  36. gold-cosine/step_002360/generation_config.json +7 -0
  37. gold-cosine/step_003540/config.json +33 -0
  38. gold-cosine/step_003540/generation_config.json +7 -0
  39. gold-cosine/step_004720/config.json +33 -0
  40. gold-cosine/step_004720/generation_config.json +7 -0
  41. gold-cosine/step_005900/config.json +33 -0
  42. gold-cosine/step_005900/generation_config.json +7 -0
  43. gold-linear/best/config.json +33 -0
  44. gold-linear/best/generation_config.json +7 -0
  45. gold-linear/last/config.json +33 -0
  46. gold-linear/last/generation_config.json +7 -0
  47. gold-linear/step_001180/config.json +33 -0
  48. gold-linear/step_001180/generation_config.json +7 -0
  49. gold-linear/step_002360/config.json +33 -0
  50. gold-linear/step_002360/generation_config.json +7 -0
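
Note: because this view is capped at 50 files, the later checkpoints (for example the remaining gold-linear steps) do not appear above. One way to inspect the full checkpoint tree is to list the repository programmatically; a minimal sketch, using a placeholder repo id since the commit page does not name the repository:

from huggingface_hub import HfApi

# Placeholder repo id: the actual repository name is not shown on this page.
api = HfApi()
files = api.list_repo_files("mmcarpi/REPO_NAME")

# The diff shows four gold-* variants, each with best/, last/, and step_* checkpoints.
for prefix in ("gold-causal", "gold-constant", "gold-cosine", "gold-linear"):
    checkpoints = sorted({f.split("/")[1] for f in files if f.startswith(prefix + "/")})
    print(prefix, checkpoints)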
gold-causal/best/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
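
The auto_map block above means these checkpoints ship custom modeling code (a flexqwen module) rather than an architecture built into transformers, so loading them requires trust_remote_code=True. A minimal sketch, again with a placeholder repo id and assuming a flexqwen.py module is present in the repository:

from transformers import AutoConfig, AutoModelForCausalLM

# Placeholder repo id; "gold-causal/best" selects one checkpoint subfolder.
repo_id = "mmcarpi/REPO_NAME"

# auto_map routes AutoConfig/AutoModelForCausalLM to the classes in flexqwen.py,
# which only load when remote code is explicitly trusted.
config = AutoConfig.from_pretrained(repo_id, subfolder="gold-causal/best", trust_remote_code=True)
model = AutoModelForCausalLM.from_pretrained(repo_id, subfolder="gold-causal/best", trust_remote_code=True)

Note that this diff only adds config files; whether the weight files (e.g. model.safetensors) landed in the same commit is not visible in this truncated view.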
gold-causal/best/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
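
Each checkpoint directory also carries its own generation_config.json, which can be read without instantiating the model, e.g. to confirm the pad token. A sketch under the same placeholder-repo assumption:

from transformers import GenerationConfig

# Placeholder repo id; each gold-*/checkpoint pair has an identical copy of this file.
gen_config = GenerationConfig.from_pretrained("mmcarpi/REPO_NAME", subfolder="gold-causal/best")
print(gen_config.pad_token_id)  # 3, matching pad_token_id in config.json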
gold-causal/last/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/last/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-causal/step_001180/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/step_001180/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-causal/step_002360/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/step_002360/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-causal/step_003540/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/step_003540/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-causal/step_004720/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/step_004720/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-causal/step_005900/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-causal/step_005900/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/best/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/best/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/last/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/last/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/step_001180/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/step_001180/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/step_002360/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/step_002360/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/step_003540/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/step_003540/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/step_004720/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/step_004720/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-constant/step_005900/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-constant/step_005900/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/best/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/best/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/last/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/last/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/step_001180/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/step_001180/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/step_002360/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/step_002360/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/step_003540/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/step_003540/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/step_004720/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/step_004720/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-cosine/step_005900/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-cosine/step_005900/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-linear/best/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-linear/best/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-linear/last/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-linear/last/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-linear/step_001180/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-linear/step_001180/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }
gold-linear/step_002360/config.json ADDED
@@ -0,0 +1,33 @@
+ {
+   "architectures": [
+     "FlexQwenForCausalLM"
+   ],
+   "cls_token_id": 1,
+   "dropout_rate": 0.0,
+   "dtype": "float32",
+   "embedding_dim": 768,
+   "head_dim": 64,
+   "hidden_dim": 1536,
+   "initializer_range": 0.02,
+   "max_position_embeddings": 512,
+   "model_type": "flexqwen",
+   "moe_hidden_dim": 512,
+   "moe_num_experts": 0,
+   "moe_num_experts_per_token": -1,
+   "num_attention_heads": 12,
+   "num_hidden_layers": 12,
+   "num_kv_groups": 4,
+   "pad_token_id": 3,
+   "qk_norm": true,
+   "rms_norm_eps": 1e-06,
+   "rope_theta": 10000,
+   "tie_word_embeddings": true,
+   "transformers_version": "5.3.0",
+   "vocab_size": 64000,
+   "auto_map": {
+     "AutoConfig": "flexqwen.FlexQwenConfig",
+     "AutoModel": "flexqwen.FlexQwen",
+     "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+     "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+   }
+ }
gold-linear/step_002360/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "output_attentions": false,
+   "output_hidden_states": false,
+   "pad_token_id": 3,
+   "transformers_version": "5.3.0"
+ }