diff --git a/gold-causal/best/config.json b/gold-causal/best/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/best/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/best/generation_config.json b/gold-causal/best/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/best/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/last/config.json b/gold-causal/last/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/last/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/last/generation_config.json b/gold-causal/last/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/last/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/step_001180/config.json b/gold-causal/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/step_001180/generation_config.json b/gold-causal/step_001180/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/step_001180/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/step_002360/config.json b/gold-causal/step_002360/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/step_002360/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/step_002360/generation_config.json b/gold-causal/step_002360/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/step_002360/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/step_003540/config.json b/gold-causal/step_003540/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/step_003540/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/step_003540/generation_config.json b/gold-causal/step_003540/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/step_003540/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/step_004720/config.json b/gold-causal/step_004720/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/step_004720/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/step_004720/generation_config.json b/gold-causal/step_004720/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/step_004720/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-causal/step_005900/config.json b/gold-causal/step_005900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-causal/step_005900/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-causal/step_005900/generation_config.json b/gold-causal/step_005900/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-causal/step_005900/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/best/config.json b/gold-constant/best/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/best/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/best/generation_config.json b/gold-constant/best/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/best/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/last/config.json b/gold-constant/last/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/last/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/last/generation_config.json b/gold-constant/last/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/last/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/step_001180/config.json b/gold-constant/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/step_001180/generation_config.json b/gold-constant/step_001180/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/step_001180/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/step_002360/config.json b/gold-constant/step_002360/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/step_002360/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/step_002360/generation_config.json b/gold-constant/step_002360/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/step_002360/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/step_003540/config.json b/gold-constant/step_003540/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/step_003540/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/step_003540/generation_config.json b/gold-constant/step_003540/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/step_003540/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/step_004720/config.json b/gold-constant/step_004720/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/step_004720/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/step_004720/generation_config.json b/gold-constant/step_004720/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/step_004720/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-constant/step_005900/config.json b/gold-constant/step_005900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-constant/step_005900/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-constant/step_005900/generation_config.json b/gold-constant/step_005900/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-constant/step_005900/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/best/config.json b/gold-cosine/best/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/best/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/best/generation_config.json b/gold-cosine/best/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/best/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/last/config.json b/gold-cosine/last/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/last/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/last/generation_config.json b/gold-cosine/last/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/last/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/step_001180/config.json b/gold-cosine/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/step_001180/generation_config.json b/gold-cosine/step_001180/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/step_001180/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/step_002360/config.json b/gold-cosine/step_002360/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/step_002360/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/step_002360/generation_config.json b/gold-cosine/step_002360/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/step_002360/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/step_003540/config.json b/gold-cosine/step_003540/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/step_003540/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/step_003540/generation_config.json b/gold-cosine/step_003540/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/step_003540/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/step_004720/config.json b/gold-cosine/step_004720/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/step_004720/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/step_004720/generation_config.json b/gold-cosine/step_004720/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/step_004720/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-cosine/step_005900/config.json b/gold-cosine/step_005900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-cosine/step_005900/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-cosine/step_005900/generation_config.json b/gold-cosine/step_005900/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-cosine/step_005900/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/best/config.json b/gold-linear/best/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/best/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/best/generation_config.json b/gold-linear/best/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/best/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/last/config.json b/gold-linear/last/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/last/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/last/generation_config.json b/gold-linear/last/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/last/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/step_001180/config.json b/gold-linear/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/step_001180/generation_config.json b/gold-linear/step_001180/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/step_001180/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/step_002360/config.json b/gold-linear/step_002360/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/step_002360/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/step_002360/generation_config.json b/gold-linear/step_002360/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/step_002360/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/step_003540/config.json b/gold-linear/step_003540/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/step_003540/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/step_003540/generation_config.json b/gold-linear/step_003540/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/step_003540/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/step_004720/config.json b/gold-linear/step_004720/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/step_004720/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/step_004720/generation_config.json b/gold-linear/step_004720/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/step_004720/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-linear/step_005900/config.json b/gold-linear/step_005900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-linear/step_005900/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-linear/step_005900/generation_config.json b/gold-linear/step_005900/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-linear/step_005900/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-masked/step_001180/config.json b/gold-masked/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-masked/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/best/config.json b/gold-shift/best/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/best/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/best/generation_config.json b/gold-shift/best/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/best/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/last/config.json b/gold-shift/last/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/last/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/last/generation_config.json b/gold-shift/last/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/last/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/step_001180/config.json b/gold-shift/step_001180/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/step_001180/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/step_001180/generation_config.json b/gold-shift/step_001180/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/step_001180/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/step_001180/model.safetensors b/gold-shift/step_001180/model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d04b2a4f74d15f9875f70ba7a021a5406e63be55
--- /dev/null
+++ b/gold-shift/step_001180/model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:8734cd215ec6c1ad1dbbcda0b8f7a6b6811f58b86f2bf4424afdc9e0bec8956f
+size 442333936
diff --git a/gold-shift/step_001180/trainer_state.pt b/gold-shift/step_001180/trainer_state.pt
new file mode 100644
index 0000000000000000000000000000000000000000..fc8305f0c90085cc41dc7aeba1d34a7ca4419258
--- /dev/null
+++ b/gold-shift/step_001180/trainer_state.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a774add1046222e8460d718c1101aa8901b57efff6dcabe924e3e9a52d1a71bb
+size 884221979
diff --git a/gold-shift/step_002360/config.json b/gold-shift/step_002360/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/step_002360/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/step_002360/generation_config.json b/gold-shift/step_002360/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/step_002360/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/step_002360/trainer_state.pt b/gold-shift/step_002360/trainer_state.pt
new file mode 100644
index 0000000000000000000000000000000000000000..1825a1bed758f615cb1993a68030f2f51854db37
--- /dev/null
+++ b/gold-shift/step_002360/trainer_state.pt
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d66ceb7b97a6d826168ac95768ba4838e5205a17a26f9fe3799c740e643a032d
+size 884221979
diff --git a/gold-shift/step_003540/config.json b/gold-shift/step_003540/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/step_003540/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/step_003540/generation_config.json b/gold-shift/step_003540/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/step_003540/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/step_004720/config.json b/gold-shift/step_004720/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/step_004720/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/step_004720/generation_config.json b/gold-shift/step_004720/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/step_004720/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}
diff --git a/gold-shift/step_005900/config.json b/gold-shift/step_005900/config.json
new file mode 100644
index 0000000000000000000000000000000000000000..f5800c6cb97e51281d294ee4e187a222051889d3
--- /dev/null
+++ b/gold-shift/step_005900/config.json
@@ -0,0 +1,33 @@
+{
+  "architectures": [
+    "FlexQwenForCausalLM"
+  ],
+  "cls_token_id": 1,
+  "dropout_rate": 0.0,
+  "dtype": "float32",
+  "embedding_dim": 768,
+  "head_dim": 64,
+  "hidden_dim": 1536,
+  "initializer_range": 0.02,
+  "max_position_embeddings": 512,
+  "model_type": "flexqwen",
+  "moe_hidden_dim": 512,
+  "moe_num_experts": 0,
+  "moe_num_experts_per_token": -1,
+  "num_attention_heads": 12,
+  "num_hidden_layers": 12,
+  "num_kv_groups": 4,
+  "pad_token_id": 3,
+  "qk_norm": true,
+  "rms_norm_eps": 1e-06,
+  "rope_theta": 10000,
+  "tie_word_embeddings": true,
+  "transformers_version": "5.3.0",
+  "vocab_size": 64000,
+  "auto_map": {
+    "AutoConfig": "flexqwen.FlexQwenConfig",
+    "AutoModel": "flexqwen.FlexQwen",
+    "AutoModelForCausalLM": "flexqwen.FlexQwenForCausalLM",
+    "AutoModelForSequenceClassification": "flexqwen.FlexQwenForSequenceClassification"
+  }
+}
\ No newline at end of file
diff --git a/gold-shift/step_005900/generation_config.json b/gold-shift/step_005900/generation_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..cdde72fc7644fe801e3a54754b0594ec498f2120
--- /dev/null
+++ b/gold-shift/step_005900/generation_config.json
@@ -0,0 +1,7 @@
+{
+  "_from_model_config": true,
+  "output_attentions": false,
+  "output_hidden_states": false,
+  "pad_token_id": 3,
+  "transformers_version": "5.3.0"
+}