diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..590bc87060aefec6080008fe9bfefafce43ffd07
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "k_proj",
+    "o_proj",
+    "down_proj",
+    "gate_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..915f6d82221cca9040b0bb3f4c509bd014b1d19d
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_128/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:6f1fb99524d62fb79af445666801a84e91e21c6b661b2dec1cffb3e8b622c19c
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..590bc87060aefec6080008fe9bfefafce43ffd07
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "k_proj",
+    "o_proj",
+    "down_proj",
+    "gate_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7cb0d217b7538b64e630e6219517ab1fb168986c
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_192/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7a451728cf477be4a4e15b4fe2d64302710961042989101c16480abfe62b8c26
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b2b0eaf657b18da16cc9321dba0699e2a814d2f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..778c9b72d7f49c1f5103218f7e433f39fe6a4602
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_256/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bbfd16f689eb5119e11269f253dc9eb1504a96f73091e1d6932d8c462108a762
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b2b0eaf657b18da16cc9321dba0699e2a814d2f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f0d2403e03bff0bbf641981ea8849c5ee6e669c7
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_320/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4d50ef6d9c84b34bdda22be8f3d0989baa73604480b42c2e1bbc74da24af7632
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b2b0eaf657b18da16cc9321dba0699e2a814d2f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..95a0eb75077046591de621e3e469252d2b2523a2
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_384/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a64b26eec62e2b067f83546607c803010d245e51fab76a260968b944ef1ea14e
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b2b0eaf657b18da16cc9321dba0699e2a814d2f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..438471f3c4e272df8918160d10f546acefdb0dd8
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_448/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d768ffde3ce3412b87033edf42df9896bf5a964c16885a1c9ab6eb6a74375123
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..8b2b0eaf657b18da16cc9321dba0699e2a814d2f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "o_proj",
+    "q_proj",
+    "up_proj",
+    "k_proj",
+    "v_proj",
+    "gate_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..aefdd27154c5832431e9479a6e83b03449137498
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_512/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:4fcbae757ee3552da3cdc0e7775969069cd616937d3d9413bfeec09be238ba52
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..590bc87060aefec6080008fe9bfefafce43ffd07
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 16,
+  "target_modules": [
+    "k_proj",
+    "o_proj",
+    "down_proj",
+    "gate_proj",
+    "q_proj",
+    "up_proj",
+    "v_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 32,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..543c448a10c302d2617965b69e629ac65dfbeabc
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank16/global_step_64/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:2578414694c142126fdba73a11718d68178ca5b6f4f6dbb851a58e561f7eaca7
+size 119801496
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..14961e7e2f1af26be3d2f807bb90f89fc004cf45
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_128/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:1fe6185fe4d2ee1e35ffacfd2e07c7a58583bfd4d83e8f31d09cb046b165a3a7
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f80e6aa515a032f5e201e5d87ee09e32e6eff936
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_192/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:ecabed98da06e091980486ca97ffd64bcefca86da6da62c84d6fd89ce91e29be
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f0f48958aca91fb8d767d70198eae12f8a4e80bd
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_256/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:c697eea35c14797c8141135eb4d6dbb3baf5f6be92ac69134e7bce9874e01deb
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f90bf84a7504ce89f9c56b22021a22d20eb7608d
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_320/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:278824427889e589d4fecd55b33fb38189408f865295f8e2649ca8d2d97b99f1
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..b7efd145aba0fab19202652d56b6b673aec97a16
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_384/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:497e9f4fb588330bde941e871067729c531f1f19b99b622ef6ee7f1fadcbfbc6
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..882ef8d2f898e13c97fbc09a8c80b0bbb7f3b167
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_448/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:776e22339116930f3018a2f3b8fb3c2697ef89123100f610aaaeecdcf42a291d
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..fc02a00937c913f2b42a4e465d42180604618ce0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_512/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:52afade49505cacb116c3c7c814606d6775ef04d16cb73588036b135f4221c0b
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..e1a19c7a9b647dadbd48d9658b15cd0e42460cb0
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 32,
+  "target_modules": [
+    "k_proj",
+    "up_proj",
+    "down_proj",
+    "v_proj",
+    "q_proj",
+    "o_proj",
+    "gate_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 64,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d3df023d6d1741fa9714759c483351e6c4fe52ca
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank32/global_step_64/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:d2a7cdcd365628213d2cfa6afbdbaaa5b34e1c55c4a5151c57bb13aa64856509
+size 239536248
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..84beabf9b619bdd5b0ecb21fe752494c9850fb10
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_128/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b7b9b3c8427ecaff63b4a3b32591942eb14858e5c67f3255a7a9e6acbb3800fd
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..7de0764f0bc680046c1054539fa416c0518ca128
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_192/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:bf347ef2c78a2814e2ee63f9822ded3cccfa0c9aaf3c1334705c5fe54f255ccf
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..2a72dee63b50b483d379a5a9692cd24df712362d
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_256/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:13a6e06cf3d61d65e5e63e48a105986493761c23d3906e901f79836b4bd85faa
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..d8d669b614d50dcfd7d5e799406e7fbe6bcd4b08
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_320/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:fef5d4bfd5ed243c1ac6ae94430c697b6d0e5cba2f589f132720fb3afd8b8d19
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..e6f23bf410469bbf112f0cca75fc73fe955b8383
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_384/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:dcf380ab14b47f94e7bed10897afba3e9e3e87fe7081aae6839d2d4ae939e008
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..dc0a7926c765a84f94a626efa487dcc68b44e5f9
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_448/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b63300a374fa4a7f0f060c811c0f4ea0671f39a97b985f27d058115353531bae
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..338ff026bb52ff675f1da54dd2fb56a197c643dd
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_512/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:78e0bb7767cbde6789b99110af8b00a98c6bdd147d23e9f31e13bb078519dc78
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..57aef2590b385ad5bd5f93574bb5bafa396fbc2e
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 64,
+  "target_modules": [
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "v_proj",
+    "up_proj",
+    "down_proj",
+    "q_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 128,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..a02926a2b939727c1d4f079bfe8c8efeb6f8189f
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank64/global_step_64/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:9d71fd9a27da282dd2122f197317a221938aaa9f391f506020a8daa0408722ff
+size 479005032
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 8,
+  "target_modules": [
+    "up_proj",
+    "k_proj",
+    "gate_proj",
+    "o_proj",
+    "q_proj",
+    "v_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..f73e906984ce74f9aaceb46c094072094b718d56
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_128/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:a2baeef11332894cb3bc6cc15b91c60c0e3581dad844a84a0ed9befe37740788
+size 59933600
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 8,
+  "target_modules": [
+    "up_proj",
+    "k_proj",
+    "gate_proj",
+    "o_proj",
+    "q_proj",
+    "v_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..cb36ccaa0f57981ff22485815b80029425511145
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_192/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5c61a2c527490df11e4ba125fb2c6268348209d3a543d113b168663fb2799b33
+size 59933600
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 8,
+  "target_modules": [
+    "up_proj",
+    "k_proj",
+    "gate_proj",
+    "o_proj",
+    "q_proj",
+    "v_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..5e0c81d6a92a15024745bc77e1bed745b0f29094
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_256/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:5644ab2eb486c32c88de52d71fc35049f22d719a2e6dd633bdf7832d3190dcba
+size 59933600
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_config.json
@@ -0,0 +1,49 @@
+{
+  "task_type": "CAUSAL_LM",
+  "peft_type": "LORA",
+  "auto_mapping": null,
+  "peft_version": "0.18.1",
+  "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1",
+  "revision": null,
+  "inference_mode": false,
+  "r": 8,
+  "target_modules": [
+    "up_proj",
+    "k_proj",
+    "gate_proj",
+    "o_proj",
+    "q_proj",
+    "v_proj",
+    "down_proj"
+  ],
+  "exclude_modules": null,
+  "lora_alpha": 16,
+  "lora_dropout": 0.0,
+  "fan_in_fan_out": false,
+  "bias": "none",
+  "use_rslora": false,
+  "modules_to_save": null,
+  "init_lora_weights": true,
+  "layers_to_transform": null,
+  "layers_pattern": null,
+  "rank_pattern": {},
+  "alpha_pattern": {},
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "trainable_token_indices": null,
+  "loftq_config": {},
+  "eva_config": null,
+  "corda_config": null,
+  "use_dora": false,
+  "alora_invocation_tokens": null,
+  "use_qalora": false,
+  "qalora_group_size": 16,
+  "layer_replication": null,
+  "runtime_config": {
+    "ephemeral_gpu_offload": false
+  },
+  "lora_bias": false,
+  "target_parameters": null,
+  "arrow_config": null,
+  "ensure_weight_tying": false
+}
\ No newline at end of file
diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_model.safetensors
new file mode 100644
index 0000000000000000000000000000000000000000..38a791cf669fec6bb165395a33374403d7394580
--- /dev/null
+++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_320/actor/lora_adapter/adapter_model.safetensors
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:b766ff08d520417a225846f0100cd7fc07899af1a2c820f858fdebbf1090a4cd
+size 59933600
diff --git 
a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647 --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_config.json @@ -0,0 +1,49 @@ +{ + "task_type": "CAUSAL_LM", + "peft_type": "LORA", + "auto_mapping": null, + "peft_version": "0.18.1", + "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1", + "revision": null, + "inference_mode": false, + "r": 8, + "target_modules": [ + "up_proj", + "k_proj", + "gate_proj", + "o_proj", + "q_proj", + "v_proj", + "down_proj" + ], + "exclude_modules": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "fan_in_fan_out": false, + "bias": "none", + "use_rslora": false, + "modules_to_save": null, + "init_lora_weights": true, + "layers_to_transform": null, + "layers_pattern": null, + "rank_pattern": {}, + "alpha_pattern": {}, + "megatron_config": null, + "megatron_core": "megatron.core", + "trainable_token_indices": null, + "loftq_config": {}, + "eva_config": null, + "corda_config": null, + "use_dora": false, + "alora_invocation_tokens": null, + "use_qalora": false, + "qalora_group_size": 16, + "layer_replication": null, + "runtime_config": { + "ephemeral_gpu_offload": false + }, + "lora_bias": false, + "target_parameters": null, + "arrow_config": null, + "ensure_weight_tying": false +} \ No newline at end of file diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..ddefab0747e89cba613c0bec92e28b532b8b9647 --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_384/actor/lora_adapter/adapter_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:4e4edea85b52504c135362402956f8d601ac7c78c6ad67388804a182d7742526 +size 59933600 diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647 --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_config.json @@ -0,0 +1,49 @@ +{ + "task_type": "CAUSAL_LM", + "peft_type": "LORA", + "auto_mapping": null, + "peft_version": "0.18.1", + "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1", + "revision": null, + "inference_mode": false, + "r": 8, + "target_modules": [ + "up_proj", + "k_proj", + "gate_proj", + "o_proj", + "q_proj", + "v_proj", + "down_proj" + ], + "exclude_modules": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "fan_in_fan_out": false, + "bias": "none", + "use_rslora": false, + "modules_to_save": null, + "init_lora_weights": true, + "layers_to_transform": null, + "layers_pattern": null, + 
"rank_pattern": {}, + "alpha_pattern": {}, + "megatron_config": null, + "megatron_core": "megatron.core", + "trainable_token_indices": null, + "loftq_config": {}, + "eva_config": null, + "corda_config": null, + "use_dora": false, + "alora_invocation_tokens": null, + "use_qalora": false, + "qalora_group_size": 16, + "layer_replication": null, + "runtime_config": { + "ephemeral_gpu_offload": false + }, + "lora_bias": false, + "target_parameters": null, + "arrow_config": null, + "ensure_weight_tying": false +} \ No newline at end of file diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..d2dffa6b51925eb2932ded124a37f829e933585c --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_448/actor/lora_adapter/adapter_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c228da320932cd246016cd0d2128bf2a9414f2ff3441c459b5b40b2057311c88 +size 59933600 diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647 --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_config.json @@ -0,0 +1,49 @@ +{ + "task_type": "CAUSAL_LM", + "peft_type": "LORA", + "auto_mapping": null, + "peft_version": "0.18.1", + "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1", + "revision": null, + "inference_mode": false, + "r": 8, + "target_modules": [ + "up_proj", + "k_proj", + "gate_proj", + "o_proj", + "q_proj", + "v_proj", + "down_proj" + ], + "exclude_modules": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "fan_in_fan_out": false, + "bias": "none", + "use_rslora": false, + "modules_to_save": null, + "init_lora_weights": true, + "layers_to_transform": null, + "layers_pattern": null, + "rank_pattern": {}, + "alpha_pattern": {}, + "megatron_config": null, + "megatron_core": "megatron.core", + "trainable_token_indices": null, + "loftq_config": {}, + "eva_config": null, + "corda_config": null, + "use_dora": false, + "alora_invocation_tokens": null, + "use_qalora": false, + "qalora_group_size": 16, + "layer_replication": null, + "runtime_config": { + "ephemeral_gpu_offload": false + }, + "lora_bias": false, + "target_parameters": null, + "arrow_config": null, + "ensure_weight_tying": false +} \ No newline at end of file diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..587f37eee2965c3a39119a8bcff0c83c05a8531a --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_512/actor/lora_adapter/adapter_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid 
sha256:b8fef230d44d308b338620cd86527f132ded5cb97c8e37d84e7f00910b5cbc17 +size 59933600 diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_config.json b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..4170a980c529fb82f202d37ac1d014a6ecc64647 --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_config.json @@ -0,0 +1,49 @@ +{ + "task_type": "CAUSAL_LM", + "peft_type": "LORA", + "auto_mapping": null, + "peft_version": "0.18.1", + "base_model_name_or_path": "/dev/shm/verl-cache/b3c28c1b99a08b84eb28d5733b49c01c/aa8e72537993ba99e69dfaafa59ed015b17504d1", + "revision": null, + "inference_mode": false, + "r": 8, + "target_modules": [ + "up_proj", + "k_proj", + "gate_proj", + "o_proj", + "q_proj", + "v_proj", + "down_proj" + ], + "exclude_modules": null, + "lora_alpha": 16, + "lora_dropout": 0.0, + "fan_in_fan_out": false, + "bias": "none", + "use_rslora": false, + "modules_to_save": null, + "init_lora_weights": true, + "layers_to_transform": null, + "layers_pattern": null, + "rank_pattern": {}, + "alpha_pattern": {}, + "megatron_config": null, + "megatron_core": "megatron.core", + "trainable_token_indices": null, + "loftq_config": {}, + "eva_config": null, + "corda_config": null, + "use_dora": false, + "alora_invocation_tokens": null, + "use_qalora": false, + "qalora_group_size": 16, + "layer_replication": null, + "runtime_config": { + "ephemeral_gpu_offload": false + }, + "lora_bias": false, + "target_parameters": null, + "arrow_config": null, + "ensure_weight_tying": false +} \ No newline at end of file diff --git a/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_model.safetensors b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_model.safetensors new file mode 100644 index 0000000000000000000000000000000000000000..dd61c7ecf9c13d0d11ab1d7e83a7717120885a2f --- /dev/null +++ b/qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/global_step_64/actor/lora_adapter/adapter_model.safetensors @@ -0,0 +1,3 @@ +version https://git-lfs.github.com/spec/v1 +oid sha256:c0394ec95001247f8c1421d4e648fe622b63cfa153359d31a7790c7f2372206c +size 59933600
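
Usage note: each lora_adapter directory added above is a standard PEFT checkpoint (an adapter_config.json plus a Git LFS pointer for adapter_model.safetensors), so it can be applied with stock transformers/peft. A minimal sketch follows, assuming the repository has been checked out with `git lfs pull` so the safetensors blobs are materialized; since base_model_name_or_path in every config points at a /dev/shm/verl-cache path that only existed on the training host, the public base model id is supplied explicitly.

    # Sketch: load one GRPO LoRA checkpoint onto the public base model.
    # Assumptions: `transformers` and `peft` installed, LFS blobs pulled.
    from transformers import AutoModelForCausalLM, AutoTokenizer
    from peft import PeftModel

    BASE = "Qwen/Qwen2.5-3B-Instruct"  # the cached /dev/shm path in the configs will not resolve here
    ADAPTER = ("qwen-3b-lora/Qwen/Qwen2.5-3B-Instruct-polaris-GRPO-LoRA-rank8/"
               "global_step_512/actor/lora_adapter")

    tokenizer = AutoTokenizer.from_pretrained(BASE)
    base_model = AutoModelForCausalLM.from_pretrained(BASE, torch_dtype="auto")
    model = PeftModel.from_pretrained(base_model, ADAPTER)  # attaches the LoRA deltas
    model = model.merge_and_unload()  # optional: fold the adapter into the base weights

Swapping ADAPTER for any other global_step_* directory (or the rank-64 run) works the same way, since every checkpoint in this diff shares the base model and the same seven target modules.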
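The LFS pointer sizes are also internally consistent: all checkpoints target the same seven projection matrices, so adapter size should scale linearly with r, and the rank-64 files (479,005,032 bytes) are almost exactly 8x the rank-8 files (59,933,600 bytes). A back-of-the-envelope check, assuming the published Qwen2.5-3B dimensions (36 layers, hidden size 2048, intermediate size 11008, 2 KV heads of head dim 128; verify against the base model's config.json) and 4-byte fp32 storage:

    # Expected adapter payload: each targeted Linear of shape (in, out)
    # gains r * (in + out) LoRA parameters per transformer layer.
    LAYERS, HIDDEN, FFN, KV = 36, 2048, 11008, 2 * 128  # assumed Qwen2.5-3B dims

    shapes = {  # (in_features, out_features) per target module
        "q_proj": (HIDDEN, HIDDEN), "o_proj": (HIDDEN, HIDDEN),
        "k_proj": (HIDDEN, KV), "v_proj": (HIDDEN, KV),
        "gate_proj": (HIDDEN, FFN), "up_proj": (HIDDEN, FFN),
        "down_proj": (FFN, HIDDEN),
    }

    for r, lfs_size in [(8, 59_933_600), (64, 479_005_032)]:
        params = LAYERS * sum(r * (i + o) for i, o in shapes.values())
        print(f"r={r}: {params} params, {params * 4} bytes fp32 vs {lfs_size} on disk")

Under these assumptions the computed fp32 payloads (59,867,136 and 478,937,088 bytes) fall about 65-70 KB short of the pointer sizes, which is plausibly the safetensors header and tensor metadata.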