diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000200/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000200/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000200/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000205/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000205/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000205/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000210/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000210/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000210/adapter_config.json @@ -0,0 +1,50 @@ +{ + 
"alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000215/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000215/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000215/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000220/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000220/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000220/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + 
"fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000225/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000225/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000225/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000230/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000230/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000230/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + 
"peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000235/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000235/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000235/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000240/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000240/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000240/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, 
+ "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000245/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000245/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000245/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000250/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000250/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000250/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000255/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000255/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ 
b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000255/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000260/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000260/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000260/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000265/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000265/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000265/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": 
null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000270/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000270/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000270/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000275/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000275/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000275/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + 
"megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000280/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000280/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000280/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000285/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000285/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000285/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + 
"target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000290/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000290/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000290/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000295/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000295/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000295/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000300/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000300/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000300/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000305/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000305/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000305/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000310/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000310/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000310/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": 
"diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000315/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000315/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000315/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000320/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000320/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000320/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + 
"loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000325/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000325/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000325/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000330/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000330/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000330/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + 
"attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000335/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000335/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000335/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000340/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000340/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000340/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000345/adapter_config.json 
b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000345/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000345/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000350/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000350/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000350/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000355/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000355/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000355/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": 
{ + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000360/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000360/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000360/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000365/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000365/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000365/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000495/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000495/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000495/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000500/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000500/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000500/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": 
null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000505/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000505/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000505/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000510/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000510/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000510/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000515/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000515/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000515/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000520/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000520/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000520/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000525/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000525/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000525/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": 
null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000530/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000530/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000530/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000535/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000535/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000535/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + 
"inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000540/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000540/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000540/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000545/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000545/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000545/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + 
"qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000550/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000550/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000550/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000555/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000555/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000555/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + 
"use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000560/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000560/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000560/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000565/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000565/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000565/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000570/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000570/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ 
b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000570/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000575/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000575/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000575/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000580/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000580/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000580/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": 
null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000585/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000585/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000585/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000590/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000590/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000590/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + 
"megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000595/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000595/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000595/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000600/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000600/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000600/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + 
"target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000605/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000605/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000605/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000610/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000610/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000610/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000615/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000615/adapter_config.json new file mode 100644 index 
0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000615/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000620/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000620/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000620/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000625/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000625/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000625/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": 
"diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000630/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000630/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000630/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000635/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000635/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000635/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + 
"loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000640/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000640/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000640/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000645/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000645/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000645/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + 
"attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000650/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000650/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000650/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000655/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000655/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000655/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000660/adapter_config.json 
b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000660/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000660/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000665/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000665/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000665/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000670/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000670/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000670/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": 
{ + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000675/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000675/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000675/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000680/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000680/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000680/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000685/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000685/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000685/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000690/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000690/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000690/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": 
null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-vlm-alignment-reward-no-cfg/epoch-0000695/adapter_config.json b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000695/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-vlm-alignment-reward-no-cfg/epoch-0000695/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file