diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000005/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000005/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000005/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000010/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000010/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000010/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000015/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000015/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000015/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": 
{}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000020/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000020/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000020/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000025/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000025/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000025/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": 
"gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000030/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000030/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000030/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000035/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000035/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000035/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000040/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000040/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000040/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000045/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000045/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000045/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000050/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000050/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000050/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000055/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000055/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000055/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000060/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000060/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000060/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000065/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000065/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000065/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000070/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000070/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000070/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000075/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000075/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000075/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000080/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000080/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000080/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000085/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000085/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000085/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000090/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000090/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000090/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000095/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000095/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000095/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000100/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000100/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000100/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000105/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000105/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000105/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000110/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000110/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000110/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000115/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000115/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000115/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000120/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000120/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000120/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000125/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000125/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000125/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000130/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000130/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000130/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000135/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000135/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000135/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000140/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000140/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000140/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000145/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000145/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000145/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000150/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000150/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000150/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000155/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000155/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000155/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000160/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000160/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000160/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000165/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000165/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000165/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000170/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000170/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000170/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000175/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000175/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000175/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000180/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000180/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000180/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000185/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000185/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000185/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000190/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000190/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000190/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000195/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000195/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000195/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000200/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000200/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000200/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000205/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000205/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000205/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000210/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000210/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000210/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000215/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000215/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000215/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000220/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000220/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000220/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000225/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000225/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000225/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000230/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000230/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000230/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000235/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000235/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000235/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000240/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000240/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000240/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000245/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000245/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000245/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000250/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000250/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000250/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000255/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000255/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000255/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000260/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000260/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000260/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000265/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000265/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000265/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000270/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000270/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000270/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000275/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000275/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000275/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000280/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000280/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000280/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000285/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000285/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000285/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000290/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000290/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000290/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000295/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000295/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000295/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000300/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000300/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000300/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000305/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000305/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000305/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000310/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000310/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000310/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000315/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000315/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000315/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000320/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000320/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000320/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000325/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000325/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000325/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000330/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000330/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000330/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000335/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000335/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000335/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000340/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000340/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000340/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000345/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000345/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000345/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000350/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000350/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000350/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000355/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000355/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000355/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000360/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000360/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000360/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000365/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000365/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000365/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000370/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000370/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000370/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000375/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000375/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000375/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000380/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000380/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000380/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000385/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000385/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000385/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000390/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000390/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000390/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000395/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000395/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000395/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000400/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000400/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000400/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000405/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000405/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000405/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000410/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000410/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000410/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000415/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000415/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000415/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000420/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000420/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000420/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000425/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000425/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000425/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000430/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000430/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000430/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000435/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000435/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000435/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000440/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000440/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000440/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000445/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000445/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000445/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000450/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000450/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000450/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000455/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000455/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000455/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000460/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000460/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000460/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000465/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000465/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000465/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000470/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000470/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000470/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000475/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000475/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000475/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000480/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000480/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000480/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000485/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000485/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000485/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000490/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000490/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000490/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000495/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000495/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000495/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000500/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000500/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000500/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000505/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000505/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000505/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000510/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000510/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000510/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000515/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000515/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000515/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000520/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000520/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000520/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000525/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000525/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000525/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000530/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000530/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000530/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000535/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000535/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000535/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000540/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000540/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000540/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000545/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000545/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000545/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000550/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000550/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000550/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000555/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000555/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000555/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000560/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000560/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000560/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000565/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000565/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000565/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000570/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000570/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000570/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000575/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000575/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000575/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000580/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000580/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000580/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000585/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000585/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000585/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000590/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000590/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000590/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000595/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000595/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000595/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000600/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000600/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000600/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000605/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000605/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000605/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000610/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000610/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000610/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000615/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000615/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000615/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000620/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000620/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000620/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000625/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000625/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000625/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000630/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000630/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000630/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000635/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000635/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000635/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000640/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000640/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000640/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000645/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000645/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000645/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000650/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000650/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000650/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000655/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000655/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000655/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000660/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000660/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000660/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000665/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000665/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000665/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000670/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000670/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000670/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000675/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000675/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000675/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000680/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000680/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000680/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000685/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000685/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000685/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000690/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000690/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000690/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000695/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000695/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000695/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000700/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000700/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000700/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000705/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000705/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000705/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000710/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000710/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000710/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000715/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000715/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000715/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000720/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000720/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000720/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000725/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000725/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000725/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000730/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000730/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000730/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000735/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000735/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000735/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000740/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000740/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000740/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000745/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000745/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000745/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000750/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000750/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000750/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000755/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000755/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000755/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000760/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000760/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000760/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000765/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000765/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000765/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000770/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000770/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000770/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000775/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000775/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000775/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000780/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000780/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000780/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000785/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000785/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000785/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000790/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000790/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000790/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000795/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000795/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000795/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000800/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000800/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000800/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000805/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000805/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000805/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000810/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000810/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000810/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000815/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000815/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000815/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000820/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000820/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000820/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000825/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000825/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000825/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000830/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000830/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000830/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000835/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000835/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000835/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000840/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000840/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000840/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000845/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000845/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000845/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000850/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000850/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000850/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000855/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000855/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000855/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000860/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000860/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000860/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000865/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000865/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000865/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000870/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000870/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000870/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000875/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000875/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000875/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000880/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000880/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000880/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000885/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000885/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000885/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000890/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000890/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000890/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000895/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000895/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000895/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000900/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000900/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000900/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000905/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000905/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000905/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000910/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000910/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000910/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000915/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000915/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000915/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000920/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000920/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000920/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000925/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000925/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000925/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000930/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000930/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000930/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000935/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000935/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000935/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000940/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000940/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000940/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000945/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000945/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000945/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
a/fdfo-combined-reward-cfg-2.0/epoch-0000950/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000950/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000950/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000955/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000955/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000955/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000960/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000960/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000960/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + 
"arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000965/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000965/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000965/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000970/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000970/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000970/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + 
"layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000975/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000975/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000975/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000980/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000980/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000980/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + 
"target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000985/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000985/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000985/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000990/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000990/adapter_config.json new file mode 100644 index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03 --- /dev/null +++ b/fdfo-combined-reward-cfg-2.0/epoch-0000990/adapter_config.json @@ -0,0 +1,50 @@ +{ + "alora_invocation_tokens": null, + "alpha_pattern": {}, + "arrow_config": null, + "auto_mapping": { + "base_model_class": "SD3Transformer2DModel", + "parent_library": "diffusers.models.transformers.transformer_sd3" + }, + "base_model_name_or_path": null, + "bias": "none", + "corda_config": null, + "ensure_weight_tying": false, + "eva_config": null, + "exclude_modules": null, + "fan_in_fan_out": false, + "inference_mode": true, + "init_lora_weights": "gaussian", + "layer_replication": null, + "layers_pattern": null, + "layers_to_transform": null, + "loftq_config": {}, + "lora_alpha": 64, + "lora_bias": false, + "lora_dropout": 0.0, + "megatron_config": null, + "megatron_core": "megatron.core", + "modules_to_save": null, + "peft_type": "LORA", + "peft_version": "0.18.1", + "qalora_group_size": 16, + "r": 32, + "rank_pattern": {}, + "revision": null, + "target_modules": [ + "attn.to_add_out", + "attn.add_k_proj", + "attn.to_out.0", + "attn.add_q_proj", + "attn.to_k", + "attn.add_v_proj", + "attn.to_v", + "attn.to_q" + ], + "target_parameters": null, + "task_type": null, + "trainable_token_indices": null, + "use_dora": false, + "use_qalora": false, + "use_rslora": false +} \ No newline at end of file diff --git 
diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0000995/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0000995/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03
diff --git a/fdfo-combined-reward-cfg-2.0/epoch-0001000/adapter_config.json b/fdfo-combined-reward-cfg-2.0/epoch-0001000/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03
diff --git a/fdfo-combined-reward-cfg-4.5/epoch-0000995/adapter_config.json b/fdfo-combined-reward-cfg-4.5/epoch-0000995/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03
diff --git a/fdfo-combined-reward-cfg-4.5/epoch-0001000/adapter_config.json b/fdfo-combined-reward-cfg-4.5/epoch-0001000/adapter_config.json
new file mode 100644
index 0000000000000000000000000000000000000000..51ffdb95d43cef3374bb01be37362b4899298c03