LeonardoBenitez commited on
Commit
0e73a09
·
verified ·
1 Parent(s): c69a649

Upload folder using huggingface_hub

Browse files
models/people_Pierce_Brosnan_munba_200/README.md ADDED
@@ -0,0 +1,210 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ hyperparameters:
3
+ lora_r: 16
4
+ lora_alpha: 4
5
+ is_lora_negated: true
6
+ seed: 42
7
+ model_name_or_path: CompVis/stable-diffusion-v1-4
8
+ revision: null
9
+ variant: null
10
+ dataset_forget_name: assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_forget
11
+ dataset_retain_name: assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_retain
12
+ dataset_forget_config_name: null
13
+ dataset_retain_config_name: null
14
+ image_column: image
15
+ caption_column: text
16
+ validation_prompt: An image of Pierce Brosnan
17
+ num_validation_images: 1
18
+ validation_epochs: 201
19
+ resolution: 512
20
+ center_crop: false
21
+ random_flip: true
22
+ max_train_samples: null
23
+ dataloader_num_workers: 2
24
+ prediction_type: null
25
+ per_device_train_batch_size: 2
26
+ gradient_accumulation_steps: 2
27
+ num_train_epochs: 200
28
+ learning_rate: 0.0006
29
+ lr_scheduler_type: constant
30
+ should_log: true
31
+ local_rank: -1
32
+ device: cuda
33
+ n_gpu: 1
34
+ output_dir: assets/models/people_Pierce_Brosnan_munba_200
35
+ cache_dir: null
36
+ hub_token: null
37
+ hub_model_id: null
38
+ logging_dir: logs
39
+ logging_steps: 20
40
+ save_strategy: epoch
41
+ save_total_limit: 2
42
+ gradient_checkpointing: false
43
+ enable_xformers_memory_efficient_attention: false
44
+ mixed_precision: 'no'
45
+ allow_tf32: false
46
+ use_8bit_adam: false
47
+ report_to: tensorboard
48
+ compute_gradient_conflict: false
49
+ compute_runtimes: true
50
+ compute_memory: true
51
+ max_train_steps: 400
52
+ lr_warmup_steps: 0
53
+ adam_beta1: 0.9
54
+ adam_beta2: 0.999
55
+ adam_weight_decay: 0.01
56
+ adam_epsilon: 1.0e-08
57
+ max_grad_norm: 5.0
58
+ checkpointing_steps: 10000
59
+ checkpoints_total_limit: null
60
+ resume_from_checkpoint: null
61
+ noise_offset: 0.0
62
+ model-index:
63
+ - name: None
64
+ results:
65
+ - task:
66
+ type: text-to-image
67
+ dataset:
68
+ name: Forget set
69
+ type: inline-prompts
70
+ metrics:
71
+ - type: clip
72
+ value: 38.7230806350708
73
+ name: ForgetSet clip score of original model mean (~↑)
74
+ - type: clip
75
+ value: 2.134732856274269
76
+ name: ForgetSet clip score of original model std (~↓)
77
+ - type: clip
78
+ value: 25.825326442718506
79
+ name: ForgetSet clip score of learned model mean (~↑)
80
+ - type: clip
81
+ value: 0.7977634090567798
82
+ name: ForgetSet clip score of learned model std (~↓)
83
+ - type: clip
84
+ value: 23.278764724731445
85
+ name: ForgetSet clip score of unlearned model mean (↓)
86
+ - type: clip
87
+ value: 0.4547607779910043
88
+ name: ForgetSet clip score of unlearned model std (~↓)
89
+ - type: clip
90
+ value: 2.5465617179870605
91
+ name: ForgetSet clip score difference between learned and unlearned mean (↑)
92
+ - type: clip
93
+ value: 0.76270621971113
94
+ name: ForgetSet clip score difference between learned and unlearned std (~↓)
95
+ - type: clip
96
+ value: 15.444315910339355
97
+ name: ForgetSet clip score difference between original and unlearned mean (↑)
98
+ - type: clip
99
+ value: 2.1217779956186833
100
+ name: ForgetSet clip score difference between original and unlearned std (~↓)
101
+ - type: clip
102
+ value: 12.897754192352295
103
+ name: ForgetSet clip score difference between original and learned mean (↓)
104
+ - type: clip
105
+ value: 2.4659614374032364
106
+ name: ForgetSet clip score difference between original and learned std (~↓)
107
+ - type: clip
108
+ value: 30.71055316925049
109
+ name: RetainSet clip score of original model mean (~↑)
110
+ - type: clip
111
+ value: 1.864312610848831
112
+ name: RetainSet clip score of original model std (~↓)
113
+ - type: clip
114
+ value: 25.383567333221436
115
+ name: RetainSet clip score of learned model mean (~↓)
116
+ - type: clip
117
+ value: 0.7397609908335028
118
+ name: RetainSet clip score of learned model std (~↓)
119
+ - type: clip
120
+ value: 24.77995777130127
121
+ name: RetainSet clip score of unlearned model mean (↑)
122
+ - type: clip
123
+ value: 1.1493816485108543
124
+ name: RetainSet clip score of unlearned model std (~↓)
125
+ - type: clip
126
+ value: 0.603609561920166
127
+ name: RetainSet clip score difference between learned and unlearned mean (↓)
128
+ - type: clip
129
+ value: 1.3915584776471195
130
+ name: RetainSet clip score difference between learned and unlearned std (~↓)
131
+ - type: clip
132
+ value: 5.930595397949219
133
+ name: RetainSet clip score difference between original and unlearned mean (↓)
134
+ - type: clip
135
+ value: 0.7179510572112803
136
+ name: RetainSet clip score difference between original and unlearned std (~↓)
137
+ - type: clip
138
+ value: 5.326985836029053
139
+ name: RetainSet clip score difference between original and learned mean (↑)
140
+ - type: clip
141
+ value: 2.051479005715849
142
+ name: RetainSet clip score difference between original and learned std (~↓)
143
+ - type: runtime
144
+ value: 7.102750023206076
145
+ name: Inference latency seconds mean (↓)
146
+ - type: runtime
147
+ value: 0.06261925152969083
148
+ name: Inference latency seconds std (~↓)
149
+ - task:
150
+ type: text-to-image
151
+ dataset:
152
+ name: assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_forget (forget)
153
+ and assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_retain (retain)
154
+ sets
155
+ type: forget-and-retain-together
156
+ metrics:
157
+ - type: runtime
158
+ value: 2.896965503692627
159
+ name: Runtime init seconds (~↓)
160
+ - type: runtime
161
+ value: 19.232181072235107
162
+ name: Runtime data loading seconds (~↓)
163
+ - type: runtime
164
+ value: 1534.8414134979248
165
+ name: Runtime training seconds (↓)
166
+ - type: runtime
167
+ value: 186.05999779701233
168
+ name: Runtime eval seconds (~↓)
169
+ - type: memory
170
+ value: 0
171
+ name: Peak memory usage in training (~↓)
172
+ ---
173
+
174
+ <!-- This model card has been generated automatically according to the information the training script had access to. You
175
+ should probably proofread and complete it, then remove this comment. -->
176
+
177
+
178
+ # LoRA text2image fine-tuning - None
179
+ These are LoRA adaptation weights for CompVis/stable-diffusion-v1-4.
180
+ The weights were fine-tuned for forgetting assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_forget dataset, while retaining assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_retain.
181
+ You can find some example images below.
182
+
183
+ ![img](images/val_prompt_00_01.png)
184
+ ![img](images/tst_prompt_199_01.png)
185
+ ![img](images/Forget - An image of Pierce Brosnan.png)
186
+ ![img](images/Forget - Photograph of Pierce Brosnan; high definition.png)
187
+ ![img](images/Forget - An picture of Pierce Brosnan in the rain.png)
188
+ ![img](images/Forget - An picture of Pierce Brosnan running.png)
189
+ ![img](images/Retain - An image of a child.png)
190
+ ![img](images/Retain - Photograph of a child; high definition.png)
191
+ ![img](images/Retain - An picture of a child in the rain.png)
192
+ ![img](images/Retain - An picture of a child running.png)
193
+
194
+
195
+
196
+ ## Intended uses & limitations
197
+
198
+ #### How to use
199
+
200
+ ```python
201
+ # TODO: add an example code snippet for running this diffusion pipeline
202
+ ```
203
+
204
+ #### Limitations and bias
205
+
206
+ [TODO: provide examples of latent issues and potential remediations]
207
+
208
+ ## Training details
209
+
210
+ [TODO: describe the data used to train the model]
models/people_Pierce_Brosnan_munba_200/images/Forget - An image of Pierce Brosnan.png ADDED

Git LFS Details

  • SHA256: aae1dd0a427a4c8aacfbb071b321a051dbcbd4b3b292c64b18e58ef7c9c724e3
  • Pointer size: 131 Bytes
  • Size of remote file: 707 kB
models/people_Pierce_Brosnan_munba_200/images/Forget - An picture of Pierce Brosnan in the rain.png ADDED

Git LFS Details

  • SHA256: 90402ef958565fc944dbdc0a52d622549099d8eb1e70d605d2751df81c3b7066
  • Pointer size: 131 Bytes
  • Size of remote file: 760 kB
models/people_Pierce_Brosnan_munba_200/images/Forget - An picture of Pierce Brosnan running.png ADDED

Git LFS Details

  • SHA256: 249e52b29907be43b8d6121ac2c4d90e7f8ffa133ace0b03f129ccc58ebe6c1a
  • Pointer size: 131 Bytes
  • Size of remote file: 696 kB
models/people_Pierce_Brosnan_munba_200/images/Forget - Photograph of Pierce Brosnan; high definition.png ADDED

Git LFS Details

  • SHA256: 46600488591953f25957a42659eff65f97eccb40a8266f4b578b391f733d1a2b
  • Pointer size: 131 Bytes
  • Size of remote file: 701 kB
models/people_Pierce_Brosnan_munba_200/images/Retain - An image of a child.png ADDED

Git LFS Details

  • SHA256: 9c8b06378b7f22343840b1e64bd420fb7f4cd3bb70b1a35a1efecf63f4bdca5a
  • Pointer size: 131 Bytes
  • Size of remote file: 627 kB
models/people_Pierce_Brosnan_munba_200/images/Retain - An picture of a child in the rain.png ADDED

Git LFS Details

  • SHA256: 337009820c2a39c21ab2ec9773a8f8c230b0626edef37670c2fc5ce78026cf7c
  • Pointer size: 131 Bytes
  • Size of remote file: 756 kB
models/people_Pierce_Brosnan_munba_200/images/Retain - An picture of a child running.png ADDED

Git LFS Details

  • SHA256: 09ed9a9bbfb7dfd1256fcb1c2206291da2f4f8b18366ca0f6bb0ee3d45e86eee
  • Pointer size: 131 Bytes
  • Size of remote file: 663 kB
models/people_Pierce_Brosnan_munba_200/images/Retain - Photograph of a child; high definition.png ADDED

Git LFS Details

  • SHA256: 3eafb44d6223373c073efe0465e3bb96e7ade3681e90fac7e5e3e96634192bf5
  • Pointer size: 131 Bytes
  • Size of remote file: 831 kB
models/people_Pierce_Brosnan_munba_200/images/tst_prompt_199_01.png ADDED

Git LFS Details

  • SHA256: e9c4a470ad900801f7de4f9402eb27af8a1cc00eac80d618ef16bac39fb27d33
  • Pointer size: 128 Bytes
  • Size of remote file: 842 Bytes
models/people_Pierce_Brosnan_munba_200/images/val_prompt_00_01.png ADDED

Git LFS Details

  • SHA256: 8a23650fdc32acaa43ecd1b101272ba9f14fef3a7270bbe429fe59a4eef60dbe
  • Pointer size: 131 Bytes
  • Size of remote file: 439 kB
models/people_Pierce_Brosnan_munba_200/logs/text2image-fine-tune/1771479982.0078263/events.out.tfevents.1771479982.erdos.cl.itk.ppke.hu.3432739.3 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:bcf802523046c45c0a65c5646aabbc1f19f70fc1a97fd0c7408bef96472aebe6
3
+ size 2924
models/people_Pierce_Brosnan_munba_200/logs/text2image-fine-tune/1771479982.0118423/hparams.yml ADDED
@@ -0,0 +1,59 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ adam_beta1: 0.9
2
+ adam_beta2: 0.999
3
+ adam_epsilon: 1.0e-08
4
+ adam_weight_decay: 0.01
5
+ allow_tf32: false
6
+ cache_dir: null
7
+ caption_column: text
8
+ center_crop: false
9
+ checkpointing_steps: 10000
10
+ checkpoints_total_limit: null
11
+ compute_gradient_conflict: false
12
+ compute_memory: true
13
+ compute_runtimes: true
14
+ dataloader_num_workers: 2
15
+ dataset_forget_config_name: null
16
+ dataset_forget_name: assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_forget
17
+ dataset_retain_config_name: null
18
+ dataset_retain_name: assets/datasets/lfw_splits_filtered/Pierce_Brosnan/train_retain
19
+ device: cuda
20
+ enable_xformers_memory_efficient_attention: false
21
+ gradient_accumulation_steps: 2
22
+ gradient_checkpointing: false
23
+ hub_model_id: null
24
+ hub_token: null
25
+ image_column: image
26
+ is_lora_negated: true
27
+ learning_rate: 0.0006
28
+ local_rank: -1
29
+ logging_dir: logs
30
+ logging_steps: 20
31
+ lora_alpha: 4
32
+ lora_r: 16
33
+ lr_scheduler_type: constant
34
+ lr_warmup_steps: 0
35
+ max_grad_norm: 5.0
36
+ max_train_samples: null
37
+ max_train_steps: 400
38
+ mixed_precision: 'no'
39
+ model_name_or_path: CompVis/stable-diffusion-v1-4
40
+ n_gpu: 1
41
+ noise_offset: 0.0
42
+ num_train_epochs: 200
43
+ num_validation_images: 1
44
+ output_dir: assets/models/people_Pierce_Brosnan_munba_200
45
+ per_device_train_batch_size: 2
46
+ prediction_type: null
47
+ random_flip: true
48
+ report_to: tensorboard
49
+ resolution: 512
50
+ resume_from_checkpoint: null
51
+ revision: null
52
+ save_strategy: epoch
53
+ save_total_limit: 2
54
+ seed: 42
55
+ should_log: true
56
+ use_8bit_adam: false
57
+ validation_epochs: 201
58
+ validation_prompt: An image of Pierce Brosnan
59
+ variant: null
models/people_Pierce_Brosnan_munba_200/logs/text2image-fine-tune/events.out.tfevents.1771479982.erdos.cl.itk.ppke.hu.3432739.2 ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:50a0892cb36b282e50d3f5bfd5cf74c5fceff1bda0fc6cb63b61428757f77994
3
+ size 485033
models/people_Pierce_Brosnan_munba_200/pytorch_lora_weights.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:aadceb6a222116a8ece77a1fd1e14587ee1c551dfb4c17df4e71a1c8fb5fc6fe
3
+ size 12792952