{
 "cells": [
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "\n",
    "# Welcome to StableTuner, Let's get started!\n",
    "#### This notebook will guide you through the setup process.\n",
    "\n",
    "\n",
    "__[Join the ST Discord for support, chat and fun times :)](https://discord.gg/DahNECrBUZ)__"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Start by uploading your payload.zip file (just drag and drop it to the file area) and run this cell as it gets uploaded."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "#Much thanks to IndustriaDitat and entmike for helping making ST linux compatible!\n",
    "from IPython.display import clear_output\n",
    "from subprocess import getoutput\n",
    "installed_xformers = False\n",
    "GPU_CardName = getoutput('nvidia-smi --query-gpu=name --format=csv,noheader')\n",
    "\n",
    "%pip install torch==1.12.1+cu116 torchvision==0.13.1+cu116 --extra-index-url \"https://download.pytorch.org/whl/cu116\"\n",
    "%pip install -U --pre triton\n",
    "%pip install ninja bitsandbytes\n",
    "if '4090' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/industriaditat/xformers_precompiles/resolve/main/RTX4090-xf14-cu116-py38/xformers-0.0.14.dev0-cp38-cp38-linux_x86_64.whl\n",
    "    installed_xformers = True\n",
    "if '3090' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/industriaditat/xformers_precompiles/resolve/main/RTX3090-xf14-cu116-py38/xformers-0.0.14.dev0-cp38-cp38-linux_x86_64.whl\n",
    "    installed_xformers = True\n",
    "if 'A5000' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/industriaditat/xformers_precompiles/resolve/main/A5000-xf14-cu116-py38/xformers-0.0.14.dev0-cp38-cp38-linux_x86_64.whl\n",
    "    installed_xformers = True\n",
    "if 'T4' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/r4ziel/xformers_pre_built/resolve/main/xformers-0.0.14.dev0-cp38-cp38-linux_x86_64_t4.whl\n",
    "    installed_xformers = True\n",
    "if 'A100' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/industriaditat/xformers_precompiles/resolve/main/A100_13dev/xformers-0.0.13.dev0-py3-none-any.whl\n",
    "    installed_xformers = True\n",
    "if 'V100' in GPU_CardName:\n",
    "    %pip install https://huggingface.co/industriaditat/xformers_precompiles/resolve/main/V100_13dev/xformers-0.0.13.dev0-py3-none-any.whl\n",
    "    installed_xformers = True\n",
    "if installed_xformers == False:\n",
    "    clear_output()\n",
    "    print(\"No precompiled xformers found for your GPU. Please wait while we compile xformers for your GPU, this might take 20-40 minutes.\")\n",
    "    %pip install git+https://github.com/facebookresearch/xformers@1d31a3a#egg=xformers\n",
    "%pip install git+https://github.com/huggingface/diffusers.git@0ca1724#egg=diffusers --force-reinstall\n",
    "clear_output()\n",
    "print(\"Done!\")"
   ]
  },
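  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Optional sanity check: the small cell below is an added convenience, not part of the original setup. It simply confirms that PyTorch can see your GPU and that xformers imports cleanly before you move on."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Optional sanity check (an added convenience, not part of the original setup):\n",
    "# confirm the GPU is visible to PyTorch and that xformers imports after the install above.\n",
    "import torch\n",
    "print('CUDA available:', torch.cuda.is_available())\n",
    "if torch.cuda.is_available():\n",
    "    print('GPU:', torch.cuda.get_device_name(0))\n",
    "try:\n",
    "    import xformers\n",
    "    print('xformers version:', xformers.__version__)\n",
    "except ImportError:\n",
    "    print('xformers is not importable; the install/compile step above may have failed.')"
   ]
  },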
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Upload finished?, time to run this next cell!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "\n",
    "%pip install -r requirements.txt\n",
    "clear_output()\n",
    "print(\"Done!\")"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "#### Looks like you're done installing, let's get training!\n",
    "\n"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "!accelerate launch --mixed_precision=\"fp16\" scripts/trainer.py  \\\n",
    "  --shuffle_per_epoch \\\n",
    "  --attention=\"xformers\" \\\n",
    "  --model_variant=\"base\" \\\n",
    "  --disable_cudnn_benchmark \\\n",
    "  --sample_step_interval=500 \\\n",
    "  --stop_text_encoder_training=15 \\\n",
    "  --pretrained_model_name_or_path=\"/workspace/any45\"  \\\n",
    "  --pretrained_vae_name_or_path=\"/workspace/any45/vae\"  \\\n",
    "  --output_dir=\"output/tw3_magic_new\"  \\\n",
    "  --seed=3434555  \\\n",
    "  --resolution=512  \\\n",
    "  --train_batch_size=40  \\\n",
    "  --num_train_epochs=20  \\\n",
    "  --mixed_precision=\"fp16\" \\\n",
    "  --use_bucketing \\\n",
    "  --aspect_mode=\"dynamic\" \\\n",
    "  --aspect_mode_action_preference=\"add\" \\\n",
    "  --use_8bit_adam \\\n",
    "  --gradient_checkpointing \\\n",
    "  --gradient_accumulation_steps=1 \\\n",
    "  --learning_rate=5e-6 \\\n",
    "  --lr_warmup_steps=0 \\\n",
    "  --lr_scheduler=\"cosine_with_restarts\" \\\n",
    "  --train_text_encoder \\\n",
    "  --use_image_names_as_captions \\\n",
    "  --concepts_list=\"stabletune_concept_list.json\" \\\n",
    "  --num_class_images=200 \\\n",
    "  --save_every_n_epoch=20 \\\n",
    "  --n_save_sample=1 \\\n",
    "  --sample_height=512 \\\n",
    "  --sample_width=512 \\\n",
    "  --dataset_repeats=1 \\\n",
    "  --clip_penultimate"
   ]
  },
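  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "The training command above points at `stabletune_concept_list.json`, but this notebook never shows what that file contains. The sketch below writes a minimal example assuming the common DreamBooth-style concept list layout (instance/class prompts plus data directories); the keys, prompts and paths are placeholders, so check StableTuner's own documentation for the exact schema it expects."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged example only: a minimal concepts list in the common DreamBooth-style\n",
    "# layout. The keys, prompts and paths below are placeholders, not a schema\n",
    "# guaranteed by StableTuner.\n",
    "import json\n",
    "\n",
    "example_concepts = [\n",
    "    {\n",
    "        \"instance_prompt\": \"photo of zwx dog\",           # prompt containing your subject token\n",
    "        \"class_prompt\": \"photo of a dog\",                # regularization prompt\n",
    "        \"instance_data_dir\": \"/workspace/data/zwx_dog\",  # your training images\n",
    "        \"class_data_dir\": \"/workspace/data/dog_class\"    # class/regularization images\n",
    "    }\n",
    "]\n",
    "\n",
    "with open(\"stabletune_concept_list_example.json\", \"w\") as f:\n",
    "    json.dump(example_concepts, f, indent=2)\n",
    "print(json.dumps(example_concepts, indent=2))"
   ]
  },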
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "## Model Playground\n",
    "\n",
    "#### This is where you can test your model and package it up."
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Run this cell and select your output model, you can upload the model to HuggingFace or run the next cell to use the Web UI and play around with your model."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "from ipywidgets import widgets\n",
    "import os\n",
    "import glob\n",
    "from IPython.display import clear_output\n",
    "import torch\n",
    "from torch import autocast\n",
    "from diffusers import StableDiffusionPipeline, DPMSolverMultistepScheduler\n",
    "from IPython.display import display\n",
    "import random\n",
    "import gradio as gr\n",
    "from requests import HTTPError\n",
    "from huggingface_hub import create_repo\n",
    "from huggingface_hub import HfApi\n",
    "from huggingface_hub import login\n",
    "from huggingface_hub import logging\n",
    "from huggingface_hub.utils import hf_raise_for_status, HfHubHTTPError\n",
    "def upload_hf(token,repo_name,model_path):\n",
    "    if token == '':\n",
    "        print(\"Please enter your HuggingFace token.\")\n",
    "        return\n",
    "    if repo_name == '':\n",
    "        print(\"Please enter your model name.\")\n",
    "        return\n",
    "    if model_path == '':\n",
    "        print(\"Couldn't find a valid model\")\n",
    "        return\n",
    "    api = login(token=token)\n",
    "    api = HfApi()\n",
    "    hf_username = HfApi().whoami(token)['name']\n",
    "    project_repo = repo_name\n",
    "    try:\n",
    "        create_repo(f'{hf_username}/{project_repo}', repo_type='model',token=token)\n",
    "    except HTTPError as http_e:\n",
    "        if http_e.response.status_code == 409:\n",
    "            print('The repo already exists')\n",
    "            pass\n",
    "        else:\n",
    "            print(f'An error occurred: {http_e.reason}')\n",
    "    except HfHubHTTPError as hfhub_e:\n",
    "        if hfhub_e.response.status_code == 409:\n",
    "            pass\n",
    "        else:\n",
    "            print(f'An error occurred: {hfhub_e.message}')\n",
    "    try:\n",
    "        print('Uploading...')\n",
    "        api.upload_folder(\n",
    "            folder_path=model_path,\n",
    "            path_in_repo='',\n",
    "            repo_id=f'{hf_username}/{project_repo}',\n",
    "            repo_type=\"model\",\n",
    "            ignore_patterns=\"**/logs/\",\n",
    "        )\n",
    "        print('Done!')\n",
    "        print(f'Model is at https://huggingface.co/{hf_username}/{project_repo}')\n",
    "    except Exception as general_e:\n",
    "        print(f'Exception occurred: {general_e}')\n",
    "if 'output' not in os.listdir():\n",
    "    print(\"No output folder found. Please run the training cell first.\")\n",
    "models = []\n",
    "model_dir = os.listdir('output')[0]\n",
    "output_sort = sorted(glob.iglob('output' + os.sep + model_dir + os.sep+ '*'), key=os.path.getctime, reverse=True)\n",
    "if len(output_sort) == 0:\n",
    "    print(\"No models found in output folder. Please run the training cell first.\")\n",
    "for model in output_sort:\n",
    "    required_folders = [\"vae\", \"unet\", \"tokenizer\", \"text_encoder\"]\n",
    "    if all(x in os.listdir(model) for x in required_folders):\n",
    "        models.append(model)\n",
    "model_selection = widgets.Dropdown(\n",
    "    layout={'width': 'initial'},\n",
    "    style={'description_width': 'initial'},\n",
    "    options=models,\n",
    "    value=models[0],\n",
    "    # rows=10,\n",
    "    description='Select Checkpoint:',\n",
    "    disabled=False\n",
    ")\n",
    "upload_btn = widgets.Button(\n",
    "    description='Upload to HuggingFace Hub',\n",
    "    style={'description_width': 'initial'},\n",
    "    layout={'width': 'initial'},\n",
    "    disabled=False,\n",
    "    button_style='', # 'success', 'info', 'warning', 'danger' or ''\n",
    "    tooltip='Press to start upload',\n",
    "    icon='check' # (FontAwesome names without the `fa-` prefix)\n",
    ")\n",
    "token_txt = widgets.Text(\n",
    "    value='',\n",
    "    style={'description_width': 'initial'},\n",
    "    layout={'width': 'initial'},\n",
    "    placeholder='HF Token',\n",
    "    description='Hugging Face Token:',\n",
    "    disabled=False\n",
    ")\n",
    "repo_txt = widgets.Text(\n",
    "    value=model_dir,\n",
    "    style={'description_width': 'initial'},\n",
    "    placeholder='Give your model a name',\n",
    "    description='Model Name:',\n",
    "    disabled=False\n",
    ")\n",
    "upload_btn.on_click(lambda x: upload_hf(token_txt.value,repo_txt.value,model_selection.value))\n",
    "clear_output()\n",
    "display(model_selection)\n",
    "display(token_txt)\n",
    "display(repo_txt)\n",
    "display(upload_btn)\n",
    "print('You can input your HuggingFace token and repo to upload your model to the HuggingFace Hub, make sure to use a write API token!')\n",
    "print('Alternatively, you can run the next cell to open up a UI where you can generate and zip up the model.')\n"
   ]
  },
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "All ready?, lets play around with it ;), this next cell will load a small UI for you to generate images and zip it up for download, re-run this cell if you selected a new model!"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "model_path = model_selection.value\n",
    "pipe = StableDiffusionPipeline.from_pretrained(model_path,safety_checker=None, torch_dtype=torch.float16).to(\"cuda\")\n",
    "scheduler = DPMSolverMultistepScheduler.from_config(pipe.scheduler.config)\n",
    "pipe.scheduler = scheduler\n",
    "print('Loaded checkpoint')\n",
    "def inference(prompt, negative_prompt, num_samples, height=512, width=512, num_inference_steps=50,seed=-1,guidance_scale=7.5):\n",
    "    with torch.autocast(\"cuda\"), torch.inference_mode():\n",
    "      if seed != -1:\n",
    "        g_cuda = torch.Generator(device='cuda')\n",
    "        g_cuda.manual_seed(int(seed))\n",
    "      else:\n",
    "        seed = random.randint(0, 100000)\n",
    "        g_cuda = torch.Generator(device='cuda')\n",
    "        g_cuda.manual_seed(seed)\n",
    "        return pipe(\n",
    "                prompt, height=int(height), width=int(width),\n",
    "                negative_prompt=negative_prompt,\n",
    "                num_images_per_prompt=int(num_samples),\n",
    "                num_inference_steps=int(num_inference_steps), guidance_scale=guidance_scale,\n",
    "                generator=g_cuda\n",
    "            ).images, seed\n",
    "def zip_model():\n",
    "  import shutil\n",
    "  print('Zipping Model!, Please wait until you see a done message, this can take a few minutes, you can keep generating while you wait!')\n",
    "  curLocation = os.getcwd()\n",
    "  model_name = os.path.dirname(model_path)\n",
    "  shutil.make_archive(model_name,'zip',model_path)\n",
    "  os.chdir(curLocation)\n",
    "  print('Done!')\n",
    "with gr.Blocks() as demo:\n",
    "    with gr.Row():\n",
    "        with gr.Column():\n",
    "            prompt = gr.Textbox(label=\"Prompt\", value=\"photo of zwx dog in a bucket\")\n",
    "            negative_prompt = gr.Textbox(label=\"Negative Prompt\", value=\"\")\n",
    "            with gr.Row():\n",
    "              run = gr.Button(value=\"Generate\")\n",
    "              zip = gr.Button(value=\"Zip Model For Download\")\n",
    "            with gr.Row():\n",
    "                num_samples = gr.Number(label=\"Number of Samples\", value=4)\n",
    "                guidance_scale = gr.Number(label=\"Guidance Scale\", value=7.5)\n",
    "            with gr.Row():\n",
    "                height = gr.Number(label=\"Height\", value=512)\n",
    "                width = gr.Number(label=\"Width\", value=512)\n",
    "            with gr.Row():\n",
    "              num_inference_steps = gr.Slider(label=\"Steps\", value=25)\n",
    "              seed = gr.Number(label=\"Seed\", value=-1)\n",
    "        with gr.Column():\n",
    "            gallery = gr.Gallery()\n",
    "            seedDisplay = gr.Number(label=\"Used Seed:\", value=0)\n",
    "\n",
    "    run.click(inference, inputs=[prompt, negative_prompt, num_samples, height, width, num_inference_steps,seed, guidance_scale], outputs=[gallery,seedDisplay])\n",
    "    zip.click(zip_model)\n",
    "demo.launch(debug=True,share=True)"
   ]
  },
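  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "Optional: if you would rather produce a single `.ckpt` file without leaving the notebook, one possible approach is sketched below. It assumes the standalone `convert_diffusers_to_original_stable_diffusion.py` script from the diffusers repository; the download URL and output filename are illustrative, and converting back in StableTuner (as noted below) remains the supported route."
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": [
    "# Hedged sketch, not part of the original notebook: convert the selected\n",
    "# diffusers checkpoint folder to a single .ckpt using the conversion script\n",
    "# from the diffusers repository. URL and output filename are illustrative.\n",
    "!wget -q https://raw.githubusercontent.com/huggingface/diffusers/main/scripts/convert_diffusers_to_original_stable_diffusion.py\n",
    "!python convert_diffusers_to_original_stable_diffusion.py \\\n",
    "  --model_path \"{model_selection.value}\" \\\n",
    "  --checkpoint_path \"converted_model.ckpt\" \\\n",
    "  --half"
   ]
  },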
  {
   "cell_type": "markdown",
   "metadata": {},
   "source": [
    "### This is the end, for now :) ,you can convert your model to CKPT back in StableTuner!."
   ]
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3 (ipykernel)",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.8.10"
  },
  "vscode": {
   "interpreter": {
    "hash": "886cb931ea414ad2a87adcccbb1ce9166879eb6056301acd331591c6290ceca8"
   }
  }
 },
 "nbformat": 4,
 "nbformat_minor": 4
}