NagisaNao commited on
Commit
f9b46d8
·
verified ·
1 Parent(s): 731a2de

✨ yay for modularity~

Browse files
files_cells/notebooks/en/auto_cleaner_en.ipynb CHANGED
@@ -26,19 +26,11 @@
26
  "from IPython.display import display, HTML, Javascript\n",
27
  "\n",
28
  "\n",
29
- "# ================= DETECT ENV =================\n",
30
- "def detect_environment():\n",
31
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
32
- " environments = {\n",
33
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
34
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
35
- " }\n",
36
- " for env_var, (environment, path) in environments.items():\n",
37
- " if env_var in os.environ:\n",
38
- " return environment, path, free_plan\n",
39
- "\n",
40
- "env, root_path, free_plan = detect_environment()\n",
41
- "webui_path = f\"{root_path}/sdw\"\n",
42
  "\n",
43
  "\n",
44
  "# ==================== CSS ====================\n",
 
26
  "from IPython.display import display, HTML, Javascript\n",
27
  "\n",
28
  "\n",
29
+ "# Setup Env\n",
30
+ "env = os.environ.get('ENV_NAME')\n",
31
+ "root_path = os.environ.get('ROOT_PATH')\n",
32
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
33
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
34
  "\n",
35
  "\n",
36
  "# ==================== CSS ====================\n",
files_cells/notebooks/en/downloading_en.ipynb CHANGED
@@ -10,6 +10,8 @@
10
  "source": [
11
  "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
  "\n",
 
 
13
  "import os\n",
14
  "import re\n",
15
  "import time\n",
@@ -25,19 +27,11 @@
25
  "from urllib.parse import urlparse, parse_qs\n",
26
  "\n",
27
  "\n",
28
- "# ================= DETECT ENV =================\n",
29
- "def detect_environment():\n",
30
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
- " environments = {\n",
32
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
- " }\n",
35
- " for env_var, (environment, path) in environments.items():\n",
36
- " if env_var in os.environ:\n",
37
- " return environment, path, free_plan\n",
38
- "\n",
39
- "env, root_path, free_plan = detect_environment()\n",
40
- "webui_path = f\"{root_path}/sdw\"\n",
41
  "\n",
42
  "\n",
43
  "# ================ LIBRARIES V2 ================\n",
@@ -76,7 +70,7 @@
76
  " with capture.capture_output() as cap:\n",
77
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
78
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
79
- " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
80
  " del cap\n",
81
  "\n",
82
  " clear_output()\n",
@@ -207,112 +201,9 @@
207
  "\n",
208
  "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
209
  "print(\"📦 Downloading models and stuff...\", end='')\n",
210
- "model_list = {\n",
211
- " \"1.Anime (by XpucT) + INP\": [\n",
212
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
213
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
214
- " ],\n",
215
- " \"2.BluMix [Anime] [V7] + INP\": [\n",
216
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
217
- " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
218
- " ],\n",
219
- " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
220
- " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
221
- " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
222
- " ],\n",
223
- " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
224
- " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
225
- " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
226
- " ],\n",
227
- " \"5.CuteColor [Anime] [V3]\": [\n",
228
- " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
229
- " ],\n",
230
- " \"6.Dark-Sushi-Mix [Anime]\": [\n",
231
- " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
232
- " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
233
- " ],\n",
234
- " \"7.Deliberate [Realism] [V6] + INP\": [\n",
235
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
236
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
237
- " ],\n",
238
- " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
239
- " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
240
- " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
241
- " ],\n",
242
- " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
243
- " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
244
- " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
245
- " ]\n",
246
- "}\n",
247
- "\n",
248
- "vae_list = {\n",
249
- " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"Anime.vae.safetensors\"}],\n",
250
- " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
251
- " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
252
- " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
253
- " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
254
- "}\n",
255
- "\n",
256
- "controlnet_list = {\n",
257
- " \"1.canny\": [\n",
258
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
259
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
260
- " ],\n",
261
- " \"2.openpose\": [\n",
262
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
263
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
264
- " ],\n",
265
- " \"3.depth\": [\n",
266
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
267
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
268
- " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
269
- " ],\n",
270
- " \"4.normal_map\": [\n",
271
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
272
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
273
- " ],\n",
274
- " \"5.mlsd\": [\n",
275
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
276
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
277
- " ],\n",
278
- " \"6.lineart\": [\n",
279
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
280
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
281
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
282
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
283
- " ],\n",
284
- " \"7.soft_edge\": [\n",
285
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
286
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
287
- " ],\n",
288
- " \"8.scribble\": [\n",
289
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
290
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
291
- " ],\n",
292
- " \"9.segmentation\": [\n",
293
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
294
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
295
- " ],\n",
296
- " \"10.shuffle\": [\n",
297
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
298
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
299
- " ],\n",
300
- " \"11.tile\": [\n",
301
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
302
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
303
- " ],\n",
304
- " \"12.inpaint\": [\n",
305
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
306
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
307
- " ],\n",
308
- " \"13.instruct_p2p\": [\n",
309
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
310
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
311
- " ]\n",
312
- "}\n",
313
  "\n",
314
  "url = \"\"\n",
315
- "prefixes = {\n",
316
  " \"model\": models_dir,\n",
317
  " \"vae\": vaes_dir,\n",
318
  " \"lora\": loras_dir,\n",
@@ -324,7 +215,7 @@
324
  "}\n",
325
  "\n",
326
  "extension_repo = []\n",
327
- "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
328
  "!mkdir -p {\" \".join(directories)}\n",
329
  "\n",
330
  "hf_token = huggingface_token if huggingface_token else \"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\"\n",
@@ -335,137 +226,123 @@
335
  "from math import floor\n",
336
  "\n",
337
  "def center_text(text, terminal_width=45):\n",
338
- " text_length = len(text)\n",
339
- " left_padding = floor((terminal_width - text_length) / 2)\n",
340
- " right_padding = terminal_width - text_length - left_padding\n",
341
- " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
342
  "\n",
343
  "def format_output(url, dst_dir, file_name):\n",
344
- " info = f\"[{file_name.split('.')[0]}]\"\n",
345
- " info = center_text(info)\n",
346
  "\n",
347
- " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
348
  " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
349
  " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
350
  " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
351
  "\n",
352
  "''' GET CivitAi API - DATA '''\n",
353
  "\n",
354
- "def strip_(url, file_name=None):\n",
355
- " if 'github.com' in url:\n",
356
- " if '/blob/' in url:\n",
357
- " url = url.replace('/blob/', '/raw/')\n",
358
- "\n",
359
- " elif \"civitai.com\" in url:\n",
360
- " return CivitAi_API(url, file_name)\n",
361
- "\n",
362
- " elif \"huggingface.co\" in url:\n",
363
- " if '/blob/' in url:\n",
364
- " url = url.replace('/blob/', '/resolve/')\n",
365
- " if '?' in url:\n",
366
- " url = url.split('?')[0]\n",
367
- "\n",
368
- " return url\n",
369
- "\n",
370
  "def CivitAi_API(url, file_name=None):\n",
371
- " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
372
- " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
373
  "\n",
374
- " if '?token=' in url:\n",
375
- " url = url.split('?token=')[0]\n",
376
- " if '?type=' in url:\n",
377
- " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
378
- " else:\n",
379
- " url = f\"{url}?token={civitai_token}\"\n",
380
- "\n",
381
- " # Determine model or version id\n",
382
- " if \"civitai.com/models/\" in url:\n",
383
- " if '?modelVersionId=' in url:\n",
384
- " version_id = url.split('?modelVersionId=')[1]\n",
385
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
387
  " else:\n",
388
- " model_id = url.split('/models/')[1].split('/')[0]\n",
389
- " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
390
- " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
391
- " else:\n",
392
- " version_id = url.split('/models/')[1].split('/')[0]\n",
393
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
395
  "\n",
396
- " data = response.json()\n",
397
  "\n",
398
- " if response.status_code != 200:\n",
399
  " return None, None, None, None, None, None, None\n",
400
  "\n",
401
- " # Define model type and name\n",
402
- " if \"civitai.com/models/\" in url:\n",
403
- " if '?modelVersionId=' in url:\n",
404
- " model_type = data['model']['type']\n",
405
- " model_name = data['files'][0]['name']\n",
 
 
 
 
 
 
 
 
 
406
  " else:\n",
407
- " model_type = data['type']\n",
408
- " model_name = data['modelVersions'][0]['files'][0]['name']\n",
409
- " elif 'type=' in url:\n",
410
- " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
411
- " if 'model' in model_type.lower():\n",
412
  " model_name = data['files'][0]['name']\n",
413
- " else:\n",
414
- " model_name = data['files'][1]['name']\n",
415
- " else:\n",
416
- " model_type = data['model']['type']\n",
417
- " model_name = data['files'][0]['name']\n",
418
  "\n",
 
419
  " model_name = file_name or model_name\n",
420
  "\n",
421
- " # Determine DownloadUrl\n",
422
- " if \"civitai.com/models/\" in url:\n",
423
- " if '?modelVersionId=' in url:\n",
424
- " download_url = data.get('downloadUrl')\n",
425
- " else:\n",
426
- " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
427
- " elif 'type=' in url:\n",
428
- " if any(t.lower() in model_type.lower() for t in support_types):\n",
429
- " download_url = data['files'][0]['downloadUrl']\n",
 
 
430
  " else:\n",
431
- " download_url = data['files'][1]['downloadUrl']\n",
432
- " else:\n",
433
- " download_url = data.get('downloadUrl')\n",
434
  "\n",
435
- " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
 
436
  "\n",
437
- " # Find a safe image: level less than 4 | Kaggle\n",
438
- " image_url, image_name = None, None\n",
439
- " if any(t in model_type for t in support_types):\n",
440
- " try:\n",
441
- " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
442
- " if env == 'Kaggle':\n",
443
- " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
444
- " else:\n",
445
- " image_url = images[0]['url'] if images else None\n",
446
- " except KeyError:\n",
447
- " pass\n",
 
 
 
448
  "\n",
449
- " # Generate a name to save the image\n",
450
- " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
451
  "\n",
452
- " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
453
  "\n",
454
  "''' Main Download Code '''\n",
455
  "\n",
 
 
 
 
 
 
 
 
456
  "def download(url):\n",
457
  " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
458
  "\n",
459
  " for link_or_path in links_and_paths:\n",
460
- " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
461
  " handle_manual(link_or_path)\n",
462
  " else:\n",
463
  " url, dst_dir, file_name = link_or_path.split()\n",
464
  " manual_download(url, dst_dir, file_name)\n",
465
  "\n",
466
- " unpack_zip_files()\n",
467
- "\n",
468
- "def unpack_zip_files():\n",
469
  " for directory in directories:\n",
470
  " for root, _, files in os.walk(directory):\n",
471
  " for file in files:\n",
@@ -485,8 +362,8 @@
485
  " if file_name:\n",
486
  " path = re.sub(r'\\[.*?\\]', '', path)\n",
487
  "\n",
488
- " if prefix in prefixes:\n",
489
- " dir = prefixes[prefix]\n",
490
  " if prefix != \"extension\":\n",
491
  " try:\n",
492
  " manual_download(path, dir, file_name=file_name)\n",
@@ -500,60 +377,52 @@
500
  " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
501
  " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
502
  "\n",
503
- " if 'github.com' in url:\n",
504
- " url = strip_(url)\n",
505
- "\n",
506
- " # -- CivitAi APi+ V2 --\n",
507
- " elif 'civitai' in url:\n",
508
- " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
509
  "\n",
 
 
510
  " if image_url and image_name:\n",
511
- " with capture.capture_output() as cap:\n",
512
- " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
513
- " del cap\n",
514
  "\n",
515
- " elif \"huggingface.co\" in url:\n",
516
- " clean_url = strip_(url)\n",
517
- " basename = clean_url.split(\"/\")[-1] if file_name is None else file_name\n",
518
  "\n",
519
  " \"\"\" Formatted info output \"\"\"\n",
520
- " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
521
  " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
522
  "\n",
523
- " # ## -- for my tests --\n",
524
  " # print(url, dst_dir, model_name_or_basename)\n",
525
- " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
526
- " if 'civitai' in url and data and image_name:\n",
527
- " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
 
 
 
528
  " # =====================\n",
 
 
 
529
  "\n",
530
- " # -- Git Hub --\n",
531
- " if 'github.com' in url or 'githubusercontent.com' in url:\n",
532
- " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
533
- "\n",
534
- " # -- GDrive --\n",
535
- " elif 'drive.google' in url:\n",
536
- " try:\n",
537
- " have_drive_link\n",
538
- " except:\n",
539
- " !pip install -q gdown==5.2.0 > /dev/null\n",
540
- " have_drive_link = True\n",
541
  "\n",
542
  " if 'folders' in url:\n",
543
- " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
544
  " else:\n",
545
- " if file_name:\n",
546
- " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
547
- " else:\n",
548
- " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
549
  "\n",
550
- " # -- Hugging Face --\n",
551
- " elif 'huggingface' in url:\n",
552
- " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
553
  "\n",
554
- " # -- Other --\n",
555
  " elif 'http' in url:\n",
556
- " !aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o \"{file_name if file_name else ''}\" '{url}'\n",
557
  "\n",
558
  "''' SubModels - Added URLs '''\n",
559
  "\n",
@@ -593,7 +462,7 @@
593
  "\n",
594
  "''' file.txt - added urls '''\n",
595
  "\n",
596
- "def process_file_download(file_url, prefixes, unique_urls):\n",
597
  " files_urls = \"\"\n",
598
  "\n",
599
  " if file_url.startswith(\"http\"):\n",
@@ -608,8 +477,8 @@
608
  " current_tag = None\n",
609
  " for line in lines:\n",
610
  " line = line.strip()\n",
611
- " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
612
- " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
613
  "\n",
614
  " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
615
  " for url in urls:\n",
@@ -633,13 +502,13 @@
633
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
634
  "\n",
635
  " try:\n",
636
- " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
637
  " except FileNotFoundError:\n",
638
  " pass\n",
639
  "\n",
640
  "# url prefixing\n",
641
  "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
642
- "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
643
  "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
644
  "\n",
645
  "if detailed_download == \"on\":\n",
 
10
  "source": [
11
  "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
  "\n",
13
+ "from models_data import model_list, vae_list, controlnet_list\n",
14
+ "\n",
15
  "import os\n",
16
  "import re\n",
17
  "import time\n",
 
27
  "from urllib.parse import urlparse, parse_qs\n",
28
  "\n",
29
  "\n",
30
+ "# Setup Env\n",
31
+ "env = os.environ.get('ENV_NAME')\n",
32
+ "root_path = os.environ.get('ROOT_PATH')\n",
33
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
34
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
35
  "\n",
36
  "\n",
37
  "# ================ LIBRARIES V2 ================\n",
 
70
  " with capture.capture_output() as cap:\n",
71
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
72
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
73
+ " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n",
74
  " del cap\n",
75
  "\n",
76
  " clear_output()\n",
 
201
  "\n",
202
  "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
203
  "print(\"📦 Downloading models and stuff...\", end='')\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
204
  "\n",
205
  "url = \"\"\n",
206
+ "PREFIXES = {\n",
207
  " \"model\": models_dir,\n",
208
  " \"vae\": vaes_dir,\n",
209
  " \"lora\": loras_dir,\n",
 
215
  "}\n",
216
  "\n",
217
  "extension_repo = []\n",
218
+ "directories = [value for key, value in PREFIXES.items()] # for unpacking zip files\n",
219
  "!mkdir -p {\" \".join(directories)}\n",
220
  "\n",
221
  "hf_token = huggingface_token if huggingface_token else \"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\"\n",
 
226
  "from math import floor\n",
227
  "\n",
228
  "def center_text(text, terminal_width=45):\n",
229
+ " padding = (terminal_width - len(text)) // 2\n",
230
+ " return f\"\\033[1m\\033[36m{' ' * padding}{text}{' ' * padding}\\033[0m\\033[32m\"\n",
 
 
231
  "\n",
232
  "def format_output(url, dst_dir, file_name):\n",
233
+ " info = center_text(f\"[{file_name.split('.')[0]}]\")\n",
234
+ " separation_line = '\\033[32m' + '---' * 20\n",
235
  "\n",
236
+ " print(f\"\\n{separation_line}{info}{separation_line}\")\n",
237
  " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
238
  " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
239
  " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
240
  "\n",
241
  "''' GET CivitAi API - DATA '''\n",
242
  "\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
243
  "def CivitAi_API(url, file_name=None):\n",
244
+ " SUPPORT_TYPES = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
245
+ " CIVITAI_TOKEN = \"62c0c5956b2f9defbd844d754000180b\"\n",
246
  "\n",
247
+ " url = url.split('?token=')[0] if '?token=' in url else url\n",
248
+ " url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f\"{url}?token={CIVITAI_TOKEN}\"\n",
249
+ "\n",
250
+ " def get_model_data(url):\n",
251
+ " if \"civitai.com/models/\" in url:\n",
252
+ " if '?modelVersionId=' in url:\n",
253
+ " version_id = url.split('?modelVersionId=')[1]\n",
254
+ " return requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\").json()\n",
255
+ " else:\n",
256
+ " model_id = url.split('/models/')[1].split('/')[0]\n",
257
+ " return requests.get(f\"https://civitai.com/api/v1/models/{model_id}\").json()\n",
 
 
258
  " else:\n",
259
+ " version_id = url.split('/models/')[1].split('/')[0]\n",
260
+ " return requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\").json()\n",
 
 
 
 
 
261
  "\n",
262
+ " data = get_model_data(url)\n",
263
  "\n",
264
+ " if not data:\n",
265
  " return None, None, None, None, None, None, None\n",
266
  "\n",
267
+ " def extract_model_info(url, data):\n",
268
+ " if \"civitai.com/models/\" in url:\n",
269
+ " if '?modelVersionId=' in url:\n",
270
+ " model_type = data['model']['type']\n",
271
+ " model_name = data['files'][0]['name']\n",
272
+ " else:\n",
273
+ " model_type = data['type']\n",
274
+ " model_name = data['modelVersions'][0]['files'][0]['name']\n",
275
+ " elif 'type=' in url:\n",
276
+ " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
277
+ " if 'model' in model_type.lower():\n",
278
+ " model_name = data['files'][0]['name']\n",
279
+ " else:\n",
280
+ " model_name = data['files'][1]['name']\n",
281
  " else:\n",
282
+ " model_type = data['model']['type']\n",
 
 
 
 
283
  " model_name = data['files'][0]['name']\n",
284
+ " return model_type, model_name\n",
 
 
 
 
285
  "\n",
286
+ " model_type, model_name = extract_model_info(url, data)\n",
287
  " model_name = file_name or model_name\n",
288
  "\n",
289
+ " def get_download_url(url, data, model_type):\n",
290
+ " if \"civitai.com/models/\" in url:\n",
291
+ " if '?modelVersionId=' in url:\n",
292
+ " return data.get('downloadUrl')\n",
293
+ " else:\n",
294
+ " return data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
295
+ " elif 'type=' in url:\n",
296
+ " if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):\n",
297
+ " return data['files'][0]['downloadUrl']\n",
298
+ " else:\n",
299
+ " return data['files'][1]['downloadUrl']\n",
300
  " else:\n",
301
+ " return data.get('downloadUrl')\n",
 
 
302
  "\n",
303
+ " download_url = get_download_url(url, data, model_type)\n",
304
+ " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url)\n",
305
  "\n",
306
+ " def get_image_info(data, model_type, model_name):\n",
307
+ " image_url, image_name = None, None\n",
308
+ " if any(t in model_type for t in SUPPORT_TYPES):\n",
309
+ " try:\n",
310
+ " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
311
+ " if env == 'Kaggle':\n",
312
+ " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
313
+ " else:\n",
314
+ " image_url = images[0]['url'] if images else None\n",
315
+ " except KeyError:\n",
316
+ " pass\n",
317
+ "\n",
318
+ " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
319
+ " return image_url, image_name\n",
320
  "\n",
321
+ " image_url, image_name = get_image_info(data, model_type, model_name)\n",
 
322
  "\n",
323
+ " return f\"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}\", clean_url, model_type, model_name, image_url, image_name, data\n",
324
  "\n",
325
  "''' Main Download Code '''\n",
326
  "\n",
327
+ "def strip_(url):\n",
328
+ " if 'github.com' in url:\n",
329
+ " return url.replace('/blob/', '/raw/')\n",
330
+ " elif \"huggingface.co\" in url:\n",
331
+ " url = url.replace('/blob/', '/resolve/')\n",
332
+ " return url.split('?')[0] if '?' in url else url\n",
333
+ " return url\n",
334
+ "\n",
335
  "def download(url):\n",
336
  " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
337
  "\n",
338
  " for link_or_path in links_and_paths:\n",
339
+ " if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):\n",
340
  " handle_manual(link_or_path)\n",
341
  " else:\n",
342
  " url, dst_dir, file_name = link_or_path.split()\n",
343
  " manual_download(url, dst_dir, file_name)\n",
344
  "\n",
345
+ " # Unpack ZIP files\n",
 
 
346
  " for directory in directories:\n",
347
  " for root, _, files in os.walk(directory):\n",
348
  " for file in files:\n",
 
362
  " if file_name:\n",
363
  " path = re.sub(r'\\[.*?\\]', '', path)\n",
364
  "\n",
365
+ " if prefix in PREFIXES:\n",
366
+ " dir = PREFIXES[prefix]\n",
367
  " if prefix != \"extension\":\n",
368
  " try:\n",
369
  " manual_download(path, dir, file_name=file_name)\n",
 
377
  " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
378
  " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
379
  "\n",
380
+ " clean_url = strip_(url)\n",
 
 
 
 
 
381
  "\n",
382
+ " if 'civitai' in url:\n",
383
+ " url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)\n",
384
  " if image_url and image_name:\n",
385
+ " command = [\"aria2c\"] + aria2_args.split() + [\"-d\", dst_dir, \"-o\", image_name, image_url]\n",
386
+ " subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n",
 
387
  "\n",
388
+ " elif 'github' in url or \"huggingface.co\" in url:\n",
389
+ " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
 
390
  "\n",
391
  " \"\"\" Formatted info output \"\"\"\n",
392
+ " model_name_or_basename = file_name if file_name else basename\n",
393
  " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
394
  "\n",
 
395
  " # print(url, dst_dir, model_name_or_basename)\n",
396
+ " if 'civitai' in url:\n",
397
+ " if not data:\n",
398
+ " print(\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\")\n",
399
+ " if data and image_name:\n",
400
+ " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
401
+ "\n",
402
  " # =====================\n",
403
+ " def run_aria2c(url, dst_dir, file_name=None, args=\"\", header=\"\"):\n",
404
+ " out = f\"-o '{file_name}'\" if file_name else \"\"\n",
405
+ " !aria2c {header} {args} -d {dst_dir} {out} '{url}'\n",
406
  "\n",
407
+ " # -- Google Drive --\n",
408
+ " if 'drive.google' in url:\n",
409
+ " if not globals().get('have_drive_link', False):\n",
410
+ " os.system(\"pip install -U gdown > /dev/null\")\n",
411
+ " globals()['have_drive_link'] = True\n",
 
 
 
 
 
 
412
  "\n",
413
  " if 'folders' in url:\n",
414
+ " os.system(f\"gdown --folder \\\"{url}\\\" -O {dst_dir} --fuzzy -c\")\n",
415
  " else:\n",
416
+ " out_path = f\"{dst_dir}/{file_name}\" if file_name else dst_dir\n",
417
+ " os.system(f\"gdown \\\"{url}\\\" -O {out_path} --fuzzy -c\")\n",
 
 
418
  "\n",
419
+ " # -- GitHub or Hugging Face --\n",
420
+ " elif 'github' in url or 'huggingface' in url:\n",
421
+ " run_aria2c(clean_url, dst_dir, basename, aria2_args, header_option if 'huggingface' in url else '')\n",
422
  "\n",
423
+ " # -- Other HTTP/Sources --\n",
424
  " elif 'http' in url:\n",
425
+ " run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)\n",
426
  "\n",
427
  "''' SubModels - Added URLs '''\n",
428
  "\n",
 
462
  "\n",
463
  "''' file.txt - added urls '''\n",
464
  "\n",
465
+ "def process_file_download(file_url, PREFIXES, unique_urls):\n",
466
  " files_urls = \"\"\n",
467
  "\n",
468
  " if file_url.startswith(\"http\"):\n",
 
477
  " current_tag = None\n",
478
  " for line in lines:\n",
479
  " line = line.strip()\n",
480
+ " if any(f'# {tag}' in line.lower() for tag in PREFIXES):\n",
481
+ " current_tag = next((tag for tag in PREFIXES if tag in line.lower()))\n",
482
  "\n",
483
  " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
484
  " for url in urls:\n",
 
502
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
503
  "\n",
504
  " try:\n",
505
+ " file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)\n",
506
  " except FileNotFoundError:\n",
507
  " pass\n",
508
  "\n",
509
  "# url prefixing\n",
510
  "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
511
+ "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())\n",
512
  "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
513
  "\n",
514
  "if detailed_download == \"on\":\n",
files_cells/notebooks/en/launch_en.ipynb CHANGED
@@ -33,21 +33,13 @@
33
  "from datetime import timedelta\n",
34
  "from IPython.display import clear_output\n",
35
  "\n",
36
- "# ================= DETECT ENV =================\n",
37
- "def detect_environment():\n",
38
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
- " environments = {\n",
40
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
- " }\n",
43
- "\n",
44
- " for env_var, (environment, path) in environments.items():\n",
45
- " if env_var in os.environ:\n",
46
- " return environment, path, free_plan\n",
47
- " return 'Unknown', '/unknown/path', free_plan\n",
48
- "\n",
49
- "env, root_path, free_plan = detect_environment()\n",
50
- "webui_path = f\"{root_path}/sdw\"\n",
51
  "\n",
52
  "def load_settings():\n",
53
  " SETTINGS_FILE = f'{root_path}/settings.json'\n",
@@ -62,6 +54,7 @@
62
  "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
  "change_webui = settings.get('change_webui', \"\")\n",
64
  "\n",
 
65
  "# ======================== TUNNEL V2 ========================\n",
66
  "print('Please Wait...')\n",
67
  "\n",
@@ -95,6 +88,7 @@
95
  "\n",
96
  "clear_output()\n",
97
  "\n",
 
98
  "# =============== Automatic Fixing Path V3 ===============\n",
99
  "paths_to_check = {\n",
100
  " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
 
33
  "from datetime import timedelta\n",
34
  "from IPython.display import clear_output\n",
35
  "\n",
36
+ "\n",
37
+ "# Setup Env\n",
38
+ "env = os.environ.get('ENV_NAME')\n",
39
+ "root_path = os.environ.get('ROOT_PATH')\n",
40
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
41
+ "free_plan = os.environ.get('FREE_PLAN')\n",
42
+ "\n",
 
 
 
 
 
 
 
 
43
  "\n",
44
  "def load_settings():\n",
45
  " SETTINGS_FILE = f'{root_path}/settings.json'\n",
 
54
  "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
55
  "change_webui = settings.get('change_webui', \"\")\n",
56
  "\n",
57
+ "\n",
58
  "# ======================== TUNNEL V2 ========================\n",
59
  "print('Please Wait...')\n",
60
  "\n",
 
88
  "\n",
89
  "clear_output()\n",
90
  "\n",
91
+ "\n",
92
  "# =============== Automatic Fixing Path V3 ===============\n",
93
  "paths_to_check = {\n",
94
  " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
files_cells/notebooks/en/widgets_en.ipynb CHANGED
@@ -32,20 +32,11 @@
32
  "from IPython.display import display, HTML, Javascript, clear_output\n",
33
  "\n",
34
  "\n",
35
- "# ================= DETECT ENV =================\n",
36
- "def detect_environment():\n",
37
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
38
- " environments = {\n",
39
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
40
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
41
- " }\n",
42
- " for env_var, (environment, path) in environments.items():\n",
43
- " if env_var in os.environ:\n",
44
- " return environment, path, free_plan\n",
45
- "\n",
46
- "env, root_path, free_plan = detect_environment()\n",
47
- "webui_path = f\"{root_path}/sdw\"\n",
48
- "!mkdir -p {root_path}\n",
49
  "\n",
50
  "\n",
51
  "# ==================== CSS JS ====================\n",
 
32
  "from IPython.display import display, HTML, Javascript, clear_output\n",
33
  "\n",
34
  "\n",
35
+ "# Setup Env\n",
36
+ "env = os.environ.get('ENV_NAME')\n",
37
+ "root_path = os.environ.get('ROOT_PATH')\n",
38
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
39
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
 
40
  "\n",
41
  "\n",
42
  "# ==================== CSS JS ====================\n",
files_cells/notebooks/ru/auto_cleaner_ru.ipynb CHANGED
@@ -31,19 +31,11 @@
31
  "from IPython.display import display, HTML\n",
32
  "\n",
33
  "\n",
34
- "# ================= DETECT ENV =================\n",
35
- "def detect_environment():\n",
36
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
37
- " environments = {\n",
38
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
39
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
40
- " }\n",
41
- " for env_var, (environment, path) in environments.items():\n",
42
- " if env_var in os.environ:\n",
43
- " return environment, path, free_plan\n",
44
- "\n",
45
- "env, root_path, free_plan = detect_environment()\n",
46
- "webui_path = f\"{root_path}/sdw\"\n",
47
  "\n",
48
  "\n",
49
  "# ==================== CSS ====================\n",
 
31
  "from IPython.display import display, HTML\n",
32
  "\n",
33
  "\n",
34
+ "# Setup Env\n",
35
+ "env = os.environ.get('ENV_NAME')\n",
36
+ "root_path = os.environ.get('ROOT_PATH')\n",
37
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
38
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
39
  "\n",
40
  "\n",
41
  "# ==================== CSS ====================\n",
files_cells/notebooks/ru/downloading_ru.ipynb CHANGED
@@ -10,6 +10,8 @@
10
  "source": [
11
  "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
  "\n",
 
 
13
  "import os\n",
14
  "import re\n",
15
  "import time\n",
@@ -25,19 +27,11 @@
25
  "from urllib.parse import urlparse, parse_qs\n",
26
  "\n",
27
  "\n",
28
- "# ================= DETECT ENV =================\n",
29
- "def detect_environment():\n",
30
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
31
- " environments = {\n",
32
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
33
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
34
- " }\n",
35
- " for env_var, (environment, path) in environments.items():\n",
36
- " if env_var in os.environ:\n",
37
- " return environment, path, free_plan\n",
38
- "\n",
39
- "env, root_path, free_plan = detect_environment()\n",
40
- "webui_path = f\"{root_path}/sdw\"\n",
41
  "\n",
42
  "\n",
43
  "# ================ LIBRARIES V2 ================\n",
@@ -76,7 +70,7 @@
76
  " with capture.capture_output() as cap:\n",
77
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
78
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
79
- " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz\n",
80
  " del cap\n",
81
  "\n",
82
  " clear_output()\n",
@@ -207,112 +201,9 @@
207
  "\n",
208
  "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
209
  "print(\"📦 Скачивание моделей и прочего...\", end='')\n",
210
- "model_list = {\n",
211
- " \"1.Anime (by XpucT) + INP\": [\n",
212
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors\", \"name\": \"Anime_V2.safetensors\"},\n",
213
- " {\"url\": \"https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors\", \"name\": \"Anime_V2-inpainting.safetensors\"}\n",
214
- " ],\n",
215
- " \"2.BluMix [Anime] [V7] + INP\": [\n",
216
- " {\"url\": \"https://civitai.com/api/download/models/361779\", \"name\": \"BluMix_V7.safetensors\"},\n",
217
- " {\"url\": \"https://civitai.com/api/download/models/363850\", \"name\": \"BluMix_V7-inpainting.safetensors\"}\n",
218
- " ],\n",
219
- " \"3.Cetus-Mix [Anime] [V4] + INP\": [\n",
220
- " {\"url\": \"https://civitai.com/api/download/models/130298\", \"name\": \"CetusMix_V4.safetensors\"},\n",
221
- " {\"url\": \"https://civitai.com/api/download/models/139882\", \"name\": \"CetusMix_V4-inpainting.safetensors\"}\n",
222
- " ],\n",
223
- " \"4.Counterfeit [Anime] [V3] + INP\": [\n",
224
- " {\"url\": \"https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors\", \"name\": \"Counterfeit_V3.safetensors\"},\n",
225
- " {\"url\": \"https://civitai.com/api/download/models/137911\", \"name\": \"Counterfeit_V3-inpainting.safetensors\"}\n",
226
- " ],\n",
227
- " \"5.CuteColor [Anime] [V3]\": [\n",
228
- " {\"url\": \"https://civitai.com/api/download/models/138754\", \"name\": \"CuteColor_V3.safetensors\"}\n",
229
- " ],\n",
230
- " \"6.Dark-Sushi-Mix [Anime]\": [\n",
231
- " {\"url\": \"https://civitai.com/api/download/models/101640\", \"name\": \"DarkSushiMix_2_5D.safetensors\"},\n",
232
- " {\"url\": \"https://civitai.com/api/download/models/56071\", \"name\": \"DarkSushiMix_colorful.safetensors\"}\n",
233
- " ],\n",
234
- " \"7.Deliberate [Realism] [V6] + INP\": [\n",
235
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors\", \"name\": \"Deliberate_V6.safetensors\"},\n",
236
- " {\"url\": \"https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors\", \"name\": \"Deliberate_V6-inpainting.safetensors\"}\n",
237
- " ],\n",
238
- " \"8.Meina-Mix [Anime] [V11] + INP\": [\n",
239
- " {\"url\": \"https://civitai.com/api/download/models/119057\", \"name\": \"MeinaMix_V11.safetensors\"},\n",
240
- " {\"url\": \"https://civitai.com/api/download/models/120702\", \"name\": \"MeinaMix_V11-inpainting.safetensors\"}\n",
241
- " ],\n",
242
- " \"9.Mix-Pro [Anime] [V4] + INP\": [\n",
243
- " {\"url\": \"https://civitai.com/api/download/models/125668\", \"name\": \"MixPro_V4.safetensors\"},\n",
244
- " {\"url\": \"https://civitai.com/api/download/models/139878\", \"name\": \"MixPro_V4-inpainting.safetensors\"}\n",
245
- " ]\n",
246
- "}\n",
247
- "\n",
248
- "vae_list = {\n",
249
- " \"1.Anime.vae\": [{\"url\": \"https://civitai.com/api/download/models/311162\", \"name\": \"Anime.vae.safetensors\"}],\n",
250
- " \"2.Anything.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors\", \"name\": \"Anything.vae.safetensors\"}],\n",
251
- " \"3.Blessed2.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors\", \"name\": \"Blessed2.vae.safetensors\"}],\n",
252
- " \"4.ClearVae.vae\": [{\"url\": \"https://civitai.com/api/download/models/88156\", \"name\": \"ClearVae_23.vae.safetensors\"}],\n",
253
- " \"5.WD.vae\": [{\"url\": \"https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors\", \"name\": \"WD.vae.safetensors\"}]\n",
254
- "}\n",
255
- "\n",
256
- "controlnet_list = {\n",
257
- " \"1.canny\": [\n",
258
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors\", \"name\": \"control_v11p_sd15_canny_fp16.safetensors\"},\n",
259
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml\", \"name\": \"control_v11p_sd15_canny_fp16.yaml\"}\n",
260
- " ],\n",
261
- " \"2.openpose\": [\n",
262
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors\", \"name\": \"control_v11p_sd15_openpose_fp16.safetensors\"},\n",
263
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml\", \"name\": \"control_v11p_sd15_openpose_fp16.yaml\"}\n",
264
- " ],\n",
265
- " \"3.depth\": [\n",
266
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors\", \"name\": \"control_v11f1p_sd15_depth_fp16.safetensors\"},\n",
267
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml\", \"name\": \"control_v11f1p_sd15_depth_fp16.yaml\"},\n",
268
- " {\"url\": \"https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors\", \"name\": \"control_v11p_sd15_depth_anything_fp16.safetensors\"}\n",
269
- " ],\n",
270
- " \"4.normal_map\": [\n",
271
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors\", \"name\": \"control_v11p_sd15_normalbae_fp16.safetensors\"},\n",
272
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml\", \"name\": \"control_v11p_sd15_normalbae_fp16.yaml\"}\n",
273
- " ],\n",
274
- " \"5.mlsd\": [\n",
275
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors\", \"name\": \"control_v11p_sd15_mlsd_fp16.safetensors\"},\n",
276
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml\", \"name\": \"control_v11p_sd15_mlsd_fp16.yaml\"}\n",
277
- " ],\n",
278
- " \"6.lineart\": [\n",
279
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors\", \"name\": \"control_v11p_sd15_lineart_fp16.safetensors\"},\n",
280
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.safetensors\"},\n",
281
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml\", \"name\": \"control_v11p_sd15_lineart_fp16.yaml\"},\n",
282
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml\", \"name\": \"control_v11p_sd15s2_lineart_anime_fp16.yaml\"}\n",
283
- " ],\n",
284
- " \"7.soft_edge\": [\n",
285
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors\", \"name\": \"control_v11p_sd15_softedge_fp16.safetensors\"},\n",
286
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml\", \"name\": \"control_v11p_sd15_softedge_fp16.yaml\"}\n",
287
- " ],\n",
288
- " \"8.scribble\": [\n",
289
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors\", \"name\": \"control_v11p_sd15_scribble_fp16.safetensors\"},\n",
290
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml\", \"name\": \"control_v11p_sd15_scribble_fp16.yaml\"}\n",
291
- " ],\n",
292
- " \"9.segmentation\": [\n",
293
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors\", \"name\": \"control_v11p_sd15_seg_fp16.safetensors\"},\n",
294
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml\", \"name\": \"control_v11p_sd15_seg_fp16.yaml\"}\n",
295
- " ],\n",
296
- " \"10.shuffle\": [\n",
297
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors\", \"name\": \"control_v11e_sd15_shuffle_fp16.safetensors\"},\n",
298
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml\", \"name\": \"control_v11e_sd15_shuffle_fp16.yaml\"}\n",
299
- " ],\n",
300
- " \"11.tile\": [\n",
301
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors\", \"name\": \"control_v11f1e_sd15_tile_fp16.safetensors\"},\n",
302
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml\", \"name\": \"control_v11f1e_sd15_tile_fp16.yaml\"}\n",
303
- " ],\n",
304
- " \"12.inpaint\": [\n",
305
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors\", \"name\": \"control_v11p_sd15_inpaint_fp16.safetensors\"},\n",
306
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml\", \"name\": \"control_v11p_sd15_inpaint_fp16.yaml\"}\n",
307
- " ],\n",
308
- " \"13.instruct_p2p\": [\n",
309
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors\", \"name\": \"control_v11e_sd15_ip2p_fp16.safetensors\"},\n",
310
- " {\"url\": \"https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml\", \"name\": \"control_v11e_sd15_ip2p_fp16.yaml\"}\n",
311
- " ]\n",
312
- "}\n",
313
  "\n",
314
  "url = \"\"\n",
315
- "prefixes = {\n",
316
  " \"model\": models_dir,\n",
317
  " \"vae\": vaes_dir,\n",
318
  " \"lora\": loras_dir,\n",
@@ -324,7 +215,7 @@
324
  "}\n",
325
  "\n",
326
  "extension_repo = []\n",
327
- "directories = [value for key, value in prefixes.items()] # for unpucking zip files\n",
328
  "!mkdir -p {\" \".join(directories)}\n",
329
  "\n",
330
  "hf_token = huggingface_token if huggingface_token else \"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\"\n",
@@ -335,137 +226,123 @@
335
  "from math import floor\n",
336
  "\n",
337
  "def center_text(text, terminal_width=45):\n",
338
- " text_length = len(text)\n",
339
- " left_padding = floor((terminal_width - text_length) / 2)\n",
340
- " right_padding = terminal_width - text_length - left_padding\n",
341
- " return f\"\\033[1m\\033[36m{' ' * left_padding}{text}{' ' * right_padding}\\033[0m\\033[32m\"\n",
342
  "\n",
343
  "def format_output(url, dst_dir, file_name):\n",
344
- " info = f\"[{file_name.split('.')[0]}]\"\n",
345
- " info = center_text(info)\n",
346
  "\n",
347
- " print(f\"\\n\\033[32m{'---'*20}]{info}[{'---'*20}\")\n",
348
  " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
349
  " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
350
  " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
351
  "\n",
352
  "''' GET CivitAi API - DATA '''\n",
353
  "\n",
354
- "def strip_(url, file_name=None):\n",
355
- " if 'github.com' in url:\n",
356
- " if '/blob/' in url:\n",
357
- " url = url.replace('/blob/', '/raw/')\n",
358
- "\n",
359
- " elif \"civitai.com\" in url:\n",
360
- " return CivitAi_API(url, file_name)\n",
361
- "\n",
362
- " elif \"huggingface.co\" in url:\n",
363
- " if '/blob/' in url:\n",
364
- " url = url.replace('/blob/', '/resolve/')\n",
365
- " if '?' in url:\n",
366
- " url = url.split('?')[0]\n",
367
- "\n",
368
- " return url\n",
369
- "\n",
370
  "def CivitAi_API(url, file_name=None):\n",
371
- " support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
372
- " civitai_token = \"62c0c5956b2f9defbd844d754000180b\"\n",
373
  "\n",
374
- " if '?token=' in url:\n",
375
- " url = url.split('?token=')[0]\n",
376
- " if '?type=' in url:\n",
377
- " url = url.replace('?type=', f'?token={civitai_token}&type=')\n",
378
- " else:\n",
379
- " url = f\"{url}?token={civitai_token}\"\n",
380
- "\n",
381
- " # Determine model or version id\n",
382
- " if \"civitai.com/models/\" in url:\n",
383
- " if '?modelVersionId=' in url:\n",
384
- " version_id = url.split('?modelVersionId=')[1]\n",
385
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
386
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
387
  " else:\n",
388
- " model_id = url.split('/models/')[1].split('/')[0]\n",
389
- " response = requests.get(f\"https://civitai.com/api/v1/models/{model_id}\")\n",
390
- " # print(f\"end - https://civitai.com/api/v1/models/{model_id}\")\n",
391
- " else:\n",
392
- " version_id = url.split('/models/')[1].split('/')[0]\n",
393
- " response = requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\")\n",
394
- " # print(f\"end - https://civitai.com/api/v1/model-versions/{version_id}\")\n",
395
  "\n",
396
- " data = response.json()\n",
397
  "\n",
398
- " if response.status_code != 200:\n",
399
  " return None, None, None, None, None, None, None\n",
400
  "\n",
401
- " # Define model type and name\n",
402
- " if \"civitai.com/models/\" in url:\n",
403
- " if '?modelVersionId=' in url:\n",
404
- " model_type = data['model']['type']\n",
405
- " model_name = data['files'][0]['name']\n",
 
 
 
 
 
 
 
 
 
406
  " else:\n",
407
- " model_type = data['type']\n",
408
- " model_name = data['modelVersions'][0]['files'][0]['name']\n",
409
- " elif 'type=' in url:\n",
410
- " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
411
- " if 'model' in model_type.lower():\n",
412
  " model_name = data['files'][0]['name']\n",
413
- " else:\n",
414
- " model_name = data['files'][1]['name']\n",
415
- " else:\n",
416
- " model_type = data['model']['type']\n",
417
- " model_name = data['files'][0]['name']\n",
418
  "\n",
 
419
  " model_name = file_name or model_name\n",
420
  "\n",
421
- " # Determine DownloadUrl\n",
422
- " if \"civitai.com/models/\" in url:\n",
423
- " if '?modelVersionId=' in url:\n",
424
- " download_url = data.get('downloadUrl')\n",
425
- " else:\n",
426
- " download_url = data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
427
- " elif 'type=' in url:\n",
428
- " if any(t.lower() in model_type.lower() for t in support_types):\n",
429
- " download_url = data['files'][0]['downloadUrl']\n",
 
 
430
  " else:\n",
431
- " download_url = data['files'][1]['downloadUrl']\n",
432
- " else:\n",
433
- " download_url = data.get('downloadUrl')\n",
434
  "\n",
435
- " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token\n",
 
436
  "\n",
437
- " # Find a safe image: level less than 4 | Kaggle\n",
438
- " image_url, image_name = None, None\n",
439
- " if any(t in model_type for t in support_types):\n",
440
- " try:\n",
441
- " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
442
- " if env == 'Kaggle':\n",
443
- " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
444
- " else:\n",
445
- " image_url = images[0]['url'] if images else None\n",
446
- " except KeyError:\n",
447
- " pass\n",
 
 
 
448
  "\n",
449
- " # Generate a name to save the image\n",
450
- " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
451
  "\n",
452
- " return f\"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}\", clean_url, model_type, model_name, image_url, image_name, data\n",
453
  "\n",
454
  "''' Main Download Code '''\n",
455
  "\n",
 
 
 
 
 
 
 
 
456
  "def download(url):\n",
457
  " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
458
  "\n",
459
  " for link_or_path in links_and_paths:\n",
460
- " if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):\n",
461
  " handle_manual(link_or_path)\n",
462
  " else:\n",
463
  " url, dst_dir, file_name = link_or_path.split()\n",
464
  " manual_download(url, dst_dir, file_name)\n",
465
  "\n",
466
- " unpack_zip_files()\n",
467
- "\n",
468
- "def unpack_zip_files():\n",
469
  " for directory in directories:\n",
470
  " for root, _, files in os.walk(directory):\n",
471
  " for file in files:\n",
@@ -485,8 +362,8 @@
485
  " if file_name:\n",
486
  " path = re.sub(r'\\[.*?\\]', '', path)\n",
487
  "\n",
488
- " if prefix in prefixes:\n",
489
- " dir = prefixes[prefix]\n",
490
  " if prefix != \"extension\":\n",
491
  " try:\n",
492
  " manual_download(path, dir, file_name=file_name)\n",
@@ -500,60 +377,52 @@
500
  " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
501
  " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
502
  "\n",
503
- " if 'github.com' in url:\n",
504
- " url = strip_(url)\n",
505
- "\n",
506
- " # -- CivitAi APi+ V2 --\n",
507
- " elif 'civitai' in url:\n",
508
- " url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)\n",
509
  "\n",
 
 
510
  " if image_url and image_name:\n",
511
- " with capture.capture_output() as cap:\n",
512
- " !aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'\n",
513
- " del cap\n",
514
  "\n",
515
- " elif \"huggingface.co\" in url:\n",
516
- " clean_url = strip_(url)\n",
517
- " basename = clean_url.split(\"/\")[-1] if file_name is None else file_name\n",
518
  "\n",
519
  " \"\"\" Formatted info output \"\"\"\n",
520
- " model_name_or_basename = file_name if not 'huggingface' in url else basename\n",
521
  " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
522
  "\n",
523
- " # ## -- for my tests --\n",
524
  " # print(url, dst_dir, model_name_or_basename)\n",
525
- " print(f\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\") if 'civitai' in url and not data else None\n",
526
- " if 'civitai' in url and data and image_name:\n",
527
- " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
 
 
 
528
  " # =====================\n",
 
 
 
529
  "\n",
530
- " # -- Git Hub --\n",
531
- " if 'github.com' in url or 'githubusercontent.com' in url:\n",
532
- " !aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
533
- "\n",
534
- " # -- GDrive --\n",
535
- " elif 'drive.google' in url:\n",
536
- " try:\n",
537
- " have_drive_link\n",
538
- " except:\n",
539
- " !pip install -q gdown==5.2.0 > /dev/null\n",
540
- " have_drive_link = True\n",
541
  "\n",
542
  " if 'folders' in url:\n",
543
- " !gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c\n",
544
  " else:\n",
545
- " if file_name:\n",
546
- " !gdown \"{url}\" -O {dst_dir}/{file_name} --fuzzy -c\n",
547
- " else:\n",
548
- " !gdown \"{url}\" -O {dst_dir} --fuzzy -c\n",
549
  "\n",
550
- " # -- Hugging Face --\n",
551
- " elif 'huggingface' in url:\n",
552
- " !aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'\n",
553
  "\n",
554
- " # -- Other --\n",
555
  " elif 'http' in url:\n",
556
- " !aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o \"{file_name if file_name else ''}\" '{url}'\n",
557
  "\n",
558
  "''' SubModels - Added URLs '''\n",
559
  "\n",
@@ -593,7 +462,7 @@
593
  "\n",
594
  "''' file.txt - added urls '''\n",
595
  "\n",
596
- "def process_file_download(file_url, prefixes, unique_urls):\n",
597
  " files_urls = \"\"\n",
598
  "\n",
599
  " if file_url.startswith(\"http\"):\n",
@@ -608,8 +477,8 @@
608
  " current_tag = None\n",
609
  " for line in lines:\n",
610
  " line = line.strip()\n",
611
- " if any(f'# {tag}' in line.lower() for tag in prefixes):\n",
612
- " current_tag = next((tag for tag in prefixes if tag in line.lower()))\n",
613
  "\n",
614
  " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
615
  " for url in urls:\n",
@@ -633,13 +502,13 @@
633
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
634
  "\n",
635
  " try:\n",
636
- " file_urls += process_file_download(custom_file_url, prefixes, unique_urls)\n",
637
  " except FileNotFoundError:\n",
638
  " pass\n",
639
  "\n",
640
  "# url prefixing\n",
641
  "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
642
- "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())\n",
643
  "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
644
  "\n",
645
  "if detailed_download == \"on\":\n",
 
10
  "source": [
11
  "##~ DOWNLOADING CODE | BY: ANXETY ~##\n",
12
  "\n",
13
+ "from models_data import model_list, vae_list, controlnet_list\n",
14
+ "\n",
15
  "import os\n",
16
  "import re\n",
17
  "import time\n",
 
27
  "from urllib.parse import urlparse, parse_qs\n",
28
  "\n",
29
  "\n",
30
+ "# Setup Env\n",
31
+ "env = os.environ.get('ENV_NAME')\n",
32
+ "root_path = os.environ.get('ROOT_PATH')\n",
33
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
34
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
35
  "\n",
36
  "\n",
37
  "# ================ LIBRARIES V2 ================\n",
 
70
  " with capture.capture_output() as cap:\n",
71
  " !curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}\n",
72
  " !curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl\n",
73
+ " !curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz\n",
74
  " del cap\n",
75
  "\n",
76
  " clear_output()\n",
 
201
  "\n",
202
  "## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!\n",
203
  "print(\"📦 Скачивание моделей и прочего...\", end='')\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
204
  "\n",
205
  "url = \"\"\n",
206
+ "PREFIXES = {\n",
207
  " \"model\": models_dir,\n",
208
  " \"vae\": vaes_dir,\n",
209
  " \"lora\": loras_dir,\n",
 
215
  "}\n",
216
  "\n",
217
  "extension_repo = []\n",
218
+ "directories = [value for key, value in PREFIXES.items()] # for unpucking zip files\n",
219
  "!mkdir -p {\" \".join(directories)}\n",
220
  "\n",
221
  "hf_token = huggingface_token if huggingface_token else \"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\"\n",
 
226
  "from math import floor\n",
227
  "\n",
228
  "def center_text(text, terminal_width=45):\n",
229
+ " padding = (terminal_width - len(text)) // 2\n",
230
+ " return f\"\\033[1m\\033[36m{' ' * padding}{text}{' ' * padding}\\033[0m\\033[32m\"\n",
 
 
231
  "\n",
232
  "def format_output(url, dst_dir, file_name):\n",
233
+ " info = center_text(f\"[{file_name.split('.')[0]}]\")\n",
234
+ " separation_line = '\\033[32m' + '---' * 20\n",
235
  "\n",
236
+ " print(f\"\\n{separation_line}{info}{separation_line}\")\n",
237
  " print(f\"\\033[33mURL: \\033[34m{url}\")\n",
238
  " print(f\"\\033[33mSAVE DIR: \\033[34m{dst_dir}\")\n",
239
  " print(f\"\\033[33mFILE NAME: \\033[34m{file_name}\\033[0m\")\n",
240
  "\n",
241
  "''' GET CivitAi API - DATA '''\n",
242
  "\n",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
243
  "def CivitAi_API(url, file_name=None):\n",
244
+ " SUPPORT_TYPES = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')\n",
245
+ " CIVITAI_TOKEN = \"62c0c5956b2f9defbd844d754000180b\"\n",
246
  "\n",
247
+ " url = url.split('?token=')[0] if '?token=' in url else url\n",
248
+ " url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f\"{url}?token={CIVITAI_TOKEN}\"\n",
249
+ "\n",
250
+ " def get_model_data(url):\n",
251
+ " if \"civitai.com/models/\" in url:\n",
252
+ " if '?modelVersionId=' in url:\n",
253
+ " version_id = url.split('?modelVersionId=')[1]\n",
254
+ " return requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\").json()\n",
255
+ " else:\n",
256
+ " model_id = url.split('/models/')[1].split('/')[0]\n",
257
+ " return requests.get(f\"https://civitai.com/api/v1/models/{model_id}\").json()\n",
 
 
258
  " else:\n",
259
+ " version_id = url.split('/models/')[1].split('/')[0]\n",
260
+ " return requests.get(f\"https://civitai.com/api/v1/model-versions/{version_id}\").json()\n",
 
 
 
 
 
261
  "\n",
262
+ " data = get_model_data(url)\n",
263
  "\n",
264
+ " if not data:\n",
265
  " return None, None, None, None, None, None, None\n",
266
  "\n",
267
+ " def extract_model_info(url, data):\n",
268
+ " if \"civitai.com/models/\" in url:\n",
269
+ " if '?modelVersionId=' in url:\n",
270
+ " model_type = data['model']['type']\n",
271
+ " model_name = data['files'][0]['name']\n",
272
+ " else:\n",
273
+ " model_type = data['type']\n",
274
+ " model_name = data['modelVersions'][0]['files'][0]['name']\n",
275
+ " elif 'type=' in url:\n",
276
+ " model_type = parse_qs(urlparse(url).query).get('type', [''])[0]\n",
277
+ " if 'model' in model_type.lower():\n",
278
+ " model_name = data['files'][0]['name']\n",
279
+ " else:\n",
280
+ " model_name = data['files'][1]['name']\n",
281
  " else:\n",
282
+ " model_type = data['model']['type']\n",
 
 
 
 
283
  " model_name = data['files'][0]['name']\n",
284
+ " return model_type, model_name\n",
 
 
 
 
285
  "\n",
286
+ " model_type, model_name = extract_model_info(url, data)\n",
287
  " model_name = file_name or model_name\n",
288
  "\n",
289
+ " def get_download_url(url, data, model_type):\n",
290
+ " if \"civitai.com/models/\" in url:\n",
291
+ " if '?modelVersionId=' in url:\n",
292
+ " return data.get('downloadUrl')\n",
293
+ " else:\n",
294
+ " return data[\"modelVersions\"][0].get(\"downloadUrl\", \"\")\n",
295
+ " elif 'type=' in url:\n",
296
+ " if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):\n",
297
+ " return data['files'][0]['downloadUrl']\n",
298
+ " else:\n",
299
+ " return data['files'][1]['downloadUrl']\n",
300
  " else:\n",
301
+ " return data.get('downloadUrl')\n",
 
 
302
  "\n",
303
+ " download_url = get_download_url(url, data, model_type)\n",
304
+ " clean_url = re.sub(r'[?&]token=[^&]*', '', download_url)\n",
305
  "\n",
306
+ " def get_image_info(data, model_type, model_name):\n",
307
+ " image_url, image_name = None, None\n",
308
+ " if any(t in model_type for t in SUPPORT_TYPES):\n",
309
+ " try:\n",
310
+ " images = data.get('images') or data['modelVersions'][0].get('images', [])\n",
311
+ " if env == 'Kaggle':\n",
312
+ " image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)\n",
313
+ " else:\n",
314
+ " image_url = images[0]['url'] if images else None\n",
315
+ " except KeyError:\n",
316
+ " pass\n",
317
+ "\n",
318
+ " image_name = f\"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}\" if image_url else None\n",
319
+ " return image_url, image_name\n",
320
  "\n",
321
+ " image_url, image_name = get_image_info(data, model_type, model_name)\n",
 
322
  "\n",
323
+ " return f\"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}\", clean_url, model_type, model_name, image_url, image_name, data\n",
324
  "\n",
325
  "''' Main Download Code '''\n",
326
  "\n",
327
+ "def strip_(url):\n",
328
+ " if 'github.com' in url:\n",
329
+ " return url.replace('/blob/', '/raw/')\n",
330
+ " elif \"huggingface.co\" in url:\n",
331
+ " url = url.replace('/blob/', '/resolve/')\n",
332
+ " return url.split('?')[0] if '?' in url else url\n",
333
+ " return url\n",
334
+ "\n",
335
  "def download(url):\n",
336
  " links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]\n",
337
  "\n",
338
  " for link_or_path in links_and_paths:\n",
339
+ " if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):\n",
340
  " handle_manual(link_or_path)\n",
341
  " else:\n",
342
  " url, dst_dir, file_name = link_or_path.split()\n",
343
  " manual_download(url, dst_dir, file_name)\n",
344
  "\n",
345
+ " # Unpuck ZIPs Files\n",
 
 
346
  " for directory in directories:\n",
347
  " for root, _, files in os.walk(directory):\n",
348
  " for file in files:\n",
 
362
  " if file_name:\n",
363
  " path = re.sub(r'\\[.*?\\]', '', path)\n",
364
  "\n",
365
+ " if prefix in PREFIXES:\n",
366
+ " dir = PREFIXES[prefix]\n",
367
  " if prefix != \"extension\":\n",
368
  " try:\n",
369
  " manual_download(path, dir, file_name=file_name)\n",
 
377
  " aria2c_header = \"--header='User-Agent: Mozilla/5.0' --allow-overwrite=true\"\n",
378
  " aria2_args = \"--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5\"\n",
379
  "\n",
380
+ " clean_url = strip_(url)\n",
 
 
 
 
 
381
  "\n",
382
+ " if 'civitai' in url:\n",
383
+ " url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)\n",
384
  " if image_url and image_name:\n",
385
+ " command = [\"aria2c\"] + aria2_args.split() + [\"-d\", dst_dir, \"-o\", image_name, image_url]\n",
386
+ " subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)\n",
 
387
  "\n",
388
+ " elif 'github' in url or \"huggingface.co\" in url:\n",
389
+ " basename = url.split(\"/\")[-1] if file_name is None else file_name\n",
 
390
  "\n",
391
  " \"\"\" Formatted info output \"\"\"\n",
392
+ " model_name_or_basename = file_name if file_name else basename\n",
393
  " format_output(clean_url or url, dst_dir, model_name_or_basename)\n",
394
  "\n",
 
395
  " # print(url, dst_dir, model_name_or_basename)\n",
396
+ " if 'civitai' in url:\n",
397
+ " if not data:\n",
398
+ " print(\"\\033[31m[Data Info]:\\033[0m Failed to retrieve data from the API.\\n\")\n",
399
+ " if data and image_name:\n",
400
+ " print(f\"\\033[32m[Preview DL]:\\033[0m {image_name} - {image_url}\\n\")\n",
401
+ "\n",
402
  " # =====================\n",
403
+ " def run_aria2c(url, dst_dir, file_name=None, args=\"\", header=\"\"):\n",
404
+ " out = f\"-o '{file_name}'\" if file_name else \"\"\n",
405
+ " !aria2c {header} {args} -d {dst_dir} {out} '{url}'\n",
406
  "\n",
407
+ " # -- Google Drive --\n",
408
+ " if 'drive.google' in url:\n",
409
+ " if not globals().get('have_drive_link', False):\n",
410
+ " os.system(\"pip install -U gdown > /dev/null\")\n",
411
+ " globals()['have_drive_link'] = True\n",
 
 
 
 
 
 
412
  "\n",
413
  " if 'folders' in url:\n",
414
+ " os.system(f\"gdown --folder \\\"{url}\\\" -O {dst_dir} --fuzzy -c\")\n",
415
  " else:\n",
416
+ " out_path = f\"{dst_dir}/{file_name}\" if file_name else dst_dir\n",
417
+ " os.system(f\"gdown \\\"{url}\\\" -O {out_path} --fuzzy -c\")\n",
 
 
418
  "\n",
419
+ " # -- GitHub or Hugging Face --\n",
420
+ " elif 'github' in url or 'huggingface' in url:\n",
421
+ " run_aria2c(clean_url, dst_dir, basename, aria2_args, header_option if 'huggingface' in url else '')\n",
422
  "\n",
423
+ " # -- Other HTTP/Sources --\n",
424
  " elif 'http' in url:\n",
425
+ " run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)\n",
426
  "\n",
427
  "''' SubModels - Added URLs '''\n",
428
  "\n",
 
462
  "\n",
463
  "''' file.txt - added urls '''\n",
464
  "\n",
465
+ "def process_file_download(file_url, PREFIXES, unique_urls):\n",
466
  " files_urls = \"\"\n",
467
  "\n",
468
  " if file_url.startswith(\"http\"):\n",
 
477
  " current_tag = None\n",
478
  " for line in lines:\n",
479
  " line = line.strip()\n",
480
+ " if any(f'# {tag}' in line.lower() for tag in PREFIXES):\n",
481
+ " current_tag = next((tag for tag in PREFIXES if tag in line.lower()))\n",
482
  "\n",
483
  " urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls\n",
484
  " for url in urls:\n",
 
502
  " custom_file_url = f'{root_path}/{custom_file_url}'\n",
503
  "\n",
504
  " try:\n",
505
+ " file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)\n",
506
  " except FileNotFoundError:\n",
507
  " pass\n",
508
  "\n",
509
  "# url prefixing\n",
510
  "urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)\n",
511
+ "prefixed_urls = (f\"{prefix}:{url}\" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())\n",
512
  "url += \", \".join(prefixed_urls) + \", \" + file_urls\n",
513
  "\n",
514
  "if detailed_download == \"on\":\n",
files_cells/notebooks/ru/launch_ru.ipynb CHANGED
@@ -33,21 +33,13 @@
33
  "from datetime import timedelta\n",
34
  "from IPython.display import clear_output\n",
35
  "\n",
36
- "# ================= DETECT ENV =================\n",
37
- "def detect_environment():\n",
38
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
39
- " environments = {\n",
40
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
41
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
42
- " }\n",
43
- "\n",
44
- " for env_var, (environment, path) in environments.items():\n",
45
- " if env_var in os.environ:\n",
46
- " return environment, path, free_plan\n",
47
- " return 'Unknown', '/unknown/path', free_plan\n",
48
- "\n",
49
- "env, root_path, free_plan = detect_environment()\n",
50
- "webui_path = f\"{root_path}/sdw\"\n",
51
  "\n",
52
  "def load_settings():\n",
53
  " SETTINGS_FILE = f'{root_path}/settings.json'\n",
@@ -62,6 +54,7 @@
62
  "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
63
  "change_webui = settings.get('change_webui', \"\")\n",
64
  "\n",
 
65
  "# ======================== TUNNEL V2 ========================\n",
66
  "print('Please Wait...')\n",
67
  "\n",
@@ -95,6 +88,7 @@
95
  "\n",
96
  "clear_output()\n",
97
  "\n",
 
98
  "# =============== Automatic Fixing Path V3 ===============\n",
99
  "paths_to_check = {\n",
100
  " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
 
33
  "from datetime import timedelta\n",
34
  "from IPython.display import clear_output\n",
35
  "\n",
36
+ "\n",
37
+ "# Setup Env\n",
38
+ "env = os.environ.get('ENV_NAME')\n",
39
+ "root_path = os.environ.get('ROOT_PATH')\n",
40
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
41
+ "free_plan = os.environ.get('FREE_PLAN')\n",
42
+ "\n",
 
 
 
 
 
 
 
 
43
  "\n",
44
  "def load_settings():\n",
45
  " SETTINGS_FILE = f'{root_path}/settings.json'\n",
 
54
  "commandline_arguments = settings.get('commandline_arguments', \"\")\n",
55
  "change_webui = settings.get('change_webui', \"\")\n",
56
  "\n",
57
+ "\n",
58
  "# ======================== TUNNEL V2 ========================\n",
59
  "print('Please Wait...')\n",
60
  "\n",
 
88
  "\n",
89
  "clear_output()\n",
90
  "\n",
91
+ "\n",
92
  "# =============== Automatic Fixing Path V3 ===============\n",
93
  "paths_to_check = {\n",
94
  " \"tagger_hf_cache_dir\": f\"{webui_path}/models/interrogators/\",\n",
files_cells/notebooks/ru/widgets_ru.ipynb CHANGED
@@ -32,20 +32,11 @@
32
  "from IPython.display import display, HTML, Javascript, clear_output\n",
33
  "\n",
34
  "\n",
35
- "# ================= DETECT ENV =================\n",
36
- "def detect_environment():\n",
37
- " free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)\n",
38
- " environments = {\n",
39
- " 'COLAB_GPU': ('Google Colab', \"/root\" if free_plan else \"/content\"),\n",
40
- " 'KAGGLE_URL_BASE': ('Kaggle', \"/kaggle/working/content\")\n",
41
- " }\n",
42
- " for env_var, (environment, path) in environments.items():\n",
43
- " if env_var in os.environ:\n",
44
- " return environment, path, free_plan\n",
45
- "\n",
46
- "env, root_path, free_plan = detect_environment()\n",
47
- "webui_path = f\"{root_path}/sdw\"\n",
48
- "!mkdir -p {root_path}\n",
49
  "\n",
50
  "\n",
51
  "# ==================== CSS JS ====================\n",
 
32
  "from IPython.display import display, HTML, Javascript, clear_output\n",
33
  "\n",
34
  "\n",
35
+ "# Setup Env\n",
36
+ "env = os.environ.get('ENV_NAME')\n",
37
+ "root_path = os.environ.get('ROOT_PATH')\n",
38
+ "webui_path = os.environ.get('WEBUI_PATH')\n",
39
+ "free_plan = os.environ.get('FREE_PLAN')\n",
 
 
 
 
 
 
 
 
 
40
  "\n",
41
  "\n",
42
  "# ==================== CSS JS ====================\n",
files_cells/python/en/auto_cleaner_en.py CHANGED
@@ -7,19 +7,11 @@ from ipywidgets import Label, Button, VBox, HBox
7
  from IPython.display import display, HTML, Javascript
8
 
9
 
10
- # ================= DETECT ENV =================
11
- def detect_environment():
12
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
13
- environments = {
14
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
15
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
16
- }
17
- for env_var, (environment, path) in environments.items():
18
- if env_var in os.environ:
19
- return environment, path, free_plan
20
-
21
- env, root_path, free_plan = detect_environment()
22
- webui_path = f"{root_path}/sdw"
23
 
24
 
25
  # ==================== CSS ====================
 
7
  from IPython.display import display, HTML, Javascript
8
 
9
 
10
+ # Setup Env
11
+ env = os.environ.get('ENV_NAME')
12
+ root_path = os.environ.get('ROOT_PATH')
13
+ webui_path = os.environ.get('WEBUI_PATH')
14
+ free_plan = os.environ.get('FREE_PLAN')
 
 
 
 
 
 
 
 
15
 
16
 
17
  # ==================== CSS ====================
files_cells/python/en/downloading_en.py CHANGED
@@ -1,5 +1,7 @@
1
  ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
 
 
 
3
  import os
4
  import re
5
  import time
@@ -15,19 +17,11 @@ from IPython.display import clear_output
15
  from urllib.parse import urlparse, parse_qs
16
 
17
 
18
- # ================= DETECT ENV =================
19
- def detect_environment():
20
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
- environments = {
22
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
- }
25
- for env_var, (environment, path) in environments.items():
26
- if env_var in os.environ:
27
- return environment, path, free_plan
28
-
29
- env, root_path, free_plan = detect_environment()
30
- webui_path = f"{root_path}/sdw"
31
 
32
 
33
  # ================ LIBRARIES V2 ================
@@ -66,7 +60,7 @@ if not os.path.exists(flag_file):
66
  with capture.capture_output() as cap:
67
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
68
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
69
- get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
70
  del cap
71
 
72
  clear_output()
@@ -197,112 +191,9 @@ if commit_hash:
197
 
198
  ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
199
  print("📦 Downloading models and stuff...", end='')
200
- model_list = {
201
- "1.Anime (by XpucT) + INP": [
202
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
203
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
204
- ],
205
- "2.BluMix [Anime] [V7] + INP": [
206
- {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
207
- {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
208
- ],
209
- "3.Cetus-Mix [Anime] [V4] + INP": [
210
- {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
211
- {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
212
- ],
213
- "4.Counterfeit [Anime] [V3] + INP": [
214
- {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
215
- {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
216
- ],
217
- "5.CuteColor [Anime] [V3]": [
218
- {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
219
- ],
220
- "6.Dark-Sushi-Mix [Anime]": [
221
- {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
222
- {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
223
- ],
224
- "7.Deliberate [Realism] [V6] + INP": [
225
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
226
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
227
- ],
228
- "8.Meina-Mix [Anime] [V11] + INP": [
229
- {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
230
- {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
231
- ],
232
- "9.Mix-Pro [Anime] [V4] + INP": [
233
- {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
234
- {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
235
- ]
236
- }
237
-
238
- vae_list = {
239
- "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "Anime.vae.safetensors"}],
240
- "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
241
- "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
242
- "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
243
- "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
244
- }
245
-
246
- controlnet_list = {
247
- "1.canny": [
248
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
249
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
250
- ],
251
- "2.openpose": [
252
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
253
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
254
- ],
255
- "3.depth": [
256
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
257
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
258
- {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
259
- ],
260
- "4.normal_map": [
261
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
262
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
263
- ],
264
- "5.mlsd": [
265
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
266
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
267
- ],
268
- "6.lineart": [
269
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
270
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
271
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
272
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
273
- ],
274
- "7.soft_edge": [
275
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
276
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
277
- ],
278
- "8.scribble": [
279
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
280
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
281
- ],
282
- "9.segmentation": [
283
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
284
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
285
- ],
286
- "10.shuffle": [
287
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
288
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
289
- ],
290
- "11.tile": [
291
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
292
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
293
- ],
294
- "12.inpaint": [
295
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
296
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
297
- ],
298
- "13.instruct_p2p": [
299
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
300
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
301
- ]
302
- }
303
 
304
  url = ""
305
- prefixes = {
306
  "model": models_dir,
307
  "vae": vaes_dir,
308
  "lora": loras_dir,
@@ -314,7 +205,7 @@ prefixes = {
314
  }
315
 
316
  extension_repo = []
317
- directories = [value for key, value in prefixes.items()] # for unpucking zip files
318
  get_ipython().system('mkdir -p {" ".join(directories)}')
319
 
320
  hf_token = huggingface_token if huggingface_token else "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"
@@ -325,137 +216,123 @@ user_header = f"\"Authorization: Bearer {hf_token}\""
325
  from math import floor
326
 
327
  def center_text(text, terminal_width=45):
328
- text_length = len(text)
329
- left_padding = floor((terminal_width - text_length) / 2)
330
- right_padding = terminal_width - text_length - left_padding
331
- return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
332
 
333
  def format_output(url, dst_dir, file_name):
334
- info = f"[{file_name.split('.')[0]}]"
335
- info = center_text(info)
336
 
337
- print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
338
  print(f"\033[33mURL: \033[34m{url}")
339
  print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
340
  print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
341
 
342
  ''' GET CivitAi API - DATA '''
343
 
344
- def strip_(url, file_name=None):
345
- if 'github.com' in url:
346
- if '/blob/' in url:
347
- url = url.replace('/blob/', '/raw/')
348
-
349
- elif "civitai.com" in url:
350
- return CivitAi_API(url, file_name)
351
-
352
- elif "huggingface.co" in url:
353
- if '/blob/' in url:
354
- url = url.replace('/blob/', '/resolve/')
355
- if '?' in url:
356
- url = url.split('?')[0]
357
-
358
- return url
359
-
360
  def CivitAi_API(url, file_name=None):
361
- support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
362
- civitai_token = "62c0c5956b2f9defbd844d754000180b"
363
 
364
- if '?token=' in url:
365
- url = url.split('?token=')[0]
366
- if '?type=' in url:
367
- url = url.replace('?type=', f'?token={civitai_token}&type=')
368
- else:
369
- url = f"{url}?token={civitai_token}"
370
-
371
- # Determine model or version id
372
- if "civitai.com/models/" in url:
373
- if '?modelVersionId=' in url:
374
- version_id = url.split('?modelVersionId=')[1]
375
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
376
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
377
  else:
378
- model_id = url.split('/models/')[1].split('/')[0]
379
- response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
380
- # print(f"end - https://civitai.com/api/v1/models/{model_id}")
381
- else:
382
- version_id = url.split('/models/')[1].split('/')[0]
383
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
384
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
385
 
386
- data = response.json()
387
 
388
- if response.status_code != 200:
389
  return None, None, None, None, None, None, None
390
 
391
- # Define model type and name
392
- if "civitai.com/models/" in url:
393
- if '?modelVersionId=' in url:
394
- model_type = data['model']['type']
395
- model_name = data['files'][0]['name']
 
 
 
 
 
 
 
 
 
396
  else:
397
- model_type = data['type']
398
- model_name = data['modelVersions'][0]['files'][0]['name']
399
- elif 'type=' in url:
400
- model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
401
- if 'model' in model_type.lower():
402
  model_name = data['files'][0]['name']
403
- else:
404
- model_name = data['files'][1]['name']
405
- else:
406
- model_type = data['model']['type']
407
- model_name = data['files'][0]['name']
408
 
 
409
  model_name = file_name or model_name
410
 
411
- # Determine DownloadUrl
412
- if "civitai.com/models/" in url:
413
- if '?modelVersionId=' in url:
414
- download_url = data.get('downloadUrl')
415
- else:
416
- download_url = data["modelVersions"][0].get("downloadUrl", "")
417
- elif 'type=' in url:
418
- if any(t.lower() in model_type.lower() for t in support_types):
419
- download_url = data['files'][0]['downloadUrl']
 
 
420
  else:
421
- download_url = data['files'][1]['downloadUrl']
422
- else:
423
- download_url = data.get('downloadUrl')
424
 
425
- clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
 
426
 
427
- # Find a safe image: level less than 4 | Kaggle
428
- image_url, image_name = None, None
429
- if any(t in model_type for t in support_types):
430
- try:
431
- images = data.get('images') or data['modelVersions'][0].get('images', [])
432
- if env == 'Kaggle':
433
- image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
434
- else:
435
- image_url = images[0]['url'] if images else None
436
- except KeyError:
437
- pass
 
 
 
438
 
439
- # Generate a name to save the image
440
- image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
441
 
442
- return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
443
 
444
  ''' Main Download Code '''
445
 
 
 
 
 
 
 
 
 
446
  def download(url):
447
  links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
448
 
449
  for link_or_path in links_and_paths:
450
- if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
451
  handle_manual(link_or_path)
452
  else:
453
  url, dst_dir, file_name = link_or_path.split()
454
  manual_download(url, dst_dir, file_name)
455
 
456
- unpack_zip_files()
457
-
458
- def unpack_zip_files():
459
  for directory in directories:
460
  for root, _, files in os.walk(directory):
461
  for file in files:
@@ -475,8 +352,8 @@ def handle_manual(url):
475
  if file_name:
476
  path = re.sub(r'\[.*?\]', '', path)
477
 
478
- if prefix in prefixes:
479
- dir = prefixes[prefix]
480
  if prefix != "extension":
481
  try:
482
  manual_download(path, dir, file_name=file_name)
@@ -490,60 +367,52 @@ def manual_download(url, dst_dir, file_name):
490
  aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
491
  aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
492
 
493
- if 'github.com' in url:
494
- url = strip_(url)
495
-
496
- # -- CivitAi APi+ V2 --
497
- elif 'civitai' in url:
498
- url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
499
 
 
 
500
  if image_url and image_name:
501
- with capture.capture_output() as cap:
502
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
503
- del cap
504
 
505
- elif "huggingface.co" in url:
506
- clean_url = strip_(url)
507
- basename = clean_url.split("/")[-1] if file_name is None else file_name
508
 
509
  """ Formatted info output """
510
- model_name_or_basename = file_name if not 'huggingface' in url else basename
511
  format_output(clean_url or url, dst_dir, model_name_or_basename)
512
 
513
- # ## -- for my tests --
514
  # print(url, dst_dir, model_name_or_basename)
515
- print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
516
- if 'civitai' in url and data and image_name:
517
- print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
 
 
 
518
  # =====================
 
 
 
519
 
520
- # -- Git Hub --
521
- if 'github.com' in url or 'githubusercontent.com' in url:
522
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
523
-
524
- # -- GDrive --
525
- elif 'drive.google' in url:
526
- try:
527
- have_drive_link
528
- except:
529
- get_ipython().system('pip install -q gdown==5.2.0 > /dev/null')
530
- have_drive_link = True
531
 
532
  if 'folders' in url:
533
- get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
534
  else:
535
- if file_name:
536
- get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
537
- else:
538
- get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
539
 
540
- # -- Hugging Face --
541
- elif 'huggingface' in url:
542
- get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
543
 
544
- # -- Other --
545
  elif 'http' in url:
546
- get_ipython().system('aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o "{file_name if file_name else \'\'}" \'{url}\'')
547
 
548
  ''' SubModels - Added URLs '''
549
 
@@ -583,7 +452,7 @@ url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir,
583
 
584
  ''' file.txt - added urls '''
585
 
586
- def process_file_download(file_url, prefixes, unique_urls):
587
  files_urls = ""
588
 
589
  if file_url.startswith("http"):
@@ -598,8 +467,8 @@ def process_file_download(file_url, prefixes, unique_urls):
598
  current_tag = None
599
  for line in lines:
600
  line = line.strip()
601
- if any(f'# {tag}' in line.lower() for tag in prefixes):
602
- current_tag = next((tag for tag in prefixes if tag in line.lower()))
603
 
604
  urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
605
  for url in urls:
@@ -623,13 +492,13 @@ if custom_file_urls:
623
  custom_file_url = f'{root_path}/{custom_file_url}'
624
 
625
  try:
626
- file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
627
  except FileNotFoundError:
628
  pass
629
 
630
  # url prefixing
631
  urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
632
- prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())
633
  url += ", ".join(prefixed_urls) + ", " + file_urls
634
 
635
  if detailed_download == "on":
 
1
  ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
 
3
+ from models_data import model_list, vae_list, controlnet_list
4
+
5
  import os
6
  import re
7
  import time
 
17
  from urllib.parse import urlparse, parse_qs
18
 
19
 
20
+ # Setup Env
21
+ env = os.environ.get('ENV_NAME')
22
+ root_path = os.environ.get('ROOT_PATH')
23
+ webui_path = os.environ.get('WEBUI_PATH')
24
+ free_plan = os.environ.get('FREE_PLAN')
 
 
 
 
 
 
 
 
25
 
26
 
27
  # ================ LIBRARIES V2 ================
 
60
  with capture.capture_output() as cap:
61
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
62
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
63
+ get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz')
64
  del cap
65
 
66
  clear_output()
 
191
 
192
  ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
193
  print("📦 Downloading models and stuff...", end='')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
 
195
  url = ""
196
+ PREFIXES = {
197
  "model": models_dir,
198
  "vae": vaes_dir,
199
  "lora": loras_dir,
 
205
  }
206
 
207
  extension_repo = []
208
+ directories = [value for key, value in PREFIXES.items()] # for unpucking zip files
209
  get_ipython().system('mkdir -p {" ".join(directories)}')
210
 
211
  hf_token = huggingface_token if huggingface_token else "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"
 
216
  from math import floor
217
 
218
  def center_text(text, terminal_width=45):
219
+ padding = (terminal_width - len(text)) // 2
220
+ return f"\033[1m\033[36m{' ' * padding}{text}{' ' * padding}\033[0m\033[32m"
 
 
221
 
222
  def format_output(url, dst_dir, file_name):
223
+ info = center_text(f"[{file_name.split('.')[0]}]")
224
+ separation_line = '\033[32m' + '---' * 20
225
 
226
+ print(f"\n{separation_line}{info}{separation_line}")
227
  print(f"\033[33mURL: \033[34m{url}")
228
  print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
229
  print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
230
 
231
  ''' GET CivitAi API - DATA '''
232
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
233
  def CivitAi_API(url, file_name=None):
234
+ SUPPORT_TYPES = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
235
+ CIVITAI_TOKEN = "62c0c5956b2f9defbd844d754000180b"
236
 
237
+ url = url.split('?token=')[0] if '?token=' in url else url
238
+ url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=') if '?type=' in url else f"{url}?token={CIVITAI_TOKEN}"
239
+
240
+ def get_model_data(url):
241
+ if "civitai.com/models/" in url:
242
+ if '?modelVersionId=' in url:
243
+ version_id = url.split('?modelVersionId=')[1]
244
+ return requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}").json()
245
+ else:
246
+ model_id = url.split('/models/')[1].split('/')[0]
247
+ return requests.get(f"https://civitai.com/api/v1/models/{model_id}").json()
 
 
248
  else:
249
+ version_id = url.split('/models/')[1].split('/')[0]
250
+ return requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}").json()
 
 
 
 
 
251
 
252
+ data = get_model_data(url)
253
 
254
+ if not data:
255
  return None, None, None, None, None, None, None
256
 
257
+ def extract_model_info(url, data):
258
+ if "civitai.com/models/" in url:
259
+ if '?modelVersionId=' in url:
260
+ model_type = data['model']['type']
261
+ model_name = data['files'][0]['name']
262
+ else:
263
+ model_type = data['type']
264
+ model_name = data['modelVersions'][0]['files'][0]['name']
265
+ elif 'type=' in url:
266
+ model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
267
+ if 'model' in model_type.lower():
268
+ model_name = data['files'][0]['name']
269
+ else:
270
+ model_name = data['files'][1]['name']
271
  else:
272
+ model_type = data['model']['type']
 
 
 
 
273
  model_name = data['files'][0]['name']
274
+ return model_type, model_name
 
 
 
 
275
 
276
+ model_type, model_name = extract_model_info(url, data)
277
  model_name = file_name or model_name
278
 
279
+ def get_download_url(url, data, model_type):
280
+ if "civitai.com/models/" in url:
281
+ if '?modelVersionId=' in url:
282
+ return data.get('downloadUrl')
283
+ else:
284
+ return data["modelVersions"][0].get("downloadUrl", "")
285
+ elif 'type=' in url:
286
+ if any(t.lower() in model_type.lower() for t in SUPPORT_TYPES):
287
+ return data['files'][0]['downloadUrl']
288
+ else:
289
+ return data['files'][1]['downloadUrl']
290
  else:
291
+ return data.get('downloadUrl')
 
 
292
 
293
+ download_url = get_download_url(url, data, model_type)
294
+ clean_url = re.sub(r'[?&]token=[^&]*', '', download_url)
295
 
296
+ def get_image_info(data, model_type, model_name):
297
+ image_url, image_name = None, None
298
+ if any(t in model_type for t in SUPPORT_TYPES):
299
+ try:
300
+ images = data.get('images') or data['modelVersions'][0].get('images', [])
301
+ if env == 'Kaggle':
302
+ image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
303
+ else:
304
+ image_url = images[0]['url'] if images else None
305
+ except KeyError:
306
+ pass
307
+
308
+ image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
309
+ return image_url, image_name
310
 
311
+ image_url, image_name = get_image_info(data, model_type, model_name)
 
312
 
313
+ return f"{download_url}{'&' if '?' in download_url else '?'}token={CIVITAI_TOKEN}", clean_url, model_type, model_name, image_url, image_name, data
314
 
315
  ''' Main Download Code '''
316
 
317
+ def strip_(url):
318
+ if 'github.com' in url:
319
+ return url.replace('/blob/', '/raw/')
320
+ elif "huggingface.co" in url:
321
+ url = url.replace('/blob/', '/resolve/')
322
+ return url.split('?')[0] if '?' in url else url
323
+ return url
324
+
325
  def download(url):
326
  links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
327
 
328
  for link_or_path in links_and_paths:
329
+ if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):
330
  handle_manual(link_or_path)
331
  else:
332
  url, dst_dir, file_name = link_or_path.split()
333
  manual_download(url, dst_dir, file_name)
334
 
335
+ # Unpuck ZIPs Files
 
 
336
  for directory in directories:
337
  for root, _, files in os.walk(directory):
338
  for file in files:
 
352
  if file_name:
353
  path = re.sub(r'\[.*?\]', '', path)
354
 
355
+ if prefix in PREFIXES:
356
+ dir = PREFIXES[prefix]
357
  if prefix != "extension":
358
  try:
359
  manual_download(path, dir, file_name=file_name)
 
367
  aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
368
  aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
369
 
370
+ clean_url = strip_(url)
 
 
 
 
 
371
 
372
+ if 'civitai' in url:
373
+ url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)
374
  if image_url and image_name:
375
+ command = ["aria2c"] + aria2_args.split() + ["-d", dst_dir, "-o", image_name, image_url]
376
+ subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
 
377
 
378
+ elif 'github' in url or "huggingface.co" in url:
379
+ basename = url.split("/")[-1] if file_name is None else file_name
 
380
 
381
  """ Formatted info output """
382
+ model_name_or_basename = file_name if file_name else basename
383
  format_output(clean_url or url, dst_dir, model_name_or_basename)
384
 
 
385
  # print(url, dst_dir, model_name_or_basename)
386
+ if 'civitai' in url:
387
+ if not data:
388
+ print("\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n")
389
+ if data and image_name:
390
+ print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
391
+
392
  # =====================
393
+ def run_aria2c(url, dst_dir, file_name=None, args="", header=""):
394
+ out = f"-o '{file_name}'" if file_name else ""
395
+ get_ipython().system("aria2c {header} {args} -d {dst_dir} {out} '{url}'")
396
 
397
+ # -- Google Drive --
398
+ if 'drive.google' in url:
399
+ if not globals().get('have_drive_link', False):
400
+ os.system("pip install -U gdown > /dev/null")
401
+ globals()['have_drive_link'] = True
 
 
 
 
 
 
402
 
403
  if 'folders' in url:
404
+ os.system(f"gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c")
405
  else:
406
+ out_path = f"{dst_dir}/{file_name}" if file_name else dst_dir
407
+ os.system(f"gdown \"{url}\" -O {out_path} --fuzzy -c")
 
 
408
 
409
+ # -- GitHub or Hugging Face --
410
+ elif 'github' in url or 'huggingface' in url:
411
+ run_aria2c(clean_url, dst_dir, basename, aria2_args, header_option if 'huggingface' in url else '')
412
 
413
+ # -- Other HTTP/Sources --
414
  elif 'http' in url:
415
+ run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)
416
 
417
  ''' SubModels - Added URLs '''
418
 
 
452
 
453
  ''' file.txt - added urls '''
454
 
455
+ def process_file_download(file_url, PREFIXES, unique_urls):
456
  files_urls = ""
457
 
458
  if file_url.startswith("http"):
 
467
  current_tag = None
468
  for line in lines:
469
  line = line.strip()
470
+ if any(f'# {tag}' in line.lower() for tag in PREFIXES):
471
+ current_tag = next((tag for tag in PREFIXES if tag in line.lower()))
472
 
473
  urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
474
  for url in urls:
 
492
  custom_file_url = f'{root_path}/{custom_file_url}'
493
 
494
  try:
495
+ file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)
496
  except FileNotFoundError:
497
  pass
498
 
499
  # url prefixing
500
  urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
501
+ prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())
502
  url += ", ".join(prefixed_urls) + ", " + file_urls
503
 
504
  if detailed_download == "on":
files_cells/python/en/launch_en.py CHANGED
@@ -9,21 +9,13 @@ import cloudpickle as pickle
9
  from datetime import timedelta
10
  from IPython.display import clear_output
11
 
12
- # ================= DETECT ENV =================
13
- def detect_environment():
14
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
- environments = {
16
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
- }
19
-
20
- for env_var, (environment, path) in environments.items():
21
- if env_var in os.environ:
22
- return environment, path, free_plan
23
- return 'Unknown', '/unknown/path', free_plan
24
-
25
- env, root_path, free_plan = detect_environment()
26
- webui_path = f"{root_path}/sdw"
27
 
28
  def load_settings():
29
  SETTINGS_FILE = f'{root_path}/settings.json'
@@ -38,6 +30,7 @@ zrok_token = settings.get('zrok_token', "")
38
  commandline_arguments = settings.get('commandline_arguments', "")
39
  change_webui = settings.get('change_webui', "")
40
 
 
41
  # ======================== TUNNEL V2 ========================
42
  print('Please Wait...')
43
 
@@ -71,6 +64,7 @@ if zrok_token:
71
 
72
  clear_output()
73
 
 
74
  # =============== Automatic Fixing Path V3 ===============
75
  paths_to_check = {
76
  "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
 
9
  from datetime import timedelta
10
  from IPython.display import clear_output
11
 
12
+
13
+ # Setup Env
14
+ env = os.environ.get('ENV_NAME')
15
+ root_path = os.environ.get('ROOT_PATH')
16
+ webui_path = os.environ.get('WEBUI_PATH')
17
+ free_plan = os.environ.get('FREE_PLAN')
18
+
 
 
 
 
 
 
 
 
19
 
20
  def load_settings():
21
  SETTINGS_FILE = f'{root_path}/settings.json'
 
30
  commandline_arguments = settings.get('commandline_arguments', "")
31
  change_webui = settings.get('change_webui', "")
32
 
33
+
34
  # ======================== TUNNEL V2 ========================
35
  print('Please Wait...')
36
 
 
64
 
65
  clear_output()
66
 
67
+
68
  # =============== Automatic Fixing Path V3 ===============
69
  paths_to_check = {
70
  "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
files_cells/python/en/widgets_en.py CHANGED
@@ -8,20 +8,11 @@ from ipywidgets import widgets, Layout, Label, Button, VBox, HBox
8
  from IPython.display import display, HTML, Javascript, clear_output
9
 
10
 
11
- # ================= DETECT ENV =================
12
- def detect_environment():
13
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
14
- environments = {
15
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
16
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
17
- }
18
- for env_var, (environment, path) in environments.items():
19
- if env_var in os.environ:
20
- return environment, path, free_plan
21
-
22
- env, root_path, free_plan = detect_environment()
23
- webui_path = f"{root_path}/sdw"
24
- get_ipython().system('mkdir -p {root_path}')
25
 
26
 
27
  # ==================== CSS JS ====================
 
8
  from IPython.display import display, HTML, Javascript, clear_output
9
 
10
 
11
+ # Setup Env
12
+ env = os.environ.get('ENV_NAME')
13
+ root_path = os.environ.get('ROOT_PATH')
14
+ webui_path = os.environ.get('WEBUI_PATH')
15
+ free_plan = os.environ.get('FREE_PLAN')
 
 
 
 
 
 
 
 
 
16
 
17
 
18
  # ==================== CSS JS ====================
files_cells/python/ru/auto_cleaner_ru.py CHANGED
@@ -7,19 +7,11 @@ from ipywidgets import Label, Button, VBox, HBox
7
  from IPython.display import display, HTML
8
 
9
 
10
- # ================= DETECT ENV =================
11
- def detect_environment():
12
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
13
- environments = {
14
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
15
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
16
- }
17
- for env_var, (environment, path) in environments.items():
18
- if env_var in os.environ:
19
- return environment, path, free_plan
20
-
21
- env, root_path, free_plan = detect_environment()
22
- webui_path = f"{root_path}/sdw"
23
 
24
 
25
  # ==================== CSS ====================
 
7
  from IPython.display import display, HTML
8
 
9
 
10
+ # Setup Env
11
+ env = os.environ.get('ENV_NAME')
12
+ root_path = os.environ.get('ROOT_PATH')
13
+ webui_path = os.environ.get('WEBUI_PATH')
14
+ free_plan = os.environ.get('FREE_PLAN')
 
 
 
 
 
 
 
 
15
 
16
 
17
  # ==================== CSS ====================
files_cells/python/ru/downloading_ru.py CHANGED
@@ -1,5 +1,7 @@
1
  ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
 
 
 
3
  import os
4
  import re
5
  import time
@@ -15,19 +17,11 @@ from IPython.display import clear_output
15
  from urllib.parse import urlparse, parse_qs
16
 
17
 
18
- # ================= DETECT ENV =================
19
- def detect_environment():
20
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
21
- environments = {
22
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
23
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
24
- }
25
- for env_var, (environment, path) in environments.items():
26
- if env_var in os.environ:
27
- return environment, path, free_plan
28
-
29
- env, root_path, free_plan = detect_environment()
30
- webui_path = f"{root_path}/sdw"
31
 
32
 
33
  # ================ LIBRARIES V2 ================
@@ -66,7 +60,7 @@ if not os.path.exists(flag_file):
66
  with capture.capture_output() as cap:
67
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
68
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
69
- get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.23/zrok_0.4.23_linux_amd64.tar.gz && tar -xzf zrok_0.4.23_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.23_linux_amd64.tar.gz')
70
  del cap
71
 
72
  clear_output()
@@ -197,112 +191,9 @@ if commit_hash:
197
 
198
  ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
199
  print("📦 Скачивание моделей и прочего...", end='')
200
- model_list = {
201
- "1.Anime (by XpucT) + INP": [
202
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
203
- {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
204
- ],
205
- "2.BluMix [Anime] [V7] + INP": [
206
- {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
207
- {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
208
- ],
209
- "3.Cetus-Mix [Anime] [V4] + INP": [
210
- {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
211
- {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
212
- ],
213
- "4.Counterfeit [Anime] [V3] + INP": [
214
- {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
215
- {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
216
- ],
217
- "5.CuteColor [Anime] [V3]": [
218
- {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
219
- ],
220
- "6.Dark-Sushi-Mix [Anime]": [
221
- {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
222
- {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
223
- ],
224
- "7.Deliberate [Realism] [V6] + INP": [
225
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
226
- {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
227
- ],
228
- "8.Meina-Mix [Anime] [V11] + INP": [
229
- {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
230
- {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
231
- ],
232
- "9.Mix-Pro [Anime] [V4] + INP": [
233
- {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
234
- {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
235
- ]
236
- }
237
-
238
- vae_list = {
239
- "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "Anime.vae.safetensors"}],
240
- "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
241
- "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
242
- "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
243
- "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
244
- }
245
-
246
- controlnet_list = {
247
- "1.canny": [
248
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
249
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
250
- ],
251
- "2.openpose": [
252
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
253
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
254
- ],
255
- "3.depth": [
256
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
257
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
258
- {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
259
- ],
260
- "4.normal_map": [
261
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
262
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
263
- ],
264
- "5.mlsd": [
265
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
266
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
267
- ],
268
- "6.lineart": [
269
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
270
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
271
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
272
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
273
- ],
274
- "7.soft_edge": [
275
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
276
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
277
- ],
278
- "8.scribble": [
279
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
280
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
281
- ],
282
- "9.segmentation": [
283
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
284
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
285
- ],
286
- "10.shuffle": [
287
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
288
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
289
- ],
290
- "11.tile": [
291
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
292
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
293
- ],
294
- "12.inpaint": [
295
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
296
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
297
- ],
298
- "13.instruct_p2p": [
299
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
300
- {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
301
- ]
302
- }
303
 
304
  url = ""
305
- prefixes = {
306
  "model": models_dir,
307
  "vae": vaes_dir,
308
  "lora": loras_dir,
@@ -314,7 +205,7 @@ prefixes = {
314
  }
315
 
316
  extension_repo = []
317
- directories = [value for key, value in prefixes.items()] # for unpucking zip files
318
  get_ipython().system('mkdir -p {" ".join(directories)}')
319
 
320
  hf_token = huggingface_token if huggingface_token else "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"
@@ -325,137 +216,123 @@ user_header = f"\"Authorization: Bearer {hf_token}\""
325
  from math import floor
326
 
327
  def center_text(text, terminal_width=45):
328
- text_length = len(text)
329
- left_padding = floor((terminal_width - text_length) / 2)
330
- right_padding = terminal_width - text_length - left_padding
331
- return f"\033[1m\033[36m{' ' * left_padding}{text}{' ' * right_padding}\033[0m\033[32m"
332
 
333
  def format_output(url, dst_dir, file_name):
334
- info = f"[{file_name.split('.')[0]}]"
335
- info = center_text(info)
336
 
337
- print(f"\n\033[32m{'---'*20}]{info}[{'---'*20}")
338
  print(f"\033[33mURL: \033[34m{url}")
339
  print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
340
  print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
341
 
342
  ''' GET CivitAi API - DATA '''
343
 
344
- def strip_(url, file_name=None):
345
- if 'github.com' in url:
346
- if '/blob/' in url:
347
- url = url.replace('/blob/', '/raw/')
348
-
349
- elif "civitai.com" in url:
350
- return CivitAi_API(url, file_name)
351
-
352
- elif "huggingface.co" in url:
353
- if '/blob/' in url:
354
- url = url.replace('/blob/', '/resolve/')
355
- if '?' in url:
356
- url = url.split('?')[0]
357
-
358
- return url
359
-
360
  def CivitAi_API(url, file_name=None):
361
- support_types = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
362
- civitai_token = "62c0c5956b2f9defbd844d754000180b"
363
 
364
- if '?token=' in url:
365
- url = url.split('?token=')[0]
366
- if '?type=' in url:
367
- url = url.replace('?type=', f'?token={civitai_token}&type=')
368
- else:
369
- url = f"{url}?token={civitai_token}"
370
-
371
- # Determine model or version id
372
- if "civitai.com/models/" in url:
373
- if '?modelVersionId=' in url:
374
- version_id = url.split('?modelVersionId=')[1]
375
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
376
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
377
  else:
378
- model_id = url.split('/models/')[1].split('/')[0]
379
- response = requests.get(f"https://civitai.com/api/v1/models/{model_id}")
380
- # print(f"end - https://civitai.com/api/v1/models/{model_id}")
381
- else:
382
- version_id = url.split('/models/')[1].split('/')[0]
383
- response = requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}")
384
- # print(f"end - https://civitai.com/api/v1/model-versions/{version_id}")
385
 
386
- data = response.json()
387
 
388
- if response.status_code != 200:
389
  return None, None, None, None, None, None, None
390
 
391
- # Define model type and name
392
- if "civitai.com/models/" in url:
393
- if '?modelVersionId=' in url:
394
- model_type = data['model']['type']
395
- model_name = data['files'][0]['name']
 
 
 
 
 
 
 
 
 
396
  else:
397
- model_type = data['type']
398
- model_name = data['modelVersions'][0]['files'][0]['name']
399
- elif 'type=' in url:
400
- model_type = parse_qs(urlparse(url).query).get('type', [''])[0]
401
- if 'model' in model_type.lower():
402
  model_name = data['files'][0]['name']
403
- else:
404
- model_name = data['files'][1]['name']
405
- else:
406
- model_type = data['model']['type']
407
- model_name = data['files'][0]['name']
408
 
 
409
  model_name = file_name or model_name
410
 
411
- # Determine DownloadUrl
412
- if "civitai.com/models/" in url:
413
- if '?modelVersionId=' in url:
414
- download_url = data.get('downloadUrl')
415
- else:
416
- download_url = data["modelVersions"][0].get("downloadUrl", "")
417
- elif 'type=' in url:
418
- if any(t.lower() in model_type.lower() for t in support_types):
419
- download_url = data['files'][0]['downloadUrl']
 
 
420
  else:
421
- download_url = data['files'][1]['downloadUrl']
422
- else:
423
- download_url = data.get('downloadUrl')
424
 
425
- clean_url = re.sub(r'[?&]token=[^&]*', '', download_url) # hide token
 
426
 
427
- # Find a safe image: level less than 4 | Kaggle
428
- image_url, image_name = None, None
429
- if any(t in model_type for t in support_types):
430
- try:
431
- images = data.get('images') or data['modelVersions'][0].get('images', [])
432
- if env == 'Kaggle':
433
- image_url = next((image['url'] for image in images if image['nsfwLevel'] < 4), None)
434
- else:
435
- image_url = images[0]['url'] if images else None
436
- except KeyError:
437
- pass
 
 
 
438
 
439
- # Generate a name to save the image
440
- image_name = f"{model_name.split('.')[0]}.preview.{image_url.split('.')[-1]}" if image_url else None
441
 
442
- return f"{download_url}{'&' if '?' in download_url else '?'}token={civitai_token}", clean_url, model_type, model_name, image_url, image_name, data
443
 
444
  ''' Main Download Code '''
445
 
 
 
 
 
 
 
 
 
446
  def download(url):
447
  links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
448
 
449
  for link_or_path in links_and_paths:
450
- if any(link_or_path.lower().startswith(prefix) for prefix in prefixes):
451
  handle_manual(link_or_path)
452
  else:
453
  url, dst_dir, file_name = link_or_path.split()
454
  manual_download(url, dst_dir, file_name)
455
 
456
- unpack_zip_files()
457
-
458
- def unpack_zip_files():
459
  for directory in directories:
460
  for root, _, files in os.walk(directory):
461
  for file in files:
@@ -475,8 +352,8 @@ def handle_manual(url):
475
  if file_name:
476
  path = re.sub(r'\[.*?\]', '', path)
477
 
478
- if prefix in prefixes:
479
- dir = prefixes[prefix]
480
  if prefix != "extension":
481
  try:
482
  manual_download(path, dir, file_name=file_name)
@@ -490,60 +367,52 @@ def manual_download(url, dst_dir, file_name):
490
  aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
491
  aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
492
 
493
- if 'github.com' in url:
494
- url = strip_(url)
495
-
496
- # -- CivitAi APi+ V2 --
497
- elif 'civitai' in url:
498
- url, clean_url, model_type, file_name, image_url, image_name, data = strip_(url, file_name)
499
 
 
 
500
  if image_url and image_name:
501
- with capture.capture_output() as cap:
502
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{image_name}' '{image_url}'")
503
- del cap
504
 
505
- elif "huggingface.co" in url:
506
- clean_url = strip_(url)
507
- basename = clean_url.split("/")[-1] if file_name is None else file_name
508
 
509
  """ Formatted info output """
510
- model_name_or_basename = file_name if not 'huggingface' in url else basename
511
  format_output(clean_url or url, dst_dir, model_name_or_basename)
512
 
513
- # ## -- for my tests --
514
  # print(url, dst_dir, model_name_or_basename)
515
- print(f"\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n") if 'civitai' in url and not data else None
516
- if 'civitai' in url and data and image_name:
517
- print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
 
 
 
518
  # =====================
 
 
 
519
 
520
- # -- Git Hub --
521
- if 'github.com' in url or 'githubusercontent.com' in url:
522
- get_ipython().system("aria2c {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
523
-
524
- # -- GDrive --
525
- elif 'drive.google' in url:
526
- try:
527
- have_drive_link
528
- except:
529
- get_ipython().system('pip install -q gdown==5.2.0 > /dev/null')
530
- have_drive_link = True
531
 
532
  if 'folders' in url:
533
- get_ipython().system('gdown --folder "{url}" -O {dst_dir} --fuzzy -c')
534
  else:
535
- if file_name:
536
- get_ipython().system('gdown "{url}" -O {dst_dir}/{file_name} --fuzzy -c')
537
- else:
538
- get_ipython().system('gdown "{url}" -O {dst_dir} --fuzzy -c')
539
 
540
- # -- Hugging Face --
541
- elif 'huggingface' in url:
542
- get_ipython().system("aria2c {header_option} {aria2_args} -d {dst_dir} -o '{basename}' '{url}'")
543
 
544
- # -- Other --
545
  elif 'http' in url:
546
- get_ipython().system('aria2c {aria2c_header} {aria2_args} -d {dst_dir} -o "{file_name if file_name else \'\'}" \'{url}\'')
547
 
548
  ''' SubModels - Added URLs '''
549
 
@@ -583,7 +452,7 @@ url = handle_submodels(controlnet, controlnet_num, controlnet_list, control_dir,
583
 
584
  ''' file.txt - added urls '''
585
 
586
- def process_file_download(file_url, prefixes, unique_urls):
587
  files_urls = ""
588
 
589
  if file_url.startswith("http"):
@@ -598,8 +467,8 @@ def process_file_download(file_url, prefixes, unique_urls):
598
  current_tag = None
599
  for line in lines:
600
  line = line.strip()
601
- if any(f'# {tag}' in line.lower() for tag in prefixes):
602
- current_tag = next((tag for tag in prefixes if tag in line.lower()))
603
 
604
  urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
605
  for url in urls:
@@ -623,13 +492,13 @@ if custom_file_urls:
623
  custom_file_url = f'{root_path}/{custom_file_url}'
624
 
625
  try:
626
- file_urls += process_file_download(custom_file_url, prefixes, unique_urls)
627
  except FileNotFoundError:
628
  pass
629
 
630
  # url prefixing
631
  urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
632
- prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(prefixes.keys(), urls) if url for url in url.replace(',', '').split())
633
  url += ", ".join(prefixed_urls) + ", " + file_urls
634
 
635
  if detailed_download == "on":
 
1
  ##~ DOWNLOADING CODE | BY: ANXETY ~##
2
 
3
+ from models_data import model_list, vae_list, controlnet_list
4
+
5
  import os
6
  import re
7
  import time
 
17
  from urllib.parse import urlparse, parse_qs
18
 
19
 
20
# Setup Env — values exported by modules/setup_*.py before this cell runs.
env = os.environ.get('ENV_NAME')
root_path = os.environ.get('ROOT_PATH')
webui_path = os.environ.get('WEBUI_PATH')
# FREE_PLAN is stored as the string 'True'/'False'; compare explicitly so this
# is a real bool — the non-empty string 'False' would otherwise be truthy.
free_plan = os.environ.get('FREE_PLAN') == 'True'
 
 
 
 
 
 
 
 
25
 
26
 
27
  # ================ LIBRARIES V2 ================
 
60
  with capture.capture_output() as cap:
61
  get_ipython().system('curl -s -OL https://github.com/DEX-1101/sd-webui-notebook/raw/main/res/new_tunnel --output-dir {root_path}')
62
  get_ipython().system('curl -s -Lo /usr/bin/cl https://github.com/cloudflare/cloudflared/releases/latest/download/cloudflared-linux-amd64 && chmod +x /usr/bin/cl')
63
+ get_ipython().system('curl -sLO https://github.com/openziti/zrok/releases/download/v0.4.32/zrok_0.4.32_linux_amd64.tar.gz && tar -xzf zrok_0.4.32_linux_amd64.tar.gz -C /usr/bin && rm -f zrok_0.4.32_linux_amd64.tar.gz')
64
  del cap
65
 
66
  clear_output()
 
191
 
192
  ## Downloading model and stuff | oh~ Hey! If you're freaked out by that code too, don't worry, me too!
193
  print("📦 Скачивание моделей и прочего...", end='')
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
194
 
195
  url = ""
196
+ PREFIXES = {
197
  "model": models_dir,
198
  "vae": vaes_dir,
199
  "lora": loras_dir,
 
205
  }
206
 
207
  extension_repo = []
208
directories = list(PREFIXES.values())  # download targets; also scanned when unpacking ZIP files
209
  get_ipython().system('mkdir -p {" ".join(directories)}')
210
 
211
  hf_token = huggingface_token if huggingface_token else "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"
 
216
  from math import floor
217
 
218
def center_text(text, terminal_width=45):
    """Return *text* wrapped in bold-cyan ANSI codes, padded to sit centered
    within *terminal_width* columns (trailing code switches back to green)."""
    pad = (terminal_width - len(text)) // 2
    filler = ' ' * pad
    return f"\033[1m\033[36m{filler}{text}{filler}\033[0m\033[32m"
 
 
221
 
222
def format_output(url, dst_dir, file_name):
    """Print a colored banner describing one pending download (url, dir, name)."""
    header = center_text(f"[{file_name.split('.')[0]}]")
    rule = '\033[32m' + '---' * 20
    print(f"\n{rule}{header}{rule}")
    print(f"\033[33mURL: \033[34m{url}")
    print(f"\033[33mSAVE DIR: \033[34m{dst_dir}")
    print(f"\033[33mFILE NAME: \033[34m{file_name}\033[0m")
230
 
231
  ''' GET CivitAi API - DATA '''
232
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
233
def CivitAi_API(url, file_name=None):
    """Resolve a CivitAi model/version link through the public REST API.

    Returns a 7-tuple ``(tokenized_url, clean_url, model_type, model_name,
    image_url, image_name, data)``; all seven are ``None`` when the API
    returned no data. ``clean_url`` has the access token stripped for display.
    """
    SUPPORT_TYPES = ('Checkpoint', 'Model', 'TextualInversion', 'LORA')
    CIVITAI_TOKEN = "62c0c5956b2f9defbd844d754000180b"

    # Re-tokenize the link: drop any caller-supplied token, inject ours.
    if '?token=' in url:
        url = url.split('?token=')[0]
    if '?type=' in url:
        url = url.replace('?type=', f'?token={CIVITAI_TOKEN}&type=')
    else:
        url = f"{url}?token={CIVITAI_TOKEN}"

    def fetch_payload(page_url):
        # Model pages resolve via /models/<id> (or an explicit version id);
        # any other link shape is treated as a direct version reference.
        if "civitai.com/models/" in page_url:
            if '?modelVersionId=' in page_url:
                version_id = page_url.split('?modelVersionId=')[1]
                return requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}").json()
            model_id = page_url.split('/models/')[1].split('/')[0]
            return requests.get(f"https://civitai.com/api/v1/models/{model_id}").json()
        version_id = page_url.split('/models/')[1].split('/')[0]
        return requests.get(f"https://civitai.com/api/v1/model-versions/{version_id}").json()

    data = fetch_payload(url)

    if not data:
        return None, None, None, None, None, None, None

    def pick_model_info(page_url, payload):
        # The payload shape differs per endpoint, so mirror fetch_payload's branching.
        if "civitai.com/models/" in page_url:
            if '?modelVersionId=' in page_url:
                return payload['model']['type'], payload['files'][0]['name']
            return payload['type'], payload['modelVersions'][0]['files'][0]['name']
        if 'type=' in page_url:
            m_type = parse_qs(urlparse(page_url).query).get('type', [''])[0]
            file_idx = 0 if 'model' in m_type.lower() else 1
            return m_type, payload['files'][file_idx]['name']
        return payload['model']['type'], payload['files'][0]['name']

    model_type, model_name = pick_model_info(url, data)
    model_name = file_name or model_name

    def pick_download_url(page_url, payload, m_type):
        if "civitai.com/models/" in page_url:
            if '?modelVersionId=' in page_url:
                return payload.get('downloadUrl')
            return payload["modelVersions"][0].get("downloadUrl", "")
        if 'type=' in page_url:
            if any(t.lower() in m_type.lower() for t in SUPPORT_TYPES):
                return payload['files'][0]['downloadUrl']
            return payload['files'][1]['downloadUrl']
        return payload.get('downloadUrl')

    download_url = pick_download_url(url, data, model_type)
    clean_url = re.sub(r'[?&]token=[^&]*', '', download_url)  # hide token in logs

    def pick_preview(payload, m_type, m_name):
        # NOTE(review): relies on the module-level 'env' global (ENV_NAME).
        image_url, image_name = None, None
        if any(t in m_type for t in SUPPORT_TYPES):
            try:
                images = payload.get('images') or payload['modelVersions'][0].get('images', [])
                if env == 'Kaggle':
                    # Kaggle: only surface previews rated below nsfwLevel 4.
                    image_url = next((img['url'] for img in images if img['nsfwLevel'] < 4), None)
                else:
                    image_url = images[0]['url'] if images else None
            except KeyError:
                pass
        if image_url:
            image_name = f"{m_name.split('.')[0]}.preview.{image_url.split('.')[-1]}"
        return image_url, image_name

    image_url, image_name = pick_preview(data, model_type, model_name)

    sep = '&' if '?' in download_url else '?'
    return f"{download_url}{sep}token={CIVITAI_TOKEN}", clean_url, model_type, model_name, image_url, image_name, data
314
 
315
  ''' Main Download Code '''
316
 
317
def strip_(url):
    """Normalize GitHub/Hugging Face browse links into direct-download links."""
    if 'github.com' in url:
        return url.replace('/blob/', '/raw/')
    if "huggingface.co" in url:
        resolved = url.replace('/blob/', '/resolve/')
        return resolved.split('?')[0] if '?' in resolved else resolved
    return url
324
+
325
  def download(url):
326
  links_and_paths = [link_or_path.strip() for link_or_path in url.split(',') if link_or_path.strip()]
327
 
328
  for link_or_path in links_and_paths:
329
+ if any(link_or_path.lower().startswith(prefix) for prefix in PREFIXES):
330
  handle_manual(link_or_path)
331
  else:
332
  url, dst_dir, file_name = link_or_path.split()
333
  manual_download(url, dst_dir, file_name)
334
 
335
+ # Unpack ZIP files
 
 
336
  for directory in directories:
337
  for root, _, files in os.walk(directory):
338
  for file in files:
 
352
  if file_name:
353
  path = re.sub(r'\[.*?\]', '', path)
354
 
355
+ if prefix in PREFIXES:
356
+ dir = PREFIXES[prefix]
357
  if prefix != "extension":
358
  try:
359
  manual_download(path, dir, file_name=file_name)
 
367
  aria2c_header = "--header='User-Agent: Mozilla/5.0' --allow-overwrite=true"
368
  aria2_args = "--optimize-concurrent-downloads --console-log-level=error --summary-interval=10 --stderr=true -c -x16 -s16 -k1M -j5"
369
 
370
+ clean_url = strip_(url)
 
 
 
 
 
371
 
372
+ if 'civitai' in url:
373
+ url, clean_url, model_type, file_name, image_url, image_name, data = CivitAi_API(url, file_name)
374
  if image_url and image_name:
375
+ command = ["aria2c"] + aria2_args.split() + ["-d", dst_dir, "-o", image_name, image_url]
376
+ subprocess.run(command, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
 
377
 
378
+ elif 'github' in url or "huggingface.co" in url:
379
+ basename = url.split("/")[-1] if file_name is None else file_name
 
380
 
381
  """ Formatted info output """
382
+ model_name_or_basename = file_name if file_name else basename
383
  format_output(clean_url or url, dst_dir, model_name_or_basename)
384
 
 
385
  # print(url, dst_dir, model_name_or_basename)
386
+ if 'civitai' in url:
387
+ if not data:
388
+ print("\033[31m[Data Info]:\033[0m Failed to retrieve data from the API.\n")
389
+ if data and image_name:
390
+ print(f"\033[32m[Preview DL]:\033[0m {image_name} - {image_url}\n")
391
+
392
  # =====================
393
+ def run_aria2c(url, dst_dir, file_name=None, args="", header=""):
394
+ out = f"-o '{file_name}'" if file_name else ""
395
+ get_ipython().system("aria2c {header} {args} -d {dst_dir} {out} '{url}'")
396
 
397
+ # -- Google Drive --
398
+ if 'drive.google' in url:
399
+ if not globals().get('have_drive_link', False):
400
+ os.system("pip install -U gdown > /dev/null")
401
+ globals()['have_drive_link'] = True
 
 
 
 
 
 
402
 
403
  if 'folders' in url:
404
+ os.system(f"gdown --folder \"{url}\" -O {dst_dir} --fuzzy -c")
405
  else:
406
+ out_path = f"{dst_dir}/{file_name}" if file_name else dst_dir
407
+ os.system(f"gdown \"{url}\" -O {out_path} --fuzzy -c")
 
 
408
 
409
+ # -- GitHub or Hugging Face --
410
+ elif 'github' in url or 'huggingface' in url:
411
+ run_aria2c(clean_url, dst_dir, basename, aria2_args, header_option if 'huggingface' in url else '')
412
 
413
+ # -- Other HTTP/Sources --
414
  elif 'http' in url:
415
+ run_aria2c(url, dst_dir, file_name, aria2_args, aria2c_header)
416
 
417
  ''' SubModels - Added URLs '''
418
 
 
452
 
453
  ''' file.txt - added urls '''
454
 
455
+ def process_file_download(file_url, PREFIXES, unique_urls):
456
  files_urls = ""
457
 
458
  if file_url.startswith("http"):
 
467
  current_tag = None
468
  for line in lines:
469
  line = line.strip()
470
+ if any(f'# {tag}' in line.lower() for tag in PREFIXES):
471
+ current_tag = next((tag for tag in PREFIXES if tag in line.lower()))
472
 
473
  urls = [url.split('#')[0].strip() for url in line.split(',')] # filter urls
474
  for url in urls:
 
492
  custom_file_url = f'{root_path}/{custom_file_url}'
493
 
494
  try:
495
+ file_urls += process_file_download(custom_file_url, PREFIXES, unique_urls)
496
  except FileNotFoundError:
497
  pass
498
 
499
  # url prefixing
500
  urls = (Model_url, Vae_url, LoRA_url, Embedding_url, Extensions_url)
501
+ prefixed_urls = (f"{prefix}:{url}" for prefix, url in zip(PREFIXES.keys(), urls) if url for url in url.replace(',', '').split())
502
  url += ", ".join(prefixed_urls) + ", " + file_urls
503
 
504
  if detailed_download == "on":
files_cells/python/ru/launch_ru.py CHANGED
@@ -9,21 +9,13 @@ import cloudpickle as pickle
9
  from datetime import timedelta
10
  from IPython.display import clear_output
11
 
12
- # ================= DETECT ENV =================
13
- def detect_environment():
14
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
15
- environments = {
16
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
17
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
18
- }
19
-
20
- for env_var, (environment, path) in environments.items():
21
- if env_var in os.environ:
22
- return environment, path, free_plan
23
- return 'Unknown', '/unknown/path', free_plan
24
-
25
- env, root_path, free_plan = detect_environment()
26
- webui_path = f"{root_path}/sdw"
27
 
28
  def load_settings():
29
  SETTINGS_FILE = f'{root_path}/settings.json'
@@ -38,6 +30,7 @@ zrok_token = settings.get('zrok_token', "")
38
  commandline_arguments = settings.get('commandline_arguments', "")
39
  change_webui = settings.get('change_webui', "")
40
 
 
41
  # ======================== TUNNEL V2 ========================
42
  print('Please Wait...')
43
 
@@ -71,6 +64,7 @@ if zrok_token:
71
 
72
  clear_output()
73
 
 
74
  # =============== Automatic Fixing Path V3 ===============
75
  paths_to_check = {
76
  "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
 
9
  from datetime import timedelta
10
  from IPython.display import clear_output
11
 
12
+
13
# Setup Env — values exported by modules/setup_*.py before this script runs.
env = os.environ.get('ENV_NAME')
root_path = os.environ.get('ROOT_PATH')
webui_path = os.environ.get('WEBUI_PATH')
# FREE_PLAN is stored as the string 'True'/'False'; compare explicitly so this
# is a real bool — the non-empty string 'False' would otherwise be truthy.
free_plan = os.environ.get('FREE_PLAN') == 'True'
18
+
 
 
 
 
 
 
 
 
19
 
20
  def load_settings():
21
  SETTINGS_FILE = f'{root_path}/settings.json'
 
30
  commandline_arguments = settings.get('commandline_arguments', "")
31
  change_webui = settings.get('change_webui', "")
32
 
33
+
34
  # ======================== TUNNEL V2 ========================
35
  print('Please Wait...')
36
 
 
64
 
65
  clear_output()
66
 
67
+
68
  # =============== Automatic Fixing Path V3 ===============
69
  paths_to_check = {
70
  "tagger_hf_cache_dir": f"{webui_path}/models/interrogators/",
files_cells/python/ru/widgets_ru.py CHANGED
@@ -8,20 +8,11 @@ from ipywidgets import widgets, Layout, Label, Button, VBox, HBox
8
  from IPython.display import display, HTML, Javascript, clear_output
9
 
10
 
11
- # ================= DETECT ENV =================
12
- def detect_environment():
13
- free_plan = (os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3) <= 20)
14
- environments = {
15
- 'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
16
- 'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content")
17
- }
18
- for env_var, (environment, path) in environments.items():
19
- if env_var in os.environ:
20
- return environment, path, free_plan
21
-
22
- env, root_path, free_plan = detect_environment()
23
- webui_path = f"{root_path}/sdw"
24
- get_ipython().system('mkdir -p {root_path}')
25
 
26
 
27
  # ==================== CSS JS ====================
 
8
  from IPython.display import display, HTML, Javascript, clear_output
9
 
10
 
11
# Setup Env — values exported by modules/setup_*.py before this script runs.
env = os.environ.get('ENV_NAME')
root_path = os.environ.get('ROOT_PATH')
webui_path = os.environ.get('WEBUI_PATH')
# FREE_PLAN is stored as the string 'True'/'False'; compare explicitly so this
# is a real bool — the non-empty string 'False' would otherwise be truthy.
free_plan = os.environ.get('FREE_PLAN') == 'True'
 
 
 
 
 
 
 
 
 
16
 
17
 
18
  # ==================== CSS JS ====================
modules/models_data.py ADDED
@@ -0,0 +1,103 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
# SD 1.5 checkpoint presets offered by the downloader UI.
# Key: label shown in the widget; value: files (download URL + local name)
# fetched when that preset is selected. "+ INP" entries also ship an
# inpainting variant of the same model.
model_list = {
    "1.Anime (by XpucT) + INP": [
        {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2.safetensors", "name": "Anime_V2.safetensors"},
        {"url": "https://huggingface.co/XpucT/Anime/resolve/main/Anime_v2-inpainting.safetensors", "name": "Anime_V2-inpainting.safetensors"}
    ],
    "2.BluMix [Anime] [V7] + INP": [
        {"url": "https://civitai.com/api/download/models/361779", "name": "BluMix_V7.safetensors"},
        {"url": "https://civitai.com/api/download/models/363850", "name": "BluMix_V7-inpainting.safetensors"}
    ],
    "3.Cetus-Mix [Anime] [V4] + INP": [
        {"url": "https://civitai.com/api/download/models/130298", "name": "CetusMix_V4.safetensors"},
        {"url": "https://civitai.com/api/download/models/139882", "name": "CetusMix_V4-inpainting.safetensors"}
    ],
    "4.Counterfeit [Anime] [V3] + INP": [
        {"url": "https://huggingface.co/gsdf/Counterfeit-V3.0/resolve/main/Counterfeit-V3.0_fix_fp16.safetensors", "name": "Counterfeit_V3.safetensors"},
        {"url": "https://civitai.com/api/download/models/137911", "name": "Counterfeit_V3-inpainting.safetensors"}
    ],
    "5.CuteColor [Anime] [V3]": [
        {"url": "https://civitai.com/api/download/models/138754", "name": "CuteColor_V3.safetensors"}
    ],
    "6.Dark-Sushi-Mix [Anime]": [
        {"url": "https://civitai.com/api/download/models/101640", "name": "DarkSushiMix_2_5D.safetensors"},
        {"url": "https://civitai.com/api/download/models/56071", "name": "DarkSushiMix_colorful.safetensors"}
    ],
    "7.Deliberate [Realism] [V6] + INP": [
        {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6.safetensors", "name": "Deliberate_V6.safetensors"},
        {"url": "https://huggingface.co/XpucT/Deliberate/resolve/main/Deliberate_v6-inpainting.safetensors", "name": "Deliberate_V6-inpainting.safetensors"}
    ],
    "8.Meina-Mix [Anime] [V11] + INP": [
        {"url": "https://civitai.com/api/download/models/119057", "name": "MeinaMix_V11.safetensors"},
        {"url": "https://civitai.com/api/download/models/120702", "name": "MeinaMix_V11-inpainting.safetensors"}
    ],
    "9.Mix-Pro [Anime] [V4] + INP": [
        {"url": "https://civitai.com/api/download/models/125668", "name": "MixPro_V4.safetensors"},
        {"url": "https://civitai.com/api/download/models/139878", "name": "MixPro_V4-inpainting.safetensors"}
    ]
}
38
+
39
# VAE presets: label shown in the widget -> file(s) to download.
vae_list = {
    "1.Anime.vae": [{"url": "https://civitai.com/api/download/models/311162", "name": "Anime.vae.safetensors"}],
    "2.Anything.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/any.vae.safetensors", "name": "Anything.vae.safetensors"}],
    "3.Blessed2.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/blessed2.vae.safetensors", "name": "Blessed2.vae.safetensors"}],
    "4.ClearVae.vae": [{"url": "https://civitai.com/api/download/models/88156", "name": "ClearVae_23.vae.safetensors"}],
    "5.WD.vae": [{"url": "https://huggingface.co/NoCrypt/resources/resolve/main/VAE/wd.vae.safetensors", "name": "WD.vae.safetensors"}]
}
46
+
47
# ControlNet v1.1 presets: label -> model weights (.safetensors) plus, for most
# entries, the matching .yaml config fetched alongside them.
controlnet_list = {
    "1.canny": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_canny_fp16.safetensors", "name": "control_v11p_sd15_canny_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_canny_fp16.yaml", "name": "control_v11p_sd15_canny_fp16.yaml"}
    ],
    "2.openpose": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_openpose_fp16.safetensors", "name": "control_v11p_sd15_openpose_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_openpose_fp16.yaml", "name": "control_v11p_sd15_openpose_fp16.yaml"}
    ],
    "3.depth": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1p_sd15_depth_fp16.safetensors", "name": "control_v11f1p_sd15_depth_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1p_sd15_depth_fp16.yaml", "name": "control_v11f1p_sd15_depth_fp16.yaml"},
        {"url": "https://huggingface.co/NagisaNao/models/resolve/main/ControlNet_v11/control_v11p_sd15_depth_anything_fp16.safetensors", "name": "control_v11p_sd15_depth_anything_fp16.safetensors"}
    ],
    "4.normal_map": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_normalbae_fp16.safetensors", "name": "control_v11p_sd15_normalbae_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_normalbae_fp16.yaml", "name": "control_v11p_sd15_normalbae_fp16.yaml"}
    ],
    "5.mlsd": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_mlsd_fp16.safetensors", "name": "control_v11p_sd15_mlsd_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_mlsd_fp16.yaml", "name": "control_v11p_sd15_mlsd_fp16.yaml"}
    ],
    "6.lineart": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_lineart_fp16.safetensors", "name": "control_v11p_sd15_lineart_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15s2_lineart_anime_fp16.safetensors", "name": "control_v11p_sd15s2_lineart_anime_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_lineart_fp16.yaml", "name": "control_v11p_sd15_lineart_fp16.yaml"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15s2_lineart_anime_fp16.yaml", "name": "control_v11p_sd15s2_lineart_anime_fp16.yaml"}
    ],
    "7.soft_edge": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_softedge_fp16.safetensors", "name": "control_v11p_sd15_softedge_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_softedge_fp16.yaml", "name": "control_v11p_sd15_softedge_fp16.yaml"}
    ],
    "8.scribble": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_scribble_fp16.safetensors", "name": "control_v11p_sd15_scribble_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_scribble_fp16.yaml", "name": "control_v11p_sd15_scribble_fp16.yaml"}
    ],
    "9.segmentation": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_seg_fp16.safetensors", "name": "control_v11p_sd15_seg_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_seg_fp16.yaml", "name": "control_v11p_sd15_seg_fp16.yaml"}
    ],
    "10.shuffle": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_shuffle_fp16.safetensors", "name": "control_v11e_sd15_shuffle_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_shuffle_fp16.yaml", "name": "control_v11e_sd15_shuffle_fp16.yaml"}
    ],
    "11.tile": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11f1e_sd15_tile_fp16.safetensors", "name": "control_v11f1e_sd15_tile_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11f1e_sd15_tile_fp16.yaml", "name": "control_v11f1e_sd15_tile_fp16.yaml"}
    ],
    "12.inpaint": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11p_sd15_inpaint_fp16.safetensors", "name": "control_v11p_sd15_inpaint_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11p_sd15_inpaint_fp16.yaml", "name": "control_v11p_sd15_inpaint_fp16.yaml"}
    ],
    "13.instruct_p2p": [
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/resolve/main/control_v11e_sd15_ip2p_fp16.safetensors", "name": "control_v11e_sd15_ip2p_fp16.safetensors"},
        {"url": "https://huggingface.co/ckpt/ControlNet-v1-1/raw/main/control_v11e_sd15_ip2p_fp16.yaml", "name": "control_v11e_sd15_ip2p_fp16.yaml"}
    ]
}
modules/setup_en.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import shutil
4
+ import argparse
5
+ import importlib
6
+ import subprocess
7
+
8
def parse_args():
    """Parse command-line options for the setup script (--lang, --repo)."""
    cli = argparse.ArgumentParser(description='Script configuration')
    cli.add_argument('--lang', type=str, default='en', help='Код языка, по умолчанию "en"')
    cli.add_argument('--repo', type=str, required=True, help='Repository Name')
    return cli.parse_args()
13
+
14
def detect_environment():
    """Identify the hosting notebook platform.

    Returns (environment name, working root path, free_plan flag), or
    (None, None, None) after printing an error when neither Colab nor
    Kaggle markers are present in the environment.
    """
    total_gb = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3)
    free_plan = total_gb <= 20  # <=20 GB RAM heuristically means the free tier
    known = {
        'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
    }
    for marker, (name, path) in known.items():
        if marker in os.environ:
            return name, path, free_plan
    print("\033[31mError: an unsupported runtime environment was detected.\n\033[34mSupported environments:\033[0m Google Colab, Kaggle")
    return None, None, None
25
+
26
def setup_module_folder(root_path):
    """Create <root_path>/modules if needed and make it importable."""
    mod_dir = os.path.join(root_path, "modules")
    os.makedirs(mod_dir, exist_ok=True)
    if mod_dir not in sys.path:
        sys.path.append(mod_dir)
31
+
32
def clear_module_cache(modules_folder):
    """Evict cached imports that came from *modules_folder* so the freshly
    downloaded files are picked up on the next import."""
    for name in list(sys.modules):
        mod = sys.modules[name]
        origin = getattr(mod, '__file__', None)
        if origin and origin.startswith(modules_folder):
            del sys.modules[name]

    importlib.invalidate_caches()
39
+
40
def download_files(root_path, lang, repo):
    """Fetch the per-language cell scripts, CSS and modules from the HF repo,
    replacing any previously downloaded copies under *root_path*."""
    print("Please wait for the files to download... 👀", end='', flush=True)
    files_dict = {
        'CSS': {'CSS': ['main_widgets.css', 'auto_cleaner.css']},
        'file_cell': {f'files_cells/python/{lang}': [f'widgets_{lang}.py', f'downloading_{lang}.py', f'launch_{lang}.py', f'auto_cleaner_{lang}.py']},
        'file_cell/special': {f'special': ['dl_display_results.py']},
        'modules': {f'modules': ['models_data.py']}
    }
    for folder, contents in files_dict.items():
        target = os.path.join(root_path, folder)
        # Start each folder from a clean slate so stale files never linger.
        if os.path.exists(target):
            shutil.rmtree(target)
        os.makedirs(target)
        for remote_dir, names in contents.items():
            for name in names:
                src = f"https://huggingface.co/NagisaNao/{repo}/resolve/main/{remote_dir}/{name}"
                dst = os.path.join(target, name)
                os.system(f'wget -q {src} -O {dst}')
    print("\rDone! Now you can run the cells below. ☄️" + " "*30)
59
+
60
def main():
    """Entry point: detect the platform, fetch project files, export env vars."""
    args = parse_args()
    lang, repo = args.lang, args.repo

    env, root_path, free_plan = detect_environment()

    if env and root_path:
        webui_path = f"{root_path}/sdw"
        download_files(root_path, lang, repo)
        clear_module_cache(os.path.join(root_path, "modules"))
        setup_module_folder(root_path)

        # Publish the detected configuration for the other notebook cells.
        os.environ['ENV_NAME'] = env
        os.environ['ROOT_PATH'] = root_path
        os.environ['WEBUI_PATH'] = webui_path
        os.environ['FREE_PLAN'] = 'True' if free_plan else 'False'

        print(f"Runtime environment: \033[33m{env}\033[0m")
        if env == "Google Colab":
            print(f"Colab Pro subscription: \033[34m{not free_plan}\033[0m")
        print(f"File location: \033[32m{root_path}\033[0m")

        # NOTE(review): nesting of this warning inside the env check is inferred
        # from the diff hunk numbering — confirm against the original file.
        if repo != 'fast_repo':
            print('\n\033[31mWARNING: Test mode is used, there may be errors in use!\033[0m')

if __name__ == "__main__":
    main()
modules/setup_ru.py ADDED
@@ -0,0 +1,88 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ import os
2
+ import sys
3
+ import shutil
4
+ import argparse
5
+ import importlib
6
+ import subprocess
7
+
8
def parse_args():
    """Parse CLI options: --lang (optional, default 'en') and --repo (required)."""
    cli = argparse.ArgumentParser(description='Script configuration')
    cli.add_argument('--lang', type=str, default='en', help='Код языка, по умолчанию "en"')
    cli.add_argument('--repo', type=str, required=True, help='Название репозитория')
    return cli.parse_args()
13
+
14
def detect_environment():
    """Identify the hosting platform.

    Returns:
        (env_name, root_path, free_plan) for Google Colab or Kaggle,
        or (None, None, None) when the platform is not recognised.
    """
    # Heuristic: machines with <= 20 GiB of RAM are treated as the free tier.
    total_gib = os.sysconf('SC_PAGE_SIZE') * os.sysconf('SC_PHYS_PAGES') / (1024 ** 3)
    free_plan = total_gib <= 20

    known_platforms = {
        'COLAB_GPU': ('Google Colab', "/root" if free_plan else "/content"),
        'KAGGLE_URL_BASE': ('Kaggle', "/kaggle/working/content"),
    }
    for marker, (name, path) in known_platforms.items():
        if marker in os.environ:
            return name, path, free_plan

    print("\033[31mОшибка: обнаружена неизвестная среда выполнения.\n\033[34mПоддерживаемые среды:\033[0m Google Colab, Kaggle.")
    return None, None, None
25
+
26
def setup_module_folder(root_path):
    """Ensure ``<root_path>/modules`` exists and is importable (on sys.path)."""
    modules_dir = os.path.join(root_path, "modules")
    os.makedirs(modules_dir, exist_ok=True)
    # Append only once so repeated calls don't pollute sys.path with duplicates.
    if modules_dir not in sys.path:
        sys.path.append(modules_dir)
31
+
32
def clear_module_cache(modules_folder):
    """Drop every cached module whose source file lives under *modules_folder*.

    Forces a fresh import of the project's modules after they have been
    replaced on disk.
    """
    stale = [
        name for name, mod in list(sys.modules.items())
        if getattr(mod, '__file__', None) and mod.__file__.startswith(modules_folder)
    ]
    for name in stale:
        del sys.modules[name]

    importlib.invalidate_caches()
39
+
40
def download_files(root_path, lang, repo):
    """Download the project's CSS, cell scripts and modules from HuggingFace.

    Args:
        root_path: Local directory that will hold the downloaded folders.
        lang: Language code used to pick the localized cell scripts.
        repo: HuggingFace repository name to download from.

    Each target folder is wiped and recreated so stale files never linger.
    Downloads are best-effort: individual failures are ignored, mirroring
    the original os.system behaviour.
    """
    print("Пожалуйста, дождитесь загрузки файлов... 👀", end='', flush=True)
    # local folder -> {path inside the repo: [file names]}
    files_dict = {
        'CSS': {'CSS': ['main_widgets.css', 'auto_cleaner.css']},
        'file_cell': {f'files_cells/python/{lang}': [f'widgets_{lang}.py', f'downloading_{lang}.py', f'launch_{lang}.py', f'auto_cleaner_{lang}.py']},
        'file_cell/special': {'special': ['dl_display_results.py']},
        'modules': {'modules': ['models_data.py']}
    }
    for folder, contents in files_dict.items():
        folder_path = os.path.join(root_path, folder)
        # Start from a clean folder so files removed upstream don't survive locally.
        if os.path.exists(folder_path):
            shutil.rmtree(folder_path)
        os.makedirs(folder_path)
        for path_url, files in contents.items():
            for file in files:
                file_url = f"https://huggingface.co/NagisaNao/{repo}/resolve/main/{path_url}/{file}"
                file_path = os.path.join(folder_path, file)
                # List-form subprocess call: no shell interpolation of the URL/path.
                # The previous os.system(f'wget ...') broke on spaces/metacharacters
                # and was an injection surface for attacker-controlled repo names.
                try:
                    subprocess.run(['wget', '-q', file_url, '-O', file_path], check=False)
                except OSError:
                    pass  # wget missing: stay silent, like os.system did
    print("\rГотово! Теперь вы можете запустить ячейки ниже. ☄️" + " "*30)
59
+
60
def main():
    """Entry point: detect the runtime, fetch project files, export env vars."""
    args = parse_args()

    env, root_path, free_plan = detect_environment()

    # Unknown platform: detect_environment already printed the error.
    if not (env and root_path):
        return

    webui_path = f"{root_path}/sdw"
    download_files(root_path, args.lang, args.repo)
    clear_module_cache(os.path.join(root_path, "modules"))
    setup_module_folder(root_path)

    # Publish the detected configuration for the notebook cells that follow.
    os.environ.update({
        'ENV_NAME': env,
        'ROOT_PATH': root_path,
        'WEBUI_PATH': webui_path,
        'FREE_PLAN': 'True' if free_plan else 'False',
    })

    print(f"Среда выполнения: \033[33m{env}\033[0m")
    if env == "Google Colab":
        print(f"Подписка Colab Pro: \033[34m{not free_plan}\033[0m")
    print(f"Расположение файлов: \033[32m{root_path}\033[0m")

    if args.repo != 'fast_repo':
        print('\n\033[31mВНИМАНИЕ: Используется тестовый режим, возможны ошибки при использовании!\033[0m')

if __name__ == "__main__":
    main()