HumanxAI committed on
Commit
8b918d9
·
verified ·
1 Parent(s): 94d0f4d

Upload fluxprov.sh

Browse files
Files changed (1) hide show
  1. fluxprov.sh +94 -0
fluxprov.sh ADDED
@@ -0,0 +1,94 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
#!/bin/bash
# This file will be sourced in init.sh
# Namespace functions with provisioning_

# https://raw.githubusercontent.com/ai-dock/kohya_ss/main/config/provisioning/default.sh

### Edit the following arrays to suit your workflow - values must be quoted and separated by newlines or spaces.

# Minimum disk allocation (GB) required to download the full model set.
DISK_GB_REQUIRED=30

PIP_PACKAGES=(
    #"package==version"
)

# FIX: bash array literals use plain parentheses with whitespace-separated
# elements. The original "([ ... ])" made "[" and "]" array elements and the
# trailing commas became part of the URLs. Also switched /blob/ to /resolve/:
# /blob/ URLs serve the HTML viewer page, /resolve/ serves the actual file.
CHECKPOINT_MODELS=(
    "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/flux1-dev.safetensors"
    "https://huggingface.co/black-forest-labs/FLUX.1-dev/resolve/main/ae.safetensors"
    "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/clip_l.safetensors"
    "https://huggingface.co/comfyanonymous/flux_text_encoders/resolve/main/t5xxl_fp16.safetensors"
)
21
+
22
+
23
+ ### DO NOT EDIT BELOW HERE UNLESS YOU KNOW WHAT YOU ARE DOING ###
24
+
25
#######################################
# Orchestrate provisioning: load the ai-dock environment, measure disk
# space, then install packages and download checkpoint models.
# Globals: WORKSPACE, CHECKPOINT_MODELS (read);
#          DISK_GB_AVAILABLE, DISK_GB_USED, DISK_GB_ALLOCATED (written)
#######################################
function provisioning_start() {
    source /opt/ai-dock/etc/environment.sh
    source /opt/ai-dock/bin/venv-set.sh kohya

    # df -m reports MiB; /1000 gives an approximate GB figure.
    DISK_GB_AVAILABLE=$(($(df --output=avail -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_USED=$(($(df --output=used -m "${WORKSPACE}" | tail -n1) / 1000))
    DISK_GB_ALLOCATED=$((DISK_GB_AVAILABLE + DISK_GB_USED))
    provisioning_print_header
    # FIX: provisioning_get_mamba_packages is not defined in this script, so
    # the unconditional call printed "command not found" on every run. Only
    # invoke it if a sourced file has provided it.
    if declare -f provisioning_get_mamba_packages > /dev/null; then
        provisioning_get_mamba_packages
    fi
    provisioning_get_pip_packages
    provisioning_get_models \
        "${WORKSPACE}/storage/stable_diffusion/models/ckpt" \
        "${CHECKPOINT_MODELS[@]}"

    provisioning_print_end
}
41
+
42
#######################################
# Install user-requested pip packages (if any) into the kohya venv.
# Globals: PIP_PACKAGES (read, array), KOHYA_VENV_PIP (read, pip path)
#######################################
function provisioning_get_pip_packages() {
    # FIX: test the element count rather than only element 0, and quote the
    # array expansion so package specs can never be word-split or globbed.
    if (( ${#PIP_PACKAGES[@]} > 0 )); then
        "$KOHYA_VENV_PIP" install --no-cache-dir "${PIP_PACKAGES[@]}"
    fi
}
47
+
48
#######################################
# Download a list of model URLs into a target directory.
# Arguments: $1 - destination directory (created if missing)
#            $2.. - model URLs; if the disk allocation is below the
#                   requirement, only the first URL is downloaded
# Globals:   DISK_GB_ALLOCATED, DISK_GB_REQUIRED (read)
# Returns:   1 when no URLs were supplied, 0 otherwise
#######################################
function provisioning_get_models() {
    if [[ -z $2 ]]; then return 1; fi
    # FIX: working variables are now local; previously dir/arr/url leaked
    # into the caller's global scope.
    local dir="$1"
    local -a arr
    local url
    mkdir -p "$dir"
    shift
    if [[ $DISK_GB_ALLOCATED -ge $DISK_GB_REQUIRED ]]; then
        arr=("$@")
    else
        printf "WARNING: Low disk space allocation - Only the first model will be downloaded!\n"
        arr=("$1")
    fi

    printf "Downloading %s model(s) to %s...\n" "${#arr[@]}" "$dir"
    for url in "${arr[@]}"; do
        printf "Downloading: %s\n" "${url}"
        provisioning_download "${url}" "${dir}"
        printf "\n"
    done
}
67
+
68
#######################################
# Print the provisioning banner; warn when the allocated disk size is
# below the recommended minimum.
# Globals: DISK_GB_ALLOCATED, DISK_GB_REQUIRED (read)
#######################################
function provisioning_print_header() {
    printf "\n##############################################\n# #\n# Provisioning container #\n# #\n# This will take some time #\n# #\n# Your container will be ready on completion #\n# #\n##############################################\n\n"
    if (( DISK_GB_ALLOCATED < DISK_GB_REQUIRED )); then
        printf "WARNING: Your allocated disk size (%sGB) is below the recommended %sGB - Some models will not be downloaded\n" "$DISK_GB_ALLOCATED" "$DISK_GB_REQUIRED"
    fi
}
74
+
75
#######################################
# Announce that provisioning has finished.
#######################################
function provisioning_print_end() {
    local done_msg="Provisioning complete: Web UI will start now"
    printf "\n%s\n\n" "$done_msg"
}
78
+
79
+ # Download from $1 URL to $2 file path
80
#######################################
# Download $1 (URL) into directory $2, attaching a bearer token when the
# URL is a Hugging Face or CivitAI host and the matching token is set.
# Arguments: $1 - URL; $2 - destination directory;
#            $3 - optional wget dot-progress interval (default 4M)
# Globals:   HF_TOKEN, CIVITAI_TOKEN (read)
#######################################
function provisioning_download() {
    # FIX: auth_token is now local and reset on every call. Previously it
    # leaked into the global scope, so a token selected for one URL was
    # silently reused for later URLs that matched neither host — sending
    # credentials to third-party servers.
    local auth_token=""
    if [[ -n $HF_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?huggingface\.co(/|$|\?) ]]; then
        auth_token="$HF_TOKEN"
    elif [[ -n $CIVITAI_TOKEN && $1 =~ ^https://([a-zA-Z0-9_-]+\.)?civitai\.com(/|$|\?) ]]; then
        auth_token="$CIVITAI_TOKEN"
    fi
    if [[ -n $auth_token ]]; then
        wget --header="Authorization: Bearer $auth_token" -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1"
    else
        wget -qnc --content-disposition --show-progress -e dotbytes="${3:-4M}" -P "$2" "$1"
    fi
}
93
+
94
+ provisioning_start