Delete finetune_analysis.sh
Browse files- finetune_analysis.sh +0 -130
finetune_analysis.sh
DELETED
|
@@ -1,130 +0,0 @@
|
|
| 1 |
-
#!/usr/bin/env bash
# Script 2: unpack dataset archives, run Claude analysis over caption (.txt)
# files, and patch FluxDatasetConfig.json with the parsed results.
#
# Strict mode: abort on command failure and on use of unset variables.
set -eu

echo ">>> [Script 2] This script will handle Claude analysis and config update."

###############################################################################
# 1. Wait for Enter, then collect optional user comments about the art
###############################################################################
echo ">>> [Script 2] Нажмите Enter, чтобы продолжить распаковку и анализ .txt файлов ..."
# -r: do not interpret backslashes in the input (ShellCheck SC2162).
read -r _

echo ">>> [Script 2] Введите дополнительные комментарии по поводу арта (или нажмите Enter, чтобы пропустить):"
# May legitimately be empty; passed verbatim to claude_analysis.py later.
read -r USER_COMMENTS
|
| 14 |
-
|
| 15 |
-
###############################################################################
# 2. Unpack every .zip under /workspace/MyLearningDataset/Images, then
#    delete the archive.
###############################################################################
echo ">>> [Script 2] Unzipping all .zip in /workspace/MyLearningDataset/Images ..."
# NUL-delimited find + read -d '': safe for archive names containing spaces,
# quotes or backslashes (the original `| while read zipf` mangled such names
# and ran the loop in a pipeline subshell).
while IFS= read -r -d '' zipf; do
  unzip -o "$zipf" -d /workspace/MyLearningDataset/Images
  rm -f -- "$zipf"
done < <(find /workspace/MyLearningDataset/Images -type f -name '*.zip' -print0)
|
| 23 |
-
|
| 24 |
-
###############################################################################
# 3. Gather up to 35 caption (.txt) files for analysis
###############################################################################
echo ">>> [Script 2] Collecting up to 35 .txt files ..."
# Newline-separated list of at most 35 caption paths; consumed downstream as
# a single argument by claude_analysis.py, so the joined form is intentional.
TXT_FILES="$(find /workspace/MyLearningDataset/Images -type f -name '*.txt' | head -n 35)"

# An empty list is only a warning here; the analysis step decides what to do.
[ -n "$TXT_FILES" ] || echo "[Script 2] WARNING: No .txt files found (up to 35)."
|
| 33 |
-
|
| 34 |
-
###############################################################################
# 4. Ensure the anthropic library is installed and the API key is available
###############################################################################
echo ">>> [Script 2] Installing anthropic library (if needed) ..."
pip install anthropic

# SECURITY: a previous revision hard-coded an Anthropic API key on this line.
# A key committed to a script must be treated as leaked and revoked.
# Require the caller to supply it via the environment instead; abort with a
# clear message if it is missing or empty.
: "${ANTHROPIC_API_KEY:?[Script 2] ERROR: ANTHROPIC_API_KEY must be set in the environment}"
export ANTHROPIC_API_KEY
|
| 44 |
-
|
| 45 |
-
###############################################################################
# 5. Run claude_analysis.py with the file list and the user comments
###############################################################################
echo ">>> [Script 2] Sending request to Claude via claude_analysis.py ..."
# stderr is suppressed and failure tolerated on purpose: under `set -e` a
# failing command substitution would kill the script before we can print the
# friendly diagnostics below.
PARSED_JSON=$(python /workspace/claude_analysis.py "$TXT_FILES" "$USER_COMMENTS" 2>/dev/null || true)

if [ -z "$PARSED_JSON" ]; then
  # Diagnostics belong on stderr, not stdout.
  echo "[Script 2] ERROR: Claude response is empty or not found." >&2
  exit 1
fi

# Crude sanity check: the helper script reports parse problems by embedding
# the word "error" in its JSON output.
if [[ "$PARSED_JSON" == *"error"* ]]; then
  echo "[Script 2] ERROR: JSON parse problem. See logs." >&2
  echo "$PARSED_JSON" >&2
  exit 1
fi
|
| 62 |
-
|
| 63 |
-
###############################################################################
# 6. Extract token, art_type, style_name, model_name and prompts from the JSON
###############################################################################
# Pull one top-level key out of $PARSED_JSON. Parsing stays in Python so the
# shell never has to understand JSON; this replaces four copy-pasted
# one-liners from the original revision.
json_field() {
  printf '%s' "$PARSED_JSON" \
    | python -c 'import sys, json; print(json.load(sys.stdin)[sys.argv[1]])' "$1"
}

TOKEN=$(json_field token)
ART_TYPE=$(json_field art_type)
STYLE_NAME=$(json_field style_name)
MODEL_NAME=$(json_field model_name)

# "prompts" is a JSON array; join its elements with newlines for the config.
# Missing key degrades to an empty string rather than an error.
PROMPTS=$(printf '%s' "$PARSED_JSON" | python -c '
import sys, json
print("\n".join(json.load(sys.stdin).get("prompts", [])))
')

echo ">>> [Script 2] Claude parsed result:"
echo "token = $TOKEN"
echo "art_type = $ART_TYPE"
echo "style_name = $STYLE_NAME"
echo "model_name = $MODEL_NAME"
echo "prompts:"
echo "$PROMPTS"
echo
|
| 88 |
-
|
| 89 |
-
###############################################################################
# 7. Move .txt and .png files into
#    /workspace/MyLearningDataset/Images/1_{model_name}_{style_name}
###############################################################################
NEW_FOLDER="/workspace/MyLearningDataset/Images/1_${MODEL_NAME}_${STYLE_NAME}"
mkdir -p "$NEW_FOLDER"

echo ">>> [Script 2] Moving all .txt and .png files into $NEW_FOLDER ..."
# Exclude the destination from the search: the destination lives inside the
# searched tree, so the original tried to move files already in $NEW_FOLDER
# onto themselves and hid the resulting mv errors. The trailing `|| true`
# remains so a missing dataset directory stays best-effort, as before.
find /workspace/MyLearningDataset/Images -type f \
  \( -name '*.txt' -o -name '*.png' \) \
  -not -path "${NEW_FOLDER}/*" \
  -exec mv -f -- {} "$NEW_FOLDER" \; 2>/dev/null || true
|
| 97 |
-
|
| 98 |
-
###############################################################################
# 8. Locate FluxDatasetConfig.json and patch the training fields
###############################################################################
FLUX_CONFIG_PATH=$(find /workspace -name "FluxDatasetConfig.json" | head -n 1)
if [ -z "$FLUX_CONFIG_PATH" ]; then
  echo "[Script 2] ERROR: FluxDatasetConfig.json not found!" >&2
  exit 1
fi

echo ">>> [Script 2] Updating FluxDatasetConfig.json at $FLUX_CONFIG_PATH ..."
# Pass all dynamic values to the generated Python script via the environment
# instead of interpolating them into its source: a prompt containing triple
# quotes, backslashes or a path containing '"' would otherwise break the
# script or inject arbitrary Python. The quoted 'EOF' delimiter disables all
# shell expansion inside the heredoc.
export FLUX_CONFIG_PATH MODEL_NAME PROMPTS
cat <<'EOF' > /workspace/update_flux_config.py
import json
import os

path = os.environ["FLUX_CONFIG_PATH"]
with open(path, "r", encoding="utf-8") as f:
    config = json.load(f)

model_name = os.environ["MODEL_NAME"]
config["train_data_dir"] = "/workspace/MyLearningDataset/Images"
config["output_dir"] = "/workspace/MyLearningDataset/Models"
config["output_name"] = model_name
config["huggingface_repo_id"] = "Gerchegg/" + model_name
config["logging_dir"] = "/workspace/MyLearningDataset/Logs"

# Newline-joined sample prompts produced by the Claude analysis step.
config["sample_prompts"] = os.environ["PROMPTS"]

with open(path, "w", encoding="utf-8") as f:
    json.dump(config, f, ensure_ascii=False, indent=2)
EOF

python /workspace/update_flux_config.py

echo ">>> [Script 2] Done. Analysis complete!"
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|