# Fast fine-tune for Zenith - World's First Autonomous AI Development Partner
param(
    [string]$PythonExe = "python"
)

Write-Host "🚀 Setting up ZENITH fine-tune for Aspetos (<2h training)..."
Write-Host "Building the most advanced AI development partner in existence!"

# Env vars for stable training (adjust if needed)
$Env:BASE_MODEL = "D:\aspetos\DeepSeek-Coder-V2-Lite-Instruct"
$Env:OUTPUT_DIR = "outputs\zenith-lora"
$Env:DATA_PATH = "data\zenith_combined.jsonl"
$Env:EPOCHS = "1"
$Env:BATCH = "4"  # Balanced for A100
$Env:GRAD_ACC = "4"  # Effective batch size = 16
$Env:LR = "1e-4"  # Stable learning rate for proper convergence
$Env:STEPS = "180"  # Sufficient steps for learning
$Env:MAX_SEQ_LEN = "2048"
$Env:USE_4BIT = "1"
$Env:SEED = "42"
$Env:MAX_GRAD_NORM = "1.0"  # Gradient clipping
$Env:WEIGHT_DECAY = "0.01"
$Env:WARMUP_RATIO = "0.05"
$Env:EARLY_STOP_PATIENCE = "5"  # Allow more patience for learning
$Env:EVAL_STEPS = "40"  # Balanced evaluation frequency
$Env:SAVE_STEPS = "40"  # Balanced save frequency

Write-Host "Installing dependencies..."
& $PythonExe -m pip install -r requirements.txt
if ($LASTEXITCODE -ne 0) { throw "pip install failed" }

Write-Host "🎯 Starting ZENITH fine-tune (target: 200 steps, ~1-2 hours on GPU)..."
Write-Host "Training the world's most advanced autonomous development partner!"
& $PythonExe train.py
if ($LASTEXITCODE -ne 0) { throw "Training failed" }
Write-Host "✅ ZENITH training complete - Ready for deployment!"