# /// script
# dependencies = ["trl>=0.12.0", "peft>=0.7.0", "trackio", "torch", "transformers", "datasets"]
# ///
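# Run with `uv run <script>.py` (or any PEP 723-aware runner); the inline
# metadata block above declares the dependencies, so no separate install step
# is needed.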
from datasets import load_dataset
from peft import LoraConfig
from trl import SFTTrainer, SFTConfig
import trackio  # not used directly; fails fast at startup if the "trackio" reporting backend is missing
print("๐ Starting quick proof-of-concept training...")
# Load tiny subset for quick test
dataset = load_dataset("trl-lib/Capybara", split="train[:50]")
print(f"๐ Dataset loaded: {len(dataset)} examples")
# LoRA configuration
peft_config = LoraConfig(
    r=16,               # LoRA rank
    lora_alpha=32,      # scaling factor (alpha / r = 2)
    lora_dropout=0.05,
    target_modules=["q_proj", "v_proj", "k_proj", "o_proj"],  # attention projections
    task_type="CAUSAL_LM",
)
# Training configuration
training_args = SFTConfig(
    output_dir="comfyui-specialist-test",
    num_train_epochs=1,
    max_steps=50,                   # just 50 steps for quick validation
    per_device_train_batch_size=2,
    gradient_accumulation_steps=4,  # effective batch size: 2 * 4 = 8
    learning_rate=2e-4,
    logging_steps=5,
    save_strategy="steps",
    save_steps=25,
    push_to_hub=True,
    hub_model_id="lokegud/comfyui-specialist-test",
    hub_strategy="every_save",      # push a checkpoint to the Hub on every save
    report_to="trackio",
    project="comfyui-specialist",
    run_name="quick-test",
    gradient_checkpointing=True,
)
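# Note: max_steps takes precedence over num_train_epochs in the HF Trainer, so
# this run stops after 50 optimizer steps (50 * 8 = 400 samples, i.e. roughly
# 8 passes over the 50-example subset) regardless of the epoch setting.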
print("๐ง Initializing trainer...")
# Initialize trainer
trainer = SFTTrainer(
    model="Qwen/Qwen2.5-0.5B",  # SFTTrainer loads the base model from this Hub ID
    train_dataset=dataset,
    peft_config=peft_config,
    args=training_args,
)
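# Optional sanity check (a sketch; assumes the trainer wrapped the base model
# as a PEFT model, so only the LoRA adapter weights should be trainable):
# trainer.model.print_trainable_parameters()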
print("๐๏ธ Training...")
trainer.train()
print("๐ค Pushing to Hub...")
trainer.push_to_hub()
print("โ
Quick test complete! Model saved to: lokegud/comfyui-specialist-test")
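# To try the trained adapter afterwards (a sketch; assumes the push above
# succeeded and you have read access to the repo):
# from peft import AutoPeftModelForCausalLM
# model = AutoPeftModelForCausalLM.from_pretrained("lokegud/comfyui-specialist-test")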