davidsmts committed on
Commit
fca888c
·
verified ·
1 Parent(s): eb33f97

Upload train_sft_qwen25.py with huggingface_hub

Browse files
Files changed (1) hide show
  1. train_sft_qwen25.py +125 -0
train_sft_qwen25.py ADDED
@@ -0,0 +1,125 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # /// script
2
+ # dependencies = [
3
+ # "trl>=0.12.0",
4
+ # "peft>=0.7.0",
5
+ # "transformers>=4.44.0",
6
+ # "accelerate>=0.34.0",
7
+ # "datasets>=2.19.0",
8
+ # "trackio",
9
+ # ]
10
+ # ///
11
+ """
12
+ SFT demo for Qwen/Qwen2.5-0.5B on the Capybara dataset with LoRA and Trackio.
13
+ Designed for Hugging Face Jobs (uv) with Hub push enabled.
14
+ """
15
+ import os
16
+ import random
17
+ from datasets import load_dataset
18
+ from transformers import AutoTokenizer
19
+ from peft import LoraConfig
20
+ from trl import SFTConfig, SFTTrainer
21
+ import trackio
22
+
23
# ---------------------------------------------------------------------------
# Run configuration.  Every knob can be overridden through an environment
# variable, so the same script serves local smoke tests and Hugging Face Jobs.
# ---------------------------------------------------------------------------
def _env(name, default):
    # Small helper: read a setting from the environment with a fallback.
    return os.environ.get(name, default)

MODEL_ID = _env("MODEL_ID", "Qwen/Qwen2.5-0.5B")
DATASET_ID = _env("DATASET_ID", "trl-lib/Capybara")

# Hub destination for the fine-tuned adapter, plus run/tracking labels.
HUB_MODEL_ID = _env("HUB_MODEL_ID", "davidsmts/qwen2_5-0.5b-capybara-sft")
RUN_NAME = _env("RUN_NAME", "qwen25-0.5b-capybara-demo")
PROJECT = _env("TRACKIO_PROJECT", "qwen-sft-demo")
SPACE_ID = _env("TRACKIO_SPACE", "davidsmts/trackio")

# Demo-sized training subset and the seed shared by all shuffling below.
MAX_TRAIN_SAMPLES = int(_env("MAX_TRAIN_SAMPLES", "200"))
SEED = int(_env("SEED", "42"))

random.seed(SEED)
33
+
34
print("Loading dataset...")
dataset = load_dataset(DATASET_ID, split="train")
print(f"Loaded {len(dataset)} examples")

# Cap the training set so the demo finishes quickly; a falsy/zero cap or a
# dataset already under the cap leaves the data untouched.
demo_cap = MAX_TRAIN_SAMPLES
if demo_cap and len(dataset) > demo_cap:
    dataset = dataset.shuffle(seed=SEED).select(range(demo_cap))
    print(f"Subsampled to {len(dataset)} examples for quick demo")

print("Creating train/test split...")
# Hold out 10% of the (possibly subsampled) data for periodic evaluation.
split_sets = dataset.train_test_split(test_size=0.1, seed=SEED)
train_dataset = split_sets["train"]
eval_dataset = split_sets["test"]
46
+
47
print("Loading tokenizer...")
# NOTE(review): use_fast=False forces the slow (pure-Python) tokenizer; the
# fast Rust tokenizer is typically much quicker for dataset preprocessing —
# confirm this choice is deliberate before changing it.
tokenizer = AutoTokenizer.from_pretrained(MODEL_ID, use_fast=False)
if tokenizer.pad_token is None:
    # Some checkpoints ship without a pad token; reuse EOS for padding.
    tokenizer.pad_token = tokenizer.eos_token
51
+
52
def formatting_func(example):
    """Render one Capybara conversation into a single training string.

    The dataset stores chat turns under the "messages" key; the tokenizer's
    chat template flattens them into the model's expected prompt format.
    No generation prompt is appended (this is supervised fine-tuning data).
    """
    rendered = tokenizer.apply_chat_template(
        example["messages"],
        tokenize=False,
        add_generation_prompt=False,
    )
    return rendered
55
+
56
print("Initializing Trackio...")
# Log the run's hyperparameters up front so the Trackio dashboard shows the
# configuration alongside the metrics.
run_config = {
    "model": MODEL_ID,
    "dataset": DATASET_ID,
    "lr": 2e-5,
    "epochs": 1,
    "max_train_samples": MAX_TRAIN_SAMPLES,
}
trackio.init(
    project=PROJECT,
    name=RUN_NAME,
    space_id=SPACE_ID,
    config=run_config,
)
69
+
70
print("Building LoRA config...")
# Low-rank adapters on the attention query/value projections keep the number
# of trainable parameters tiny — appropriate for a quick demo run.
peft_config = LoraConfig(
    task_type="CAUSAL_LM",
    target_modules=["q_proj", "v_proj"],
    r=16,
    lora_alpha=32,
    lora_dropout=0.05,
    bias="none",
)
79
+
80
print("Setting trainer args...")
training_args = SFTConfig(
    # ---- output & Hub ----------------------------------------------------
    output_dir="./outputs",
    push_to_hub=True,
    hub_model_id=HUB_MODEL_ID,
    hub_strategy="every_save",
    # ---- optimization ----------------------------------------------------
    num_train_epochs=1,
    per_device_train_batch_size=2,
    gradient_accumulation_steps=8,  # effective batch size of 16 per device
    learning_rate=2e-5,
    warmup_ratio=0.03,
    lr_scheduler_type="cosine",
    gradient_checkpointing=True,  # trade extra compute for lower memory
    fp16=True,  # assumes a CUDA GPU — TODO confirm for the target job
    max_length=1024,
    # ---- logging, eval, checkpoints -------------------------------------
    logging_steps=5,
    save_strategy="steps",
    save_steps=50,
    save_total_limit=2,
    eval_strategy="steps",
    eval_steps=50,
    # ---- experiment tracking --------------------------------------------
    report_to="trackio",
    # NOTE(review): `project=` must be supported by the installed
    # transformers/trl version's config — verify against the pinned deps.
    project=PROJECT,
    run_name=RUN_NAME,
)
105
+
106
print("Initializing trainer...")
# Build the SFT trainer.  Passing the model as a string id lets trl load the
# checkpoint itself.  `processing_class` is the supported way to hand over the
# tokenizer: the old `tokenizer=` keyword was deprecated in trl 0.12 and
# removed in later releases, so with the `trl>=0.12.0` pin a fresh dependency
# resolve would crash on it.
trainer = SFTTrainer(
    model=MODEL_ID,
    processing_class=tokenizer,
    train_dataset=train_dataset,
    eval_dataset=eval_dataset,
    formatting_func=formatting_func,
    peft_config=peft_config,
    args=training_args,
)
116
+
117
print("Starting training...")
# Runs the full SFT loop (with periodic eval/checkpointing per training_args).
trainer.train()

print("Saving and pushing to hub...")
# Upload the final weights/adapter to the Hub, then close the Trackio run so
# buffered metrics are flushed.
trainer.push_to_hub()
trackio.finish()

print(f"Done! Model pushed to https://huggingface.co/{HUB_MODEL_ID}")
print(f"Track metrics at https://huggingface.co/spaces/{SPACE_ID}")