smart-contract-lora / training_config.json — "LoRA v1 - 3 epochs full training" (commit fbbd4e7, verified, by Thibault-GAREL)
{
"model_name": "TinyLlama/TinyLlama-1.1B-Chat-v1.0",
"seed": 42,
"epochs": 3,
"learning_rate": 0.0002,
"batch_size": 4,
"grad_accum": 4,
"effective_batch_size": 16,
"lora_r": 16,
"lora_alpha": 32,
"lora_dropout": 0.05,
"max_seq_length": 2048,
"max_code_tokens": 1500,
"quantization": "none",
"train_samples": 3254,
"val_samples": 814,
"dataset": "dataset/dataset_9l_w_v2 (1).csv",
"test_size": 0.2
}