```json
{
  "model_name": "codellama/CodeLlama-7b-hf",
  "sequence_length": 4096,
  "lora_config": {
    "rank": 64,
    "alpha": 32,
    "dropout": 0.1
  },
  "training_config": {
    "batch_size": 1,
    "accumulation_steps": 4,
    "epochs": 4,
    "learning_rate": 0.0002
  },
  "dataset_statistics": {
    "total_processed": 2653,
    "training_examples": 778,
    "evaluation_examples": 195,
    "discarded_examples": 1680,
    "retention_rate": "36.7%"
  }
}
```
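For context, here is a minimal sketch of how a config like this could be wired into a Hugging Face PEFT/transformers fine-tuning run. The `build_configs` helper, the file path, and `output_dir` are hypothetical and not part of the config above; the sketch assumes the standard `LoraConfig` and `TrainingArguments` parameter names.

```python
import json

from peft import LoraConfig
from transformers import TrainingArguments


def build_configs(path: str):
    """Hypothetical helper: map the JSON config above onto PEFT/transformers objects."""
    with open(path) as f:
        cfg = json.load(f)

    lora = cfg["lora_config"]
    train = cfg["training_config"]

    # LoRA section -> PEFT's LoraConfig. Note alpha/rank = 32/64 gives a
    # scaling factor of 0.5 on the adapter updates.
    peft_config = LoraConfig(
        r=lora["rank"],
        lora_alpha=lora["alpha"],
        lora_dropout=lora["dropout"],
        task_type="CAUSAL_LM",
    )

    # Training section -> TrainingArguments. Effective batch size is
    # batch_size * accumulation_steps = 1 * 4 = 4.
    training_args = TrainingArguments(
        output_dir="out",  # hypothetical; not specified in the config
        per_device_train_batch_size=train["batch_size"],
        gradient_accumulation_steps=train["accumulation_steps"],
        num_train_epochs=train["epochs"],
        learning_rate=train["learning_rate"],
    )
    return peft_config, training_args
```

On the dataset side, the numbers are internally consistent: 778 training + 195 evaluation + 1680 discarded = 2653 processed, and the 36.7% retention rate is (778 + 195) / 2653.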