{
  "model_type": "llava_with_lora",
  "base_model": "llava-hf/llava-1.5-7b-hf",
  "training_method": "LoRA",
  "training_samples": 14023,
  "validation_samples": 2475,
  "epochs": 1,
  "learning_rate": 2e-05,
  "batch_size": 16,
  "lora_config": {
    "r": 16,
    "lora_alpha": 32,
    "lora_dropout": 0.05
  }
}