{
  "layers": "26",
  "batch_size": 256,
  "total_steps_planned": 64000,
  "total_steps_executed": 54200,
  "last_eval_accuracy": 1.0,
  "number_base": 2,
  "max_token_digits": 2,
  "addition_up_to_digits": 50,
  "reversed_digits": true,
  "early_stopping": {
    "enabled": true,
    "target_accuracy": 1.0,
    "patience": 5
  }
}