{
    "DATASET_A_1024": {
        "gpu_allocation_map": [8, 1, 0, 0, 0, 1, 20, 1, 1],
        "resolutions_wh": ["832,1248", "896,1152", "960,1088", "1024,1024", "1088,960", "1152,896", "1216,832", "1280,800", "1344,768"],
        "ratios": [0.67, 0.78, 0.88, 1, 1.13, 1.3, 1.46, 1.6, 1.75],
        "batch_size": 1,
        "s3_url": "s3://hot-data-foundations-{REGION_NAME}/precomputed/face_draft_data/"
    }
}