{
"architecture": "resnet18",
"num_classes": 10,
"num_features": 512,
"pretrained_cfg": {
"tag": "a1_in1k",
"custom_load": false,
"input_size": [
3,
256,
256
],
"test_input_size": [
3,
256,
256
],
"fixed_input_size": false,
"interpolation": "bicubic",
"crop_pct": 0.95,
"test_crop_pct": 1.0,
"crop_mode": "center",
"mean": [
0.49139968,
0.4821582,
0.44653124
],
"std": [
0.24703233,
0.24348505,
0.26158768
],
"num_classes": 10,
"pool_size": [
7,
7
],
"first_conv": "conv1",
"classifier": "fc",
"origin_url": "https://github.com/huggingface/pytorch-image-models",
"paper_ids": "arXiv:2110.00476"
},
"hf_hub_id": "SamAdamDay/resnet18_cifar10",
"source": "hf-hub",
"tag": "a1_in1k",
"custom_load": false,
"input_size": [
3,
256,
256
],
"test_input_size": [
3,
256,
256
],
"fixed_input_size": false,
"interpolation": "bicubic",
"crop_pct": 0.95,
"test_crop_pct": 1.0,
"crop_mode": "center",
"mean": [
0.49139968,
0.4821582,
0.44653124
],
"std": [
0.24703233,
0.24348505,
0.26158768
],
"pool_size": [
7,
7
],
"first_conv": "conv1",
"classifier": "fc",
"origin_url": "https://github.com/huggingface/pytorch-image-models",
"paper_ids": "arXiv:2110.00476"
}