import torchvision
# Config for the models that are supported by the extractor.
# Each entry maps a model name to the pretrained weights to load, the
# torchvision constructor to call, the graph node whose output is taken
# as the feature vector ("flatten" for ResNets, "getitem_5" for ViTs),
# and that feature vector's dimensionality.
MODEL_CONFIG = {
    name: {
        "weights": weights,
        "model": builder,
        "feat_layer": node,
        "feat_dims": dims,
    }
    for name, weights, builder, node, dims in (
        ("resnet18", torchvision.models.ResNet18_Weights.DEFAULT,
         torchvision.models.resnet18, "flatten", 512),
        ("resnet34", torchvision.models.ResNet34_Weights.DEFAULT,
         torchvision.models.resnet34, "flatten", 512),
        ("resnet50", torchvision.models.ResNet50_Weights.DEFAULT,
         torchvision.models.resnet50, "flatten", 2048),
        ("resnet101", torchvision.models.ResNet101_Weights.DEFAULT,
         torchvision.models.resnet101, "flatten", 2048),
        ("resnet152", torchvision.models.ResNet152_Weights.DEFAULT,
         torchvision.models.resnet152, "flatten", 2048),
        ("vit_b_16", torchvision.models.ViT_B_16_Weights.DEFAULT,
         torchvision.models.vit_b_16, "getitem_5", 768),
        ("vit_b_32", torchvision.models.ViT_B_32_Weights.DEFAULT,
         torchvision.models.vit_b_32, "getitem_5", 768),
        ("vit_l_16", torchvision.models.ViT_L_16_Weights.DEFAULT,
         torchvision.models.vit_l_16, "getitem_5", 1024),
        ("vit_l_32", torchvision.models.ViT_L_32_Weights.DEFAULT,
         torchvision.models.vit_l_32, "getitem_5", 1024),
        ("vit_h_14", torchvision.models.ViT_H_14_Weights.DEFAULT,
         torchvision.models.vit_h_14, "getitem_5", 1280),
    )
}