{
"_name_or_path": "google/vit-base-patch16-224-in21k",
"architectures": [
"ViTForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"encoder_stride": 16,
"finetuning_task": "image-classification",
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "aloevera",
"1": "arjun",
"10": "coffee",
"11": "coriander",
"12": "curry",
"13": "giloy",
"14": "ginger",
"15": "glochidion",
"16": "gotu kola",
"17": "hibiscus",
"18": "jasmine",
"19": "lemon",
"2": "ashwagandha",
"20": "madar",
"21": "mango",
"22": "marigold",
"23": "mint",
"24": "moringa",
"25": "naruneendi",
"26": "neem",
"27": "onion",
"28": "papaya",
"29": "ricinus",
"3": "babool",
"30": "rose",
"31": "sarpagandha",
"32": "shatavari",
"33": "stereoserpum",
"34": "tomato",
"35": "tulsi",
"36": "turmeric",
"37": "wedelia",
"4": "bael",
"5": "bakuchi",
"6": "barberry",
"7": "bhilawa",
"8": "bhringraj",
"9": "chilly"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"aloevera": "0",
"arjun": "1",
"ashwagandha": "2",
"babool": "3",
"bael": "4",
"bakuchi": "5",
"barberry": "6",
"bhilawa": "7",
"bhringraj": "8",
"chilly": "9",
"coffee": "10",
"coriander": "11",
"curry": "12",
"giloy": "13",
"ginger": "14",
"glochidion": "15",
"gotu kola": "16",
"hibiscus": "17",
"jasmine": "18",
"lemon": "19",
"madar": "20",
"mango": "21",
"marigold": "22",
"mint": "23",
"moringa": "24",
"naruneendi": "25",
"neem": "26",
"onion": "27",
"papaya": "28",
"ricinus": "29",
"rose": "30",
"sarpagandha": "31",
"shatavari": "32",
"stereoserpum": "33",
"tomato": "34",
"tulsi": "35",
"turmeric": "36",
"wedelia": "37"
},
"layer_norm_eps": 1e-12,
"model_type": "vit",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"patch_size": 16,
"problem_type": "single_label_classification",
"qkv_bias": true,
"torch_dtype": "float32",
"transformers_version": "4.50.0.dev0"
}