{
"_name_or_path": "google/vit-base-patch16-224-in21k",
"architectures": [
"ViTForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"encoder_stride": 16,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "Bacardi",
"1": "Bacardi_carta_oro",
"10": "coka_cola",
"11": "martini",
"2": "Bombay",
"3": "Coke",
"4": "Dewar%27s_12_Years",
"5": "Dewar%27s_white_lable",
"6": "bacardi_black",
"7": "bacardi_carta_blanca",
"8": "bacardi_carta_negra",
"9": "bombay_sapphire"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"Bacardi": "0",
"Bacardi_carta_oro": "1",
"Bombay": "2",
"Coke": "3",
"Dewar%27s_12_Years": "4",
"Dewar%27s_white_lable": "5",
"bacardi_black": "6",
"bacardi_carta_blanca": "7",
"bacardi_carta_negra": "8",
"bombay_sapphire": "9",
"coka_cola": "10",
"martini": "11"
},
"layer_norm_eps": 1e-12,
"model_type": "vit",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"patch_size": 16,
"qkv_bias": true,
"transformers_version": "4.30.2"
}