{
  "architectures": [
    "SiglipForImageClassification"
  ],
  "id2label": {
    "0": "Beige",
    "1": "Black",
    "2": "Blue",
    "3": "Bronze",
    "4": "Brown",
    "5": "Burgundy",
    "6": "Charcoal",
    "7": "Coffee Brown",
    "8": "Copper",
    "9": "Cream",
    "10": "Fluorescent Green",
    "11": "Gold",
    "12": "Green",
    "13": "Grey",
    "14": "Grey Melange",
    "15": "Khaki",
    "16": "Lavender",
    "17": "Lime Green",
    "18": "Magenta",
    "19": "Maroon",
    "20": "Mauve",
    "21": "Metallic",
    "22": "Multi",
    "23": "Mushroom Brown",
    "24": "Mustard",
    "25": "Navy Blue",
    "26": "Nude",
    "27": "Off White",
    "28": "Olive",
    "29": "Orange",
    "30": "Peach",
    "31": "Pink",
    "32": "Purple",
    "33": "Red",
    "34": "Rose",
    "35": "Rust",
    "36": "Sea Green",
    "37": "Silver",
    "38": "Skin",
    "39": "Steel",
    "40": "Tan",
    "41": "Taupe",
    "42": "Teal",
    "43": "Turquoise Blue",
    "44": "White",
    "45": "Yellow"
  },
  "initializer_factor": 1.0,
  "label2id": {
    "Beige": 0,
    "Black": 1,
    "Blue": 2,
    "Bronze": 3,
    "Brown": 4,
    "Burgundy": 5,
    "Charcoal": 6,
    "Coffee Brown": 7,
    "Copper": 8,
    "Cream": 9,
    "Fluorescent Green": 10,
    "Gold": 11,
    "Green": 12,
    "Grey": 13,
    "Grey Melange": 14,
    "Khaki": 15,
    "Lavender": 16,
    "Lime Green": 17,
    "Magenta": 18,
    "Maroon": 19,
    "Mauve": 20,
    "Metallic": 21,
    "Multi": 22,
    "Mushroom Brown": 23,
    "Mustard": 24,
    "Navy Blue": 25,
    "Nude": 26,
    "Off White": 27,
    "Olive": 28,
    "Orange": 29,
    "Peach": 30,
    "Pink": 31,
    "Purple": 32,
    "Red": 33,
    "Rose": 34,
    "Rust": 35,
    "Sea Green": 36,
    "Silver": 37,
    "Skin": 38,
    "Steel": 39,
    "Tan": 40,
    "Taupe": 41,
    "Teal": 42,
    "Turquoise Blue": 43,
    "White": 44,
    "Yellow": 45
  },
  "model_type": "siglip",
  "problem_type": "single_label_classification",
  "text_config": {
    "attention_dropout": 0.0,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "max_position_embeddings": 64,
    "model_type": "siglip_text_model",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "projection_size": 768,
    "torch_dtype": "float32",
    "vocab_size": 256000
  },
  "torch_dtype": "float32",
  "transformers_version": "4.50.3",
  "vision_config": {
    "attention_dropout": 0.0,
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "image_size": 224,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 12,
    "num_channels": 3,
    "num_hidden_layers": 12,
    "patch_size": 16,
    "torch_dtype": "float32"
  }
}
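
For reference, a minimal sketch of running inference with a checkpoint that ships this config. The repo id and image path below are placeholder assumptions (the actual repository name is not given above); note that when transformers loads the config, it normalizes the `id2label` keys from strings to integers.

```python
# Minimal sketch (not part of config.json): classify one image with a
# SiglipForImageClassification checkpoint carrying the config above.
# The repo id and image path are placeholder assumptions.
import torch
from PIL import Image
from transformers import AutoImageProcessor, SiglipForImageClassification

model_id = "prithivMLmods/your-color-model"  # hypothetical repo id
processor = AutoImageProcessor.from_pretrained(model_id)
model = SiglipForImageClassification.from_pretrained(model_id)
model.eval()

image = Image.open("garment.jpg").convert("RGB")  # any RGB input image
inputs = processor(images=image, return_tensors="pt")

with torch.no_grad():
    logits = model(**inputs).logits  # shape (1, 46): one score per color label

predicted_id = logits.argmax(-1).item()
# id2label keys are strings in config.json but ints after loading
print(model.config.id2label[predicted_id])
```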