{
  "architectures": [
    "SiglipForImageClassification"
  ],
  "dtype": "float32",
  "id2label": {
    "0": "3AA",
    "1": "3AB",
    "2": "3AC",
    "3": "3BA",
    "4": "3BB",
    "5": "3BC",
    "6": "3CA",
    "7": "3CB",
    "8": "4AA",
    "9": "4AB",
    "10": "4AC",
    "11": "4BA",
    "12": "4BB",
    "13": "4BC",
    "14": "4CA",
    "15": "4CB",
    "16": "5AA",
    "17": "5AB",
    "18": "5AC",
    "19": "5BA",
    "20": "5BB",
    "21": "5BC",
    "22": "Not an Embryo"
  },
  "initializer_factor": 1.0,
  "label2id": {
    "3AA": 0,
    "3AB": 1,
    "3AC": 2,
    "3BA": 3,
    "3BB": 4,
    "3BC": 5,
    "3CA": 6,
    "3CB": 7,
    "4AA": 8,
    "4AB": 9,
    "4AC": 10,
    "4BA": 11,
    "4BB": 12,
    "4BC": 13,
    "4CA": 14,
    "4CB": 15,
    "5AA": 16,
    "5AB": 17,
    "5AC": 18,
    "5BA": 19,
    "5BB": 20,
    "5BC": 21,
    "Not an Embryo": 22
  },
  "model_type": "siglip",
  "problem_type": "single_label_classification",
  "text_config": {
    "attention_dropout": 0.0,
    "dtype": "float32",
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "max_position_embeddings": 64,
    "model_type": "siglip_text_model",
    "num_attention_heads": 12,
    "num_hidden_layers": 12,
    "projection_size": 768,
    "vocab_size": 256000
  },
  "transformers_version": "4.56.1",
  "vision_config": {
    "attention_dropout": 0.0,
    "dtype": "float32",
    "hidden_act": "gelu_pytorch_tanh",
    "hidden_size": 768,
    "image_size": 224,
    "intermediate_size": 3072,
    "layer_norm_eps": 1e-06,
    "model_type": "siglip_vision_model",
    "num_attention_heads": 12,
    "num_channels": 3,
    "num_hidden_layers": 12,
    "patch_size": 16
  }
}