{
"_name_or_path": "microsoft/dit-large-finetuned-rvlcdip",
"architectures": [
"BylawBeitForImageClassification"
],
"attention_probs_dropout_prob": 0.0,
"auxiliary_channels": 256,
"auxiliary_concat_input": false,
"auxiliary_loss_weight": 0.4,
"auxiliary_num_convs": 1,
"drop_path_rate": 0.1,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.0,
"hidden_size": 768,
"id2label": {
"0": "template0",
"1": "template1",
"2": "template2",
"3": "template3",
"4": "template4",
"5": "template5",
"6": "template6",
"7": "template7",
"8": "template8",
"9": "template9",
"10": "template10",
"11": "template11",
"12": "template12",
"13": "template13",
"14": "template14",
"15": "template15",
"16": "template16",
"17": "template17",
"18": "template18",
"19": "template19",
"20": "template20",
"21": "template21",
"22": "template22",
"23": "template23",
"24": "template24",
"25": "template25",
"26": "template26",
"27": "template27",
"28": "template28",
"29": "template29",
"30": "template30",
"31": "template31",
"32": "template32",
"33": "template33",
"34": "template34",
"35": "template35",
"36": "template36",
"37": "template37",
"38": "template38",
"39": "template39",
"40": "template40",
"41": "template41",
"42": "template42",
"43": "template43",
"44": "template44",
"45": "template45",
"46": "template46",
"47": "template47",
"48": "template48",
"49": "template49",
"50": "template50",
"51": "template51",
"52": "template52",
"53": "template53",
"54": "template54",
"55": "template55",
"56": "template56",
"57": "template57",
"58": "template-1"
},
"image_size": 224,
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"template-1": 58,
"template0": 0,
"template1": 1,
"template10": 10,
"template11": 11,
"template12": 12,
"template13": 13,
"template14": 14,
"template15": 15,
"template16": 16,
"template17": 17,
"template18": 18,
"template19": 19,
"template2": 2,
"template20": 20,
"template21": 21,
"template22": 22,
"template23": 23,
"template24": 24,
"template25": 25,
"template26": 26,
"template27": 27,
"template28": 28,
"template29": 29,
"template3": 3,
"template30": 30,
"template31": 31,
"template32": 32,
"template33": 33,
"template34": 34,
"template35": 35,
"template36": 36,
"template37": 37,
"template38": 38,
"template39": 39,
"template4": 4,
"template40": 40,
"template41": 41,
"template42": 42,
"template43": 43,
"template44": 44,
"template45": 45,
"template46": 46,
"template47": 47,
"template48": 48,
"template49": 49,
"template5": 5,
"template50": 50,
"template51": 51,
"template52": 52,
"template53": 53,
"template54": 54,
"template55": 55,
"template56": 56,
"template57": 57,
"template6": 6,
"template7": 7,
"template8": 8,
"template9": 9
},
"layer_norm_eps": 1e-12,
"layer_scale_init_value": 0.1,
"model_type": "beit",
"num_attention_heads": 12,
"num_channels": 3,
"num_hidden_layers": 12,
"out_indices": [
3,
5,
7,
11
],
"patch_size": 16,
"pool_scales": [
1,
2,
3,
6
],
"problem_type": "single_label_classification",
"semantic_loss_ignore_index": 255,
"torch_dtype": "float32",
"transformers_version": "4.35.2",
"use_absolute_position_embeddings": false,
"use_auxiliary_head": true,
"use_mask_token": false,
"use_mean_pooling": true,
"use_relative_position_bias": false,
"use_shared_relative_position_bias": false,
"vocab_size": 8192
}