NepPOS_Ebiquity / config.json
sunilregmi's picture
Upload folder using huggingface_hub
1ab791b verified
{
"_name_or_path": "NepBERTa/NepBERTa",
"architectures": [
"BertForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "NN",
"1": "IKO",
"2": "FB",
"3": "II",
"4": "JX",
"5": "CC",
"6": "IKM",
"7": "VVYN1",
"8": "YF",
"9": "DDX",
"10": "IE",
"11": "NP",
"12": "VQ",
"13": "VE",
"14": "IA",
"15": "VVYX2",
"16": "DJX",
"17": "TT",
"18": "VI",
"19": "RR",
"20": "MM",
"21": "VDX",
"22": "RD",
"23": "IKF",
"24": "VN",
"25": "IKX",
"26": "MLO",
"27": "IH",
"28": "PXH",
"29": "JM",
"30": "PRF",
"31": "YM",
"32": "MOX",
"33": "MOM",
"34": "PMXKO",
"35": "JO",
"36": "DKM",
"37": "CSB",
"38": "VVMX1",
"39": "DKX",
"40": "PRFKO",
"41": "PRFKX",
"42": "DDM",
"43": "PMXKM",
"44": "PMX",
"45": "VVMX2",
"46": "RK",
"47": "VDO",
"48": "MLX",
"49": "DDO",
"50": "PRFKM",
"51": "FZ",
"52": "PTH",
"53": "DGX",
"54": "VOYN1",
"55": "VCM",
"56": "VVYM1F",
"57": "DGM",
"58": "RJ",
"59": "VDM",
"60": "FF",
"61": "VR",
"62": "PMXKX",
"63": "DKO",
"64": "DGO",
"65": "VOMX2",
"66": "VVYN1F",
"67": "MLF"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"CC": 5,
"CSB": 37,
"DDM": 42,
"DDO": 49,
"DDX": 9,
"DGM": 57,
"DGO": 64,
"DGX": 53,
"DJX": 16,
"DKM": 36,
"DKO": 63,
"DKX": 39,
"FB": 2,
"FF": 60,
"FZ": 51,
"IA": 14,
"IE": 10,
"IH": 27,
"II": 3,
"IKF": 23,
"IKM": 6,
"IKO": 1,
"IKX": 25,
"JM": 29,
"JO": 35,
"JX": 4,
"MLF": 67,
"MLO": 26,
"MLX": 48,
"MM": 20,
"MOM": 33,
"MOX": 32,
"NN": 0,
"NP": 11,
"PMX": 44,
"PMXKM": 43,
"PMXKO": 34,
"PMXKX": 62,
"PRF": 30,
"PRFKM": 50,
"PRFKO": 40,
"PRFKX": 41,
"PTH": 52,
"PXH": 28,
"RD": 22,
"RJ": 58,
"RK": 46,
"RR": 19,
"TT": 17,
"VCM": 55,
"VDM": 59,
"VDO": 47,
"VDX": 21,
"VE": 13,
"VI": 18,
"VN": 24,
"VOMX2": 65,
"VOYN1": 54,
"VQ": 12,
"VR": 61,
"VVMX1": 38,
"VVMX2": 45,
"VVYM1F": 56,
"VVYN1": 7,
"VVYN1F": 66,
"VVYX2": 15,
"YF": 8,
"YM": 31
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"output_hidden_states": true,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.38.2",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 30522
}