{
"architectures": [
"BertForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"finetuning_task": "ner",
"gradient_checkpointing": false,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 768,
"id2label": {
"0": "I-T028",
"1": "B-T017",
"2": "B-T058",
"3": "B-T080",
"4": "B-T125",
"5": "B-T040",
"6": "I-T031",
"7": "I-T041",
"8": "I-T184",
"9": "I-T022",
"10": "I-T121",
"11": "I-T063",
"12": "B-T097",
"13": "B-T114",
"14": "B-T047",
"15": "B-T168",
"16": "I-T129",
"17": "B-T044",
"18": "B-T103",
"19": "I-T020",
"20": "B-T116",
"21": "B-T026",
"22": "B-T007",
"23": "B-T033",
"24": "B-T074",
"25": "B-T048",
"26": "B-T022",
"27": "B-T190",
"28": "B-T201",
"29": "B-T038",
"30": "I-T062",
"31": "I-T033",
"32": "B-T184",
"33": "B-T204",
"34": "B-T063",
"35": "I-T092",
"36": "B-T092",
"37": "I-T045",
"38": "I-T191",
"39": "I-T082",
"40": "O",
"41": "B-T062",
"42": "B-T167",
"43": "B-T169",
"44": "B-T170",
"45": "B-T082",
"46": "B-T081",
"47": "B-T109",
"48": "I-T058",
"49": "I-T043",
"50": "B-T043",
"51": "B-T046",
"52": "B-T123",
"53": "B-T020",
"54": "B-T104",
"55": "I-T039",
"56": "I-T019",
"57": "I-T109",
"58": "I-T007",
"59": "B-T028",
"60": "I-T168",
"61": "B-T121",
"62": "I-T044",
"63": "B-T025",
"64": "B-T039",
"65": "I-T097",
"66": "B-T024",
"67": "I-T023",
"68": "I-T005",
"69": "B-T197",
"70": "I-T103",
"71": "I-T046",
"72": "B-T005",
"73": "B-T041",
"74": "I-T024",
"75": "I-T091",
"76": "I-T059",
"77": "B-T031",
"78": "I-T048",
"79": "B-T070",
"80": "I-T170",
"81": "B-T045",
"82": "B-T098",
"83": "I-T025",
"84": "I-T190",
"85": "I-T017",
"86": "I-T201",
"87": "B-T091",
"88": "I-T114",
"89": "I-T074",
"90": "I-T167",
"91": "B-T019",
"92": "I-T037",
"93": "B-T129",
"94": "B-T191",
"95": "I-T125",
"96": "I-T026",
"97": "B-T131",
"98": "B-T037",
"99": "I-T070",
"100": "B-T059",
"101": "I-T047",
"102": "I-T204",
"103": "I-T098",
"104": "I-T038",
"105": "B-T023",
"106": "I-T116",
"107": "B-T196"
},
"initializer_range": 0.02,
"intermediate_size": 3072,
"label2id": {
"B-T005": 72,
"B-T007": 22,
"B-T017": 1,
"B-T019": 91,
"B-T020": 53,
"B-T022": 26,
"B-T023": 105,
"B-T024": 66,
"B-T025": 63,
"B-T026": 21,
"B-T028": 59,
"B-T031": 77,
"B-T033": 23,
"B-T037": 98,
"B-T038": 29,
"B-T039": 64,
"B-T040": 5,
"B-T041": 73,
"B-T043": 50,
"B-T044": 17,
"B-T045": 81,
"B-T046": 51,
"B-T047": 14,
"B-T048": 25,
"B-T058": 2,
"B-T059": 100,
"B-T062": 41,
"B-T063": 34,
"B-T070": 79,
"B-T074": 24,
"B-T080": 3,
"B-T081": 46,
"B-T082": 45,
"B-T091": 87,
"B-T092": 36,
"B-T097": 12,
"B-T098": 82,
"B-T103": 18,
"B-T104": 54,
"B-T109": 47,
"B-T114": 13,
"B-T116": 20,
"B-T121": 61,
"B-T123": 52,
"B-T125": 4,
"B-T129": 93,
"B-T131": 97,
"B-T167": 42,
"B-T168": 15,
"B-T169": 43,
"B-T170": 44,
"B-T184": 32,
"B-T190": 27,
"B-T191": 94,
"B-T196": 107,
"B-T197": 69,
"B-T201": 28,
"B-T204": 33,
"I-T005": 68,
"I-T007": 58,
"I-T017": 85,
"I-T019": 56,
"I-T020": 19,
"I-T022": 9,
"I-T023": 67,
"I-T024": 74,
"I-T025": 83,
"I-T026": 96,
"I-T028": 0,
"I-T031": 6,
"I-T033": 31,
"I-T037": 92,
"I-T038": 104,
"I-T039": 55,
"I-T041": 7,
"I-T043": 49,
"I-T044": 62,
"I-T045": 37,
"I-T046": 71,
"I-T047": 101,
"I-T048": 78,
"I-T058": 48,
"I-T059": 76,
"I-T062": 30,
"I-T063": 11,
"I-T070": 99,
"I-T074": 89,
"I-T082": 39,
"I-T091": 75,
"I-T092": 35,
"I-T097": 65,
"I-T098": 103,
"I-T103": 70,
"I-T109": 57,
"I-T114": 88,
"I-T116": 106,
"I-T121": 10,
"I-T125": 95,
"I-T129": 16,
"I-T167": 90,
"I-T168": 60,
"I-T170": 80,
"I-T184": 8,
"I-T190": 84,
"I-T191": 38,
"I-T201": 86,
"I-T204": 102,
"O": 40
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "bert",
"num_attention_heads": 12,
"num_hidden_layers": 12,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"torch_dtype": "float32",
"transformers_version": "4.50.3",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 30522
}