{
"architectures": [
"ElectraForTokenClassification"
],
"attention_probs_dropout_prob": 0.1,
"classifier_dropout": null,
"dtype": "float32",
"embedding_size": 1024,
"hidden_act": "gelu",
"hidden_dropout_prob": 0.1,
"hidden_size": 1024,
"id2label": {
"0": "O",
"1": "B-PER",
"2": "I-PER",
"3": "E-PER",
"4": "S-PER",
"5": "B-ORG",
"6": "I-ORG",
"7": "E-ORG",
"8": "S-ORG",
"9": "B-LOC",
"10": "I-LOC",
"11": "E-LOC",
"12": "S-LOC",
"13": "B-ROLE",
"14": "I-ROLE",
"15": "E-ROLE",
"16": "S-ROLE",
"17": "B-RACE",
"18": "I-RACE",
"19": "E-RACE",
"20": "S-RACE",
"21": "B-ABILITY",
"22": "I-ABILITY",
"23": "E-ABILITY",
"24": "S-ABILITY",
"25": "B-CONCEPT",
"26": "I-CONCEPT",
"27": "E-CONCEPT",
"28": "S-CONCEPT",
"29": "B-POWER_LV",
"30": "I-POWER_LV",
"31": "E-POWER_LV",
"32": "S-POWER_LV",
"33": "B-ITEM",
"34": "I-ITEM",
"35": "E-ITEM",
"36": "S-ITEM",
"37": "B-EVENT",
"38": "I-EVENT",
"39": "E-EVENT",
"40": "S-EVENT",
"41": "B-GPE",
"42": "I-GPE",
"43": "E-GPE",
"44": "S-GPE"
},
"initializer_range": 0.02,
"intermediate_size": 4096,
"label2id": {
"B-ABILITY": 21,
"B-CONCEPT": 25,
"B-EVENT": 37,
"B-GPE": 41,
"B-ITEM": 33,
"B-LOC": 9,
"B-ORG": 5,
"B-PER": 1,
"B-POWER_LV": 29,
"B-RACE": 17,
"B-ROLE": 13,
"E-ABILITY": 23,
"E-CONCEPT": 27,
"E-EVENT": 39,
"E-GPE": 43,
"E-ITEM": 35,
"E-LOC": 11,
"E-ORG": 7,
"E-PER": 3,
"E-POWER_LV": 31,
"E-RACE": 19,
"E-ROLE": 15,
"I-ABILITY": 22,
"I-CONCEPT": 26,
"I-EVENT": 38,
"I-GPE": 42,
"I-ITEM": 34,
"I-LOC": 10,
"I-ORG": 6,
"I-PER": 2,
"I-POWER_LV": 30,
"I-RACE": 18,
"I-ROLE": 14,
"O": 0,
"S-ABILITY": 24,
"S-CONCEPT": 28,
"S-EVENT": 40,
"S-GPE": 44,
"S-ITEM": 36,
"S-LOC": 12,
"S-ORG": 8,
"S-PER": 4,
"S-POWER_LV": 32,
"S-RACE": 20,
"S-ROLE": 16
},
"layer_norm_eps": 1e-12,
"max_position_embeddings": 512,
"model_type": "electra",
"num_attention_heads": 16,
"num_hidden_layers": 24,
"pad_token_id": 0,
"position_embedding_type": "absolute",
"summary_activation": "gelu",
"summary_last_dropout": 0.1,
"summary_type": "first",
"summary_use_proj": true,
"transformers_version": "4.56.1",
"type_vocab_size": 2,
"use_cache": true,
"vocab_size": 21128
}