NerGuard-0.3B / config.json
{
  "architectures": [
    "DebertaV2ForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 1,
  "dtype": "float32",
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "B-AGE",
    "2": "B-BUILDINGNUM",
    "3": "B-CITY",
    "4": "B-CREDITCARDNUMBER",
    "5": "B-DATE",
    "6": "B-DRIVERLICENSENUM",
    "7": "B-EMAIL",
    "8": "B-GENDER",
    "9": "B-GIVENNAME",
    "10": "B-IDCARDNUM",
    "11": "B-PASSPORTNUM",
    "12": "B-SEX",
    "13": "B-SOCIALNUM",
    "14": "B-STREET",
    "15": "B-SURNAME",
    "16": "B-TAXNUM",
    "17": "B-TELEPHONENUM",
    "18": "B-TIME",
    "19": "B-TITLE",
    "20": "B-ZIPCODE",
    "21": "I-AGE",
    "22": "I-BUILDINGNUM",
    "23": "I-CITY",
    "24": "I-CREDITCARDNUMBER",
    "25": "I-DATE",
    "26": "I-DRIVERLICENSENUM",
    "27": "I-EMAIL",
    "28": "I-GENDER",
    "29": "I-GIVENNAME",
    "30": "I-IDCARDNUM",
    "31": "I-PASSPORTNUM",
    "32": "I-SEX",
    "33": "I-SOCIALNUM",
    "34": "I-STREET",
    "35": "I-SURNAME",
    "36": "I-TAXNUM",
    "37": "I-TELEPHONENUM",
    "38": "I-TIME",
    "39": "I-TITLE",
    "40": "I-ZIPCODE"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "B-AGE": 1,
    "B-BUILDINGNUM": 2,
    "B-CITY": 3,
    "B-CREDITCARDNUMBER": 4,
    "B-DATE": 5,
    "B-DRIVERLICENSENUM": 6,
    "B-EMAIL": 7,
    "B-GENDER": 8,
    "B-GIVENNAME": 9,
    "B-IDCARDNUM": 10,
    "B-PASSPORTNUM": 11,
    "B-SEX": 12,
    "B-SOCIALNUM": 13,
    "B-STREET": 14,
    "B-SURNAME": 15,
    "B-TAXNUM": 16,
    "B-TELEPHONENUM": 17,
    "B-TIME": 18,
    "B-TITLE": 19,
    "B-ZIPCODE": 20,
    "I-AGE": 21,
    "I-BUILDINGNUM": 22,
    "I-CITY": 23,
    "I-CREDITCARDNUMBER": 24,
    "I-DATE": 25,
    "I-DRIVERLICENSENUM": 26,
    "I-EMAIL": 27,
    "I-GENDER": 28,
    "I-GIVENNAME": 29,
    "I-IDCARDNUM": 30,
    "I-PASSPORTNUM": 31,
    "I-SEX": 32,
    "I-SOCIALNUM": 33,
    "I-STREET": 34,
    "I-SURNAME": 35,
    "I-TAXNUM": 36,
    "I-TELEPHONENUM": 37,
    "I-TIME": 38,
    "I-TITLE": 39,
    "I-ZIPCODE": 40,
    "O": 0
  },
  "layer_norm_eps": 1e-07,
  "legacy": true,
  "max_position_embeddings": 512,
  "max_relative_positions": -1,
  "model_type": "deberta-v2",
  "norm_rel_ebd": "layer_norm",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_dropout": 0,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 768,
  "pos_att_type": [
    "p2c",
    "c2p"
  ],
  "position_biased_input": false,
  "position_buckets": 256,
  "relative_attention": true,
  "share_att_key": true,
  "transformers_version": "4.57.0",
  "type_vocab_size": 0,
  "vocab_size": 251000
}
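
This config defines a DebertaV2ForTokenClassification head with 41 BIO labels (20 PII entity types plus O) on an mDeBERTa-v3-base backbone (12 layers, hidden size 768, 251,000-token vocabulary). Below is a minimal inference sketch, assuming the repository id "exdsgift/NerGuard-0.3B" (inferred from this page's header, not confirmed) and that the tokenizer files ship alongside this config:

# Minimal PII-tagging sketch. The repo id is an assumption inferred from
# the page header; substitute the actual model path if it differs.
from transformers import AutoModelForTokenClassification, AutoTokenizer, pipeline

model_id = "exdsgift/NerGuard-0.3B"  # assumed repository id

tokenizer = AutoTokenizer.from_pretrained(model_id)
model = AutoModelForTokenClassification.from_pretrained(model_id)

# aggregation_strategy="simple" merges B-/I- subtoken tags into whole spans,
# so e.g. a telephone number split into word pieces returns as one entity.
pii_tagger = pipeline(
    "token-classification",
    model=model,
    tokenizer=tokenizer,
    aggregation_strategy="simple",
)

text = "Contact Jane Doe at jane.doe@example.com or +1 555 0100."
for entity in pii_tagger(text):
    print(entity["entity_group"], repr(entity["word"]), round(float(entity["score"]), 3))

Since max_position_embeddings is 512, longer documents need to be chunked before tagging, for example via the pipeline's stride argument in recent transformers versions or by manual windowing.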