{
  "architectures": [
    "MPNetForTokenClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "O",
    "1": "COMMA",
    "2": "PERIOD",
    "3": "QUESTION",
    "4": "EXCLAMATION",
    "5": "SEMICOLON",
    "6": "COLON",
    "7": "HYPHEN",
    "8": "EN_DASH",
    "9": "EM_DASH",
    "10": "LEFT_PAREN",
    "11": "RIGHT_PAREN",
    "12": "LEFT_BRACKET",
    "13": "RIGHT_BRACKET",
    "14": "LEFT_BRACE",
    "15": "RIGHT_BRACE",
    "16": "DOUBLE_QUOTE",
    "17": "SINGLE_QUOTE",
    "18": "ELLIPSIS",
    "19": "SLASH",
    "20": "BACKSLASH",
    "21": "AT_SYMBOL",
    "22": "HASH",
    "23": "DOLLAR",
    "24": "PERCENT",
    "25": "AMPERSAND",
    "26": "ASTERISK",
    "27": "PLUS",
    "28": "EQUALS",
    "29": "LESS_THAN",
    "30": "GREATER_THAN",
    "31": "PIPE",
    "32": "CARET",
    "33": "BACKTICK",
    "34": "TILDE"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "AMPERSAND": 25,
    "ASTERISK": 26,
    "AT_SYMBOL": 21,
    "BACKSLASH": 20,
    "BACKTICK": 33,
    "CARET": 32,
    "COLON": 6,
    "COMMA": 1,
    "DOLLAR": 23,
    "DOUBLE_QUOTE": 16,
    "ELLIPSIS": 18,
    "EM_DASH": 9,
    "EN_DASH": 8,
    "EQUALS": 28,
    "EXCLAMATION": 4,
    "GREATER_THAN": 30,
    "HASH": 22,
    "HYPHEN": 7,
    "LEFT_BRACE": 14,
    "LEFT_BRACKET": 12,
    "LEFT_PAREN": 10,
    "LESS_THAN": 29,
    "O": 0,
    "PERCENT": 24,
    "PERIOD": 2,
    "PIPE": 31,
    "PLUS": 27,
    "QUESTION": 3,
    "RIGHT_BRACE": 15,
    "RIGHT_BRACKET": 13,
    "RIGHT_PAREN": 11,
    "SEMICOLON": 5,
    "SINGLE_QUOTE": 17,
    "SLASH": 19,
    "TILDE": 34
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "mpnet",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "relative_attention_num_buckets": 32,
  "torch_dtype": "float32",
  "transformers_version": "4.50.0",
  "vocab_size": 30527
}