{
  "_name_or_path": "microsoft/deberta-base",
  "architectures": [
    "DebertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "can_lodge",
    "1": "has_legal_basis",
    "2": "receives_from",
    "3": "manages",
    "4": "complies_with",
    "5": "ensures",
    "6": "subjected_to",
    "7": "appoints",
    "8": "provides",
    "9": "has_right",
    "10": "transfers_to",
    "11": "delegates",
    "12": "utilises",
    "13": "processes",
    "14": "retains",
    "15": "processes_for",
    "16": "oversees",
    "17": "implements"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "appoints": 7,
    "can_lodge": 0,
    "complies_with": 4,
    "delegates": 11,
    "ensures": 5,
    "has_legal_basis": 1,
    "has_right": 9,
    "implements": 17,
    "manages": 3,
    "oversees": 16,
    "processes": 13,
    "processes_for": 15,
    "provides": 8,
    "receives_from": 2,
    "retains": 14,
    "subjected_to": 6,
    "transfers_to": 10,
    "utilises": 12
  },
  "layer_norm_eps": 1e-07,
  "max_position_embeddings": 512,
  "max_relative_positions": -1,
  "model_type": "deberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 0,
  "pooler_dropout": 0,
  "pooler_hidden_act": "gelu",
  "pooler_hidden_size": 768,
  "pos_att_type": [
    "c2p",
    "p2c"
  ],
  "position_biased_input": false,
  "problem_type": "multi_label_classification",
  "relative_attention": true,
  "torch_dtype": "float32",
  "transformers_version": "4.41.2",
  "type_vocab_size": 0,
  "vocab_size": 50265
}
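
A minimal usage sketch for this config, assuming the fine-tuned weights are stored alongside it in a local directory; the `./checkpoint` path below is a placeholder, not something taken from the config. Since `problem_type` is `multi_label_classification`, per-label scores come from a sigmoid over the 18 relation logits rather than a softmax.

```python
import torch
from transformers import AutoTokenizer, AutoModelForSequenceClassification

# Hypothetical local directory holding this config.json plus the model weights.
model_dir = "./checkpoint"

tokenizer = AutoTokenizer.from_pretrained(model_dir)
model = AutoModelForSequenceClassification.from_pretrained(model_dir)
model.eval()

text = "The controller appoints a data protection officer."
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: (1, 18)

# Multi-label setup: sigmoid each logit independently and threshold at 0.5.
probs = torch.sigmoid(logits)[0].tolist()
predicted = [model.config.id2label[i] for i, p in enumerate(probs) if p > 0.5]
print(predicted)
```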