{
  "_name_or_path": "dslim/distilbert-NER",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 768,
  "dropout": 0.1,
  "hidden_dim": 3072,
  "id2label": {
    "0": "O",
    "1": "B-PER",
    "2": "I-PER",
    "3": "B-ORG",
    "4": "I-ORG",
    "5": "B-LOC",
    "6": "I-LOC",
    "7": "B-MISC",
    "8": "I-MISC"
  },
  "initializer_range": 0.02,
  "label2id": {
    "O": 0,
    "B-PER": 1,
    "I-PER": 2,
    "B-ORG": 3,
    "I-ORG": 4,
    "B-LOC": 5,
    "I-LOC": 6,
    "B-MISC": 7,
    "I-MISC": 8
  },
  "max_position_embeddings": 512,
  "model_type": "distilbert",
  "n_heads": 12,
  "n_layers": 6,
  "output_past": true,
  "pad_token_id": 0,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "tie_weights_": true,
  "torch_dtype": "float32",
  "transformers_version": "4.36.2",
  "vocab_size": 28996
}
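
A minimal usage sketch of how this config is typically consumed (assumes the `transformers` library is installed and the dslim/distilbert-NER checkpoint is fetched from the Hugging Face Hub; the example sentence is illustrative only):

# Load the token-classification model whose behavior this config describes.
from transformers import AutoTokenizer, AutoModelForTokenClassification, pipeline

model_name = "dslim/distilbert-NER"
tokenizer = AutoTokenizer.from_pretrained(model_name)
model = AutoModelForTokenClassification.from_pretrained(model_name)

# The config's id2label map drives the entity tags the pipeline emits:
# B-/I- prefixed PER, ORG, LOC, MISC, plus the "O" outside tag.
ner = pipeline("ner", model=model, tokenizer=tokenizer, aggregation_strategy="simple")
print(ner("Hugging Face is based in New York City."))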
|
|