{
  "_name_or_path": "models/transformer_f",
  "activation": "gelu",
  "architectures": [
    "DistilBertForTokenClassification"
  ],
  "attention_dropout": 0.1,
  "dim": 64,
  "dropout": 0.1,
  "hidden_dim": 128,
  "id2label": {
    "0": "A",
    "1": "Adv",
    "2": "Cj",
    "3": "Interj",
    "4": "N",
    "5": "Num",
    "6": "Other",
    "7": "Pp",
    "8": "Pron",
    "9": "Punct",
    "10": "V"
  },
  "initializer_range": 0.02,
  "label2id": {
    "A": 0,
    "Adv": 1,
    "Cj": 2,
    "Interj": 3,
    "N": 4,
    "Num": 5,
    "Other": 6,
    "Pp": 7,
    "Pron": 8,
    "Punct": 9,
    "V": 10
  },
  "max_position_embeddings": 200,
  "model_type": "distilbert",
  "n_classes": 11,
  "n_heads": 1,
  "n_layers": 1,
  "pad_token_id": 1,
  "qa_dropout": 0.1,
  "seq_classif_dropout": 0.2,
  "sinusoidal_pos_embds": false,
  "torch_dtype": "float32",
  "transformers_version": "4.31.0",
  "vocab_size": 6000
}