{ "_name_or_path": "roberta-base", "architectures": [ "RobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "bos_token_id": 0, "classifier_dropout": null, "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 768, "id2label": { "0": "NCT03180736", "1": "NCT01985126", "2": "NCT03158688", "3": "NCT02541383", "4": "NCT02076009", "5": "NCT01080391", "6": "NCT01734928", "7": "NCT02963493", "8": "NCT01712789", "9": "NCT00430365", "10": "NCT04557098", "11": "NCT03548207", "12": "NCT01023308", "13": "NCT03412565", "14": "NCT02336815", "15": "NCT03651128", "16": "NCT02252172", "17": "NCT02412878", "18": "NCT02874742", "19": "NCT02990338", "20": "NCT03361748", "21": "NCT01660633", "22": "NCT05066646", "23": "NCT02195479", "24": "NCT01568866", "25": "NCT01239797", "26": "NCT02654132", "27": "NCT01998971", "28": "NCT01311687", "29": "NCT00103662", "30": "NCT03399799", "31": "NCT02136134", "32": "NCT03525678", "33": "NCT04162210", "34": "NCT03110562", "35": "NCT01564537", "36": "NCT04181827", "37": "NCT03151811", "38": "NCT05083169", "39": "NCT00114101", "40": "NCT01554852", "41": "ISRCTN17354232", "42": "NCT03277105", "43": "NCT00551928", "44": "NCT03275285" }, "initializer_range": 0.02, "intermediate_size": 3072, "label2id": { "ISRCTN17354232": 41, "NCT00103662": 29, "NCT00114101": 39, "NCT00430365": 9, "NCT00551928": 43, "NCT01023308": 12, "NCT01080391": 5, "NCT01239797": 25, "NCT01311687": 28, "NCT01554852": 40, "NCT01564537": 35, "NCT01568866": 24, "NCT01660633": 21, "NCT01712789": 8, "NCT01734928": 6, "NCT01985126": 1, "NCT01998971": 27, "NCT02076009": 4, "NCT02136134": 31, "NCT02195479": 23, "NCT02252172": 16, "NCT02336815": 14, "NCT02412878": 17, "NCT02541383": 3, "NCT02654132": 26, "NCT02874742": 18, "NCT02963493": 7, "NCT02990338": 19, "NCT03110562": 34, "NCT03151811": 37, "NCT03158688": 2, "NCT03180736": 0, "NCT03275285": 44, "NCT03277105": 42, "NCT03361748": 20, "NCT03399799": 30, "NCT03412565": 13, "NCT03525678": 32, 
"NCT03548207": 11, "NCT03651128": 15, "NCT04162210": 33, "NCT04181827": 36, "NCT04557098": 10, "NCT05066646": 22, "NCT05083169": 38 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 514, "model_type": "roberta", "num_attention_heads": 12, "num_hidden_layers": 12, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "multi_label_classification", "torch_dtype": "float32", "transformers_version": "4.33.2", "type_vocab_size": 1, "use_cache": true, "vocab_size": 50265 }