{ "architectures": [ "RobertaForSequenceClassification" ], "attention_probs_dropout_prob": 0.1, "auto_map": { "AutoConfig": "configuration_roberta.RobertaConfig", "AutoModel": "modeling_roberta.RobertaModel", "AutoModelForCausalLM": "modeling_roberta.RobertaForCausalLM", "AutoModelForMaskedLM": "modeling_roberta.RobertaForMaskedLM", "AutoModelForMultipleChoice": "modeling_roberta.RobertaForMultipleChoice", "AutoModelForQuestionAnswering": "modeling_roberta.RobertaForQuestionAnswering", "AutoModelForSequenceClassification": "modeling_roberta.RobertaForSequenceClassification", "AutoModelForTokenClassification": "modeling_roberta.RobertaForTokenClassification" }, "bos_token_id": 0, "classifier_dropout": null, "dtype": "float32", "eos_token_id": 2, "hidden_act": "gelu", "hidden_dropout_prob": 0.1, "hidden_size": 1024, "id2label": { "0": "BINFO", "1": "DANE_ARCH", "2": "DIERŻ_ST_HYD", "3": "INFO_DW", "4": "INSP", "5": "INTERW_AW_K", "6": "INTERW_AW_W", "7": "INTERW_ODTW", "8": "INTERW_ZAP", "9": "NEGOC_DESZCZ", "10": "ODWOD_KS", "11": "OP_PRZY_WK", "12": "OP_SIEC_WK", "13": "OP_UM", "14": "POZYTYW", "15": "POZ_SPR_WIND", "16": "PRZE", "17": "PYT", "18": "REKLAMACJA", "19": "ROW_EKSP", "20": "SK", "21": "UDOST_WN", "22": "UM_PARTYCY", "23": "UZ_SIEĆ_WK", "24": "WAR_W+K", "25": "WAR_W+K+KD", "26": "WOD_OGR_PRZY", "27": "WPIN_SIEĆ", "28": "ZAŚW_KOŃC", "29": "ZGŁ_ODCZ", "30": "ZM", "31": "ZW_NADPŁ" }, "initializer_range": 0.02, "intermediate_size": 4096, "label2id": { "BINFO": 0, "DANE_ARCH": 1, "DIERŻ_ST_HYD": 2, "INFO_DW": 3, "INSP": 4, "INTERW_AW_K": 5, "INTERW_AW_W": 6, "INTERW_ODTW": 7, "INTERW_ZAP": 8, "NEGOC_DESZCZ": 9, "ODWOD_KS": 10, "OP_PRZY_WK": 11, "OP_SIEC_WK": 12, "OP_UM": 13, "POZYTYW": 14, "POZ_SPR_WIND": 15, "PRZE": 16, "PYT": 17, "REKLAMACJA": 18, "ROW_EKSP": 19, "SK": 20, "UDOST_WN": 21, "UM_PARTYCY": 22, "UZ_SIEĆ_WK": 23, "WAR_WK": 24, "WAR_WKKD": 25, "WOD_OGR_PRZY": 26, "WPIN_SIEĆ": 27, "ZAŚW_KOŃC": 28, "ZGŁ_ODCZ": 29, "ZM": 30, "ZW_NADPŁ": 31 }, "layer_norm_eps": 1e-05, "max_position_embeddings": 8194, "model_type": "roberta", "num_attention_heads": 16, "num_hidden_layers": 24, "pad_token_id": 1, "position_embedding_type": "absolute", "problem_type": "single_label_classification", "transformers_version": "4.57.1", "type_vocab_size": 1, "use_cache": true, "vocab_size": 128064 }