{
  "architectures": [
    "RobertaForSequenceClassification"
  ],
  "attention_probs_dropout_prob": 0.1,
  "bos_token_id": 0,
  "classifier_dropout": null,
  "dtype": "float32",
  "eos_token_id": 2,
  "hidden_act": "gelu",
  "hidden_dropout_prob": 0.1,
  "hidden_size": 768,
  "id2label": {
    "0": "bug_high",
    "1": "bug_medium",
    "2": "bug_low",
    "3": "feature_request_high",
    "4": "feature_request_medium",
    "5": "feature_request_low",
    "6": "documentation_high",
    "7": "documentation_medium",
    "8": "documentation_low",
    "9": "question_support_high",
    "10": "question_support_medium",
    "11": "question_support_low",
    "12": "enhancement_high",
    "13": "enhancement_medium",
    "14": "enhancement_low",
    "15": "performance_high",
    "16": "performance_medium",
    "17": "performance_low",
    "18": "security_high",
    "19": "security_medium",
    "20": "security_low",
    "21": "refactor_high",
    "22": "refactor_medium",
    "23": "refactor_low",
    "24": "testing_high",
    "25": "testing_medium",
    "26": "testing_low",
    "27": "build_high",
    "28": "build_medium",
    "29": "build_low",
    "30": "design_high",
    "31": "design_medium",
    "32": "design_low"
  },
  "initializer_range": 0.02,
  "intermediate_size": 3072,
  "label2id": {
    "bug_high": 0,
    "bug_low": 2,
    "bug_medium": 1,
    "build_high": 27,
    "build_low": 29,
    "build_medium": 28,
    "design_high": 30,
    "design_low": 32,
    "design_medium": 31,
    "documentation_high": 6,
    "documentation_low": 8,
    "documentation_medium": 7,
    "enhancement_high": 12,
    "enhancement_low": 14,
    "enhancement_medium": 13,
    "feature_request_high": 3,
    "feature_request_low": 5,
    "feature_request_medium": 4,
    "performance_high": 15,
    "performance_low": 17,
    "performance_medium": 16,
    "question_support_high": 9,
    "question_support_low": 11,
    "question_support_medium": 10,
    "refactor_high": 21,
    "refactor_low": 23,
    "refactor_medium": 22,
    "security_high": 18,
    "security_low": 20,
    "security_medium": 19,
    "testing_high": 24,
    "testing_low": 26,
    "testing_medium": 25
  },
  "layer_norm_eps": 1e-05,
  "max_position_embeddings": 514,
  "model_type": "roberta",
  "num_attention_heads": 12,
  "num_hidden_layers": 12,
  "pad_token_id": 1,
  "position_embedding_type": "absolute",
  "problem_type": "single_label_classification",
  "transformers_version": "4.57.2",
  "type_vocab_size": 1,
  "use_cache": true,
  "vocab_size": 50265
}
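
For context, this config describes a RoBERTa-base encoder (12 layers, 12 attention heads, hidden size 768) with a 33-way single-label classification head: 11 issue categories, each at three priority levels (high, medium, low). Below is a minimal inference sketch, not part of the upload itself; the checkpoint path `./checkpoint` is a placeholder for wherever this model and its tokenizer files live, and the sample issue text is made up for illustration.

```python
import torch
from transformers import AutoModelForSequenceClassification, AutoTokenizer

# Placeholder path: substitute the actual model repo id or local directory.
checkpoint = "./checkpoint"

tokenizer = AutoTokenizer.from_pretrained(checkpoint)
model = AutoModelForSequenceClassification.from_pretrained(checkpoint)
model.eval()

text = "App crashes on startup when the config file is missing."
# RoBERTa's usable sequence length is 512 tokens despite
# max_position_embeddings being 514 (offsets for special positions).
inputs = tokenizer(text, return_tensors="pt", truncation=True, max_length=512)

with torch.no_grad():
    logits = model(**inputs).logits  # shape: [1, 33]

# problem_type is "single_label_classification", so argmax over the 33 labels.
predicted_id = logits.argmax(dim=-1).item()
label = model.config.id2label[predicted_id]  # e.g. "bug_high"

# Each label encodes "<category>_<priority>"; split on the last underscore.
category, priority = label.rsplit("_", 1)
print(f"category={category}, priority={priority}")
```

Because every label packs both fields into one string, splitting on the final underscore recovers the category and priority separately, which is handy if downstream tooling routes issues by category and sorts by priority.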