maxpe committed on
Commit
98d0282
·
1 Parent(s): a9bc8fc

add problem_type to config

Browse files
Files changed (1) hide show
  1. config.json +1 -0
config.json CHANGED
@@ -45,6 +45,7 @@
45
  "num_hidden_layers": 12,
46
  "pad_token_id": 1,
47
  "position_embedding_type": "absolute",
 
48
  "torch_dtype": "float32",
49
  "transformers_version": "4.21.2",
50
  "type_vocab_size": 1,
 
45
  "num_hidden_layers": 12,
46
  "pad_token_id": 1,
47
  "position_embedding_type": "absolute",
48
+ "problem_type": "multi_label_classification",
49
  "torch_dtype": "float32",
50
  "transformers_version": "4.21.2",
51
  "type_vocab_size": 1,