Halfotter committed on
Commit
5937c73
·
verified ·
1 Parent(s): 2f1dcf0

Upload folder using huggingface_hub

Browse files
Files changed (1) hide show
  1. config.json +28 -9
config.json CHANGED
@@ -1,6 +1,31 @@
1
  {
2
- "model_type": "custom_classifier",
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
3
  "num_labels": 66,
 
 
 
 
 
 
 
 
4
  "id2label": {
5
  "0": "점결탄",
6
  "1": "산화마그네슘",
@@ -136,11 +161,5 @@
136
  "고온 성형 환원철": 63,
137
  "휘발유": 64,
138
  "탄산스트론튬": 65
139
- },
140
- "architectures": [
141
- "SimpleClassifier"
142
- ],
143
- "max_position_embeddings": 512,
144
- "hidden_size": 256,
145
- "intermediate_size": 128
146
- }
 
1
  {
2
+ "_name_or_path": "xlm-roberta-base",
3
+ "architectures": [
4
+ "XLMRobertaForSequenceClassification"
5
+ ],
6
+ "attention_probs_dropout_prob": 0.1,
7
+ "bos_token_id": 0,
8
+ "classifier_dropout": 0.1,
9
+ "eos_token_id": 2,
10
+ "hidden_act": "gelu",
11
+ "hidden_dropout_prob": 0.1,
12
+ "hidden_size": 768,
13
+ "initializer_range": 0.02,
14
+ "intermediate_size": 3072,
15
+ "layer_norm_eps": 1e-05,
16
+ "max_position_embeddings": 514,
17
+ "model_type": "xlm-roberta",
18
+ "num_attention_heads": 12,
19
+ "num_hidden_layers": 12,
20
  "num_labels": 66,
21
+ "output_past": true,
22
+ "pad_token_id": 1,
23
+ "position_embedding_type": "absolute",
24
+ "torch_dtype": "float32",
25
+ "transformers_version": "4.35.2",
26
+ "type_vocab_size": 1,
27
+ "use_cache": true,
28
+ "vocab_size": 250002,
29
  "id2label": {
30
  "0": "점결탄",
31
  "1": "산화마그네슘",
 
161
  "고온 성형 환원철": 63,
162
  "휘발유": 64,
163
  "탄산스트론튬": 65
164
+ }
165
+ }