mhb
config.json +6 -6
@@ -8,16 +8,16 @@
     "AutoModelForCausalLM": "modelLM.OBILanguageModel",
     "AutoModelForQuestionAnswering": "modelLM.OBILanguageModel"
   },
-  "batch_size":
-  "block_size":
+  "batch_size": 160,
+  "block_size": 120,
   "device": "cpu",
   "eval_interval": 100,
   "hidden_dropout_prob": 0.1,
-  "hidden_size":
+  "hidden_size": 24,
   "learning_rate": 0.001,
-  "max_iters":
-  "num_attention_heads":
-  "num_hidden_layers":
+  "max_iters": 1000,
+  "num_attention_heads": 6,
+  "num_hidden_layers": 6,
   "torch_dtype": "float32",
   "transformers_version": "4.30.2",
   "vocab_size": 5000
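As a quick sanity check after this commit, the updated values can be read straight from the hub copy of config.json. A minimal sketch: the repo id below is a placeholder, not taken from this page, and since auto_map points at the custom modelLM.OBILanguageModel class, loading the model itself would additionally require trust_remote_code=True.

import json

from huggingface_hub import hf_hub_download

# Placeholder repo id; substitute the actual model repository.
path = hf_hub_download(repo_id="your-namespace/obi-lm", filename="config.json")

with open(path) as f:
    cfg = json.load(f)

# Values introduced by this commit.
print(cfg["batch_size"], cfg["block_size"])                  # 160 120
print(cfg["hidden_size"], cfg["max_iters"])                  # 24 1000
print(cfg["num_attention_heads"], cfg["num_hidden_layers"])  # 6 6

Note that hidden_size (24) divides evenly by num_attention_heads (6), giving a per-head dimension of 4, which is consistent with a standard multi-head attention layout.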