Update config.json
Browse files — config.json: +3 −11
config.json
CHANGED
|
@@ -19,14 +19,6 @@
|
|
| 19 |
"tie_word_embeddings": false,
|
| 20 |
"torch_dtype": "bfloat16",
|
| 21 |
"transformers_version": "4.34.0.dev0",
|
| 22 |
-
"use_cache":
|
| 23 |
-
"vocab_size": 32000
|
| 24 |
-
|
| 25 |
-
"lora_dropout": 0.1, // Specific to your LoRA configuration
|
| 26 |
-
"r": 64, // Specific to your LoRA configuration
|
| 27 |
-
"peft_type": "LORA", // Since you used LoRA-based adaptations
|
| 28 |
-
"task_type": "CAUSAL_LM", // Your specific task type
|
| 29 |
-
"pretraining_tp": 1, // You added this, though it's unclear if needed post-training; typically more related to training setup
|
| 30 |
-
"fp16": true, // Indicates you used fp16 during training
|
| 31 |
-
"bf16": false // Indicates you did not use bf16 during training
|
| 32 |
-
}
|
|
|
|
| 19 |
"tie_word_embeddings": false,
|
| 20 |
"torch_dtype": "bfloat16",
|
| 21 |
"transformers_version": "4.34.0.dev0",
|
| 22 |
+
"use_cache": true,
|
| 23 |
+
"vocab_size": 32000
|
| 24 |
+
}
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|