ardneebwar committed on
Commit
b9ef378
·
verified ·
1 Parent(s): 6f641b4

Update config.json

Browse files
Files changed (1) hide show
  1. config.json +3 -11
config.json CHANGED
@@ -19,14 +19,6 @@
19
  "tie_word_embeddings": false,
20
  "torch_dtype": "bfloat16",
21
  "transformers_version": "4.34.0.dev0",
22
- "use_cache": false, // You set this to false during your fine-tuning
23
- "vocab_size": 32000,
24
- "lora_alpha": 16, // Specific to your LoRA configuration
25
- "lora_dropout": 0.1, // Specific to your LoRA configuration
26
- "r": 64, // Specific to your LoRA configuration
27
- "peft_type": "LORA", // Since you used LoRA-based adaptations
28
- "task_type": "CAUSAL_LM", // Your specific task type
29
- "pretraining_tp": 1, // You added this, though it's unclear if needed post-training; typically more related to training setup
30
- "fp16": true, // Indicates you used fp16 during training
31
- "bf16": false // Indicates you did not use bf16 during training
32
- }
 
19
  "tie_word_embeddings": false,
20
  "torch_dtype": "bfloat16",
21
  "transformers_version": "4.34.0.dev0",
22
+ "use_cache": true,
23
+ "vocab_size": 32000
24
+ }