Cyvyhvyyv committed
Commit 4cd4b20 · verified · 1 Parent(s): b1fff5f

Update config.json

Files changed (1)
  1. config.json +2 -33
config.json CHANGED
@@ -35,36 +35,5 @@
   "torch_dtype": "bfloat16",
   "transformers_version": "4.45.0.dev0",
   "use_cache": true,
-  "vocab_size": 128256,
-  "alpha_pattern": {},
-  "auto_mapping": null,
-  "base_model_name_or_path": "lilmeaty/testing_semifinal",
-  "bias": "none",
-  "eva_config": null,
-  "exclude_modules": null,
-  "fan_in_fan_out": false,
-  "inference_mode": true,
-  "init_lora_weights": true,
-  "loftq_config": {},
-  "lora_alpha": 16,
-  "lora_bias": false,
-  "lora_dropout": 0,
-  "megatron_config": null,
-  "megatron_core": "megatron.core",
-  "modules_to_save": null,
-  "peft_type": "LORA",
-  "r": 16,
-  "rank_pattern": {},
-  "target_modules": [
-    "k_proj",
-    "q_proj",
-    "down_proj",
-    "up_proj",
-    "o_proj",
-    "v_proj",
-    "gate_proj"
-  ],
-  "task_type": "CAUSAL_LM",
-  "use_dora": false,
-  "use_rslora": false
- }
+  "vocab_size": 128256
+}
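
For context: the keys removed here (peft_type, r, lora_alpha, target_modules, and so on) are PEFT LoRA adapter settings, which normally live in a separate adapter_config.json rather than in the base model's config.json. Below is a minimal sanity-check sketch for the cleaned file, assuming a local copy of config.json; the key list is illustrative, not taken from this commit.

    import json

    # Sanity-check sketch: the cleaned config.json should be valid JSON
    # and free of PEFT adapter keys, which belong in adapter_config.json.
    # (Illustrative key list; not exhaustive and not from this commit.)
    PEFT_KEYS = {"peft_type", "r", "lora_alpha", "lora_dropout", "target_modules"}

    with open("config.json") as f:
        cfg = json.load(f)  # raises if the file is still malformed JSON

    leftover = PEFT_KEYS & cfg.keys()
    assert not leftover, f"PEFT adapter keys still present: {leftover}"
    print(cfg["vocab_size"])  # expected: 128256, per the diff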