PanzerBread committed
Commit 111d5ff · verified · 1 Parent(s): ce1f11b

Upload folder using huggingface_hub

coding-0.2/q/latest/README.md CHANGED
@@ -31,7 +31,7 @@ print(output["generated_text"])
 
 ## Training procedure
 
-[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/kstaron-/huggingface/runs/8rxpc2rv)
+[<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="150" height="24"/>](https://wandb.ai/kstaron-/huggingface/runs/0t2a41t5)
 
 
 This model was trained with SFT.
coding-0.2/q/latest/adapter_config.json CHANGED
@@ -20,7 +20,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 32,
+  "lora_alpha": 64,
   "lora_bias": false,
   "lora_dropout": 0,
   "megatron_config": null,
@@ -33,13 +33,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "k_proj",
     "q_proj",
-    "down_proj",
-    "gate_proj",
     "v_proj",
-    "o_proj"
+    "up_proj",
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "down_proj"
   ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
coding-0.2/q/latest/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a6bbb916dc12a2fca99fee550382057f9199d8894073c49e3d57821194b22e4f
+oid sha256:121954251f213ee58d7c89ce3881ff28da0b7fc8c7a3c91b614a0e07361d2b00
 size 645975704
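Weight files are tracked with Git LFS, so the diff shows only the pointer: `oid` is the SHA-256 of the actual file and `size` its byte count; the same size with a new hash means the tensors were rewritten in place. A small sketch for checking a downloaded file against the pointer (the local path is hypothetical):

```python
import hashlib

# Hypothetical local path after downloading this repo.
path = "coding-0.2/q/latest/adapter_model.safetensors"

h = hashlib.sha256()
with open(path, "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):
        h.update(chunk)

# Expected oid from the LFS pointer after this commit.
expected = "121954251f213ee58d7c89ce3881ff28da0b7fc8c7a3c91b614a0e07361d2b00"
print(h.hexdigest() == expected)
```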
coding-0.2/q/latest/base_adapter/adapter_config.json ADDED
@@ -0,0 +1,50 @@
+{
+  "alora_invocation_tokens": null,
+  "alpha_pattern": {},
+  "arrow_config": null,
+  "auto_mapping": {
+    "base_model_class": "Qwen2ForCausalLM",
+    "parent_library": "transformers.models.qwen2.modeling_qwen2",
+    "unsloth_fixed": true
+  },
+  "base_model_name_or_path": "unsloth/deepseek-r1-distill-qwen-7b-unsloth-bnb-4bit",
+  "bias": "none",
+  "corda_config": null,
+  "ensure_weight_tying": false,
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 64,
+  "lora_bias": false,
+  "lora_dropout": 0,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "peft_version": "0.18.0",
+  "qalora_group_size": 16,
+  "r": 64,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj",
+    "up_proj",
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "down_proj"
+  ],
+  "target_parameters": null,
+  "task_type": "CAUSAL_LM",
+  "trainable_token_indices": null,
+  "use_dora": false,
+  "use_qalora": false,
+  "use_rslora": false
+}
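The new `base_adapter/` directory is a self-contained PEFT LoRA adapter (the config above plus the LFS-tracked weights below) on top of `unsloth/deepseek-r1-distill-qwen-7b-unsloth-bnb-4bit`. A hedged sketch of attaching it with the standard transformers/peft API (the local directory path assumes a checkout of this repo):

```python
from transformers import AutoModelForCausalLM, AutoTokenizer
from peft import PeftModel

# Base model named in the adapter config above.
base_id = "unsloth/deepseek-r1-distill-qwen-7b-unsloth-bnb-4bit"
# Path inside a local checkout of this repo (assumption).
adapter_dir = "coding-0.2/q/latest/base_adapter"

tokenizer = AutoTokenizer.from_pretrained(base_id)
base = AutoModelForCausalLM.from_pretrained(base_id, device_map="auto")
model = PeftModel.from_pretrained(base, adapter_dir)
```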
coding-0.2/q/latest/base_adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ae9dcce72b493a3d3c471d5fa8f933a33eb74a58dbf7aff65bdb623a2706468
+size 645975704
coding-0.2/q/latest/checkpoint-1/adapter_config.json CHANGED
@@ -20,7 +20,7 @@
   "layers_pattern": null,
   "layers_to_transform": null,
   "loftq_config": {},
-  "lora_alpha": 32,
+  "lora_alpha": 64,
   "lora_bias": false,
   "lora_dropout": 0,
   "megatron_config": null,
@@ -33,13 +33,13 @@
   "rank_pattern": {},
   "revision": null,
   "target_modules": [
-    "up_proj",
-    "k_proj",
     "q_proj",
-    "down_proj",
-    "gate_proj",
     "v_proj",
-    "o_proj"
+    "up_proj",
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "down_proj"
  ],
   "target_parameters": null,
   "task_type": "CAUSAL_LM",
coding-0.2/q/latest/checkpoint-1/adapter_model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:a6bbb916dc12a2fca99fee550382057f9199d8894073c49e3d57821194b22e4f
+oid sha256:121954251f213ee58d7c89ce3881ff28da0b7fc8c7a3c91b614a0e07361d2b00
 size 645975704
coding-0.2/q/latest/checkpoint-1/base_adapter/adapter_config.json ADDED
@@ -0,0 +1,50 @@
+{
+  "alora_invocation_tokens": null,
+  "alpha_pattern": {},
+  "arrow_config": null,
+  "auto_mapping": {
+    "base_model_class": "Qwen2ForCausalLM",
+    "parent_library": "transformers.models.qwen2.modeling_qwen2",
+    "unsloth_fixed": true
+  },
+  "base_model_name_or_path": "unsloth/deepseek-r1-distill-qwen-7b-unsloth-bnb-4bit",
+  "bias": "none",
+  "corda_config": null,
+  "ensure_weight_tying": false,
+  "eva_config": null,
+  "exclude_modules": null,
+  "fan_in_fan_out": false,
+  "inference_mode": true,
+  "init_lora_weights": true,
+  "layer_replication": null,
+  "layers_pattern": null,
+  "layers_to_transform": null,
+  "loftq_config": {},
+  "lora_alpha": 64,
+  "lora_bias": false,
+  "lora_dropout": 0,
+  "megatron_config": null,
+  "megatron_core": "megatron.core",
+  "modules_to_save": null,
+  "peft_type": "LORA",
+  "peft_version": "0.18.0",
+  "qalora_group_size": 16,
+  "r": 64,
+  "rank_pattern": {},
+  "revision": null,
+  "target_modules": [
+    "q_proj",
+    "v_proj",
+    "up_proj",
+    "gate_proj",
+    "k_proj",
+    "o_proj",
+    "down_proj"
+  ],
+  "target_parameters": null,
+  "task_type": "CAUSAL_LM",
+  "trainable_token_indices": null,
+  "use_dora": false,
+  "use_qalora": false,
+  "use_rslora": false
+}
coding-0.2/q/latest/checkpoint-1/base_adapter/adapter_model.safetensors ADDED
@@ -0,0 +1,3 @@
+version https://git-lfs.github.com/spec/v1
+oid sha256:7ae9dcce72b493a3d3c471d5fa8f933a33eb74a58dbf7aff65bdb623a2706468
+size 645975704
coding-0.2/q/latest/checkpoint-1/optimizer.pt CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c82eb3590ed9fcceb5d6294bdce16feff6612301207b8d6d2c50b29ed5275d52
+oid sha256:9d44113f9e85a36b16d1d84fa20b1ae07f1f8e3e540d40c71897d0399407dfa3
 size 328468869
coding-0.2/q/latest/checkpoint-1/trainer_state.json CHANGED
@@ -26,7 +26,7 @@
       "attributes": {}
     }
   },
-  "total_flos": 2221701070848000.0,
+  "total_flos": 36553862209536.0,
   "train_batch_size": 90,
   "trial_name": null,
   "trial_params": null
coding-0.2/q/latest/checkpoint-1/training_args.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:fe48554188b058f1d7a48c8dcc13f932912a9314fdd8092cf4127cbf0a284433
+oid sha256:910cad4b749ef8c4803d0a741d767850043ac438f5ae0d643857592dff05dd07
 size 6225