nie10 committed
Commit e876779 · verified · Parent: 55a6b53

Upload folder using huggingface_hub
.gitattributes CHANGED
@@ -33,3 +33,4 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ log.log filter=lfs diff=lfs merge=lfs -text
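
With this change, log.log is routed through Git LFS like the other large artifacts, so a clone made without "git lfs pull" contains only a small pointer stub (the "version https://git-lfs.github.com/spec/v1" blocks shown further down) instead of the ~40 MB log itself. A minimal detection sketch (mine, not part of the commit):

# Sketch (not part of this commit): detect whether a checked-out file is a
# Git LFS pointer stub rather than the real payload. Files matched by the
# .gitattributes rules above, now including log.log, are stored this way.
LFS_SPEC = b"version https://git-lfs.github.com/spec/v1"

def is_lfs_pointer(path: str) -> bool:
    # Real LFS pointer files begin with the spec line shown above.
    with open(path, "rb") as f:
        return f.read(len(LFS_SPEC)) == LFS_SPEC

print(is_lfs_pointer("log.log"))  # True in a clone made without `git lfs pull`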
bench2-a1-dev.jsonl ADDED
The diff for this file is too large to render.

bench2-a1-test.jsonl ADDED
The diff for this file is too large to render.

bench_a1_dev.jsonl ADDED
The diff for this file is too large to render.

bench_a1_test.jsonl ADDED
The diff for this file is too large to render.
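
The four bench*-a1 files are JSON Lines splits (one JSON object per line); their contents are not rendered in this diff, so the per-record schema is unknown here. A generic reader sketch (file choice and key peeking are illustrative only):

# Sketch: stream records from one of the added JSON Lines splits.
# The record schema is not visible in this diff, so we only peek at keys.
import json

def read_jsonl(path):
    with open(path, encoding="utf-8") as f:
        for line in f:
            if line.strip():               # skip blank lines defensively
                yield json.loads(line)

for i, record in enumerate(read_jsonl("bench_a1_dev.jsonl")):
    print(sorted(record))                  # assuming each line is a JSON object
    if i >= 2:
        break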
 
checkpoint-1000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
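
Each checkpoint directory ships this same config. Its auto_map entries point at custom code (configuration_bunny_phi.py / modeling_bunny_phi.py) that is referenced but not included in this commit, so loading it needs trust_remote_code=True in a directory where those modules exist. A sketch of the usual loading path under that assumption:

# Sketch, assuming the custom configuration/modeling modules named in
# auto_map sit alongside this config.json (they are not part of this commit).
from transformers import AutoConfig

config = AutoConfig.from_pretrained(
    "checkpoint-1000",          # directory holding this config.json
    trust_remote_code=True,     # lets auto_map import BunnyPhiConfig
)
print(config.model_type, config.hidden_size, config.mm_hidden_size)
# -> bunny-qformer-phi_v3_bib 2560 1152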
checkpoint-1000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:70081d0f468d9f9ab949cb2e2481e935311c4ef412599a3260a1417b231d5eb4
+ size 415617251
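
Per the config above, the artifact saved at every step is the multimodal projector (mm_projector_type "mlp2x_gelu", which maps mm_hidden_size 1152 vision features into the 2560-wide LM); each mm_projector.bin below is a 415,617,251-byte LFS blob. A quick inspection sketch (assumes a torch-saved state dict, the usual convention in Bunny/LLaVA-style trainers; key names are not shown in this diff):

# Sketch, assuming mm_projector.bin is a torch-saved state dict
# (not verifiable from this diff alone).
import torch

state = torch.load("checkpoint-1000/mm_projector.bin", map_location="cpu")
for name, tensor in state.items():
    print(name, tuple(tensor.shape), tensor.dtype)
# For an mlp2x_gelu projector, expect two Linear layers lifting the vision
# feature width (mm_hidden_size=1152) to the LM width (hidden_size=2560).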
checkpoint-1500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-1500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:20a6b1f36fe65b3cd0fb4da928ce47208939c812287560890fd94a0257f06a7c
+ size 415617251
checkpoint-2000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-2000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d54034cd80e03ac748dba7c4ef7bb51e79caadd89abb6f0de95e1a087f8e35e
+ size 415617251
checkpoint-2500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-2500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fad134132289d5426a076d23455dd0bd84353d6f8448e4905004779a4779202d
+ size 415617251
checkpoint-3000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-3000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4c79cdba830d96fb503af4e357c5830d1cbd5f31853a85cbf089ad32d82eace4
+ size 415617251
checkpoint-3500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-3500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4994214c1f512046d637ecbaedc72719832f0a0783eb6a10765bb9ac34c16ac5
+ size 415617251
checkpoint-4000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-4000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47b4c7c6b711bfb5547e1219552e25c4d9c830fe9e2d7a6e4bd2acd0cdbb637f
+ size 415617251
checkpoint-4500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-4500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:290b42fe8a17166640ea7c7f53a9e7f0aacab8baf916dc4440d0f87efd831f6d
+ size 415617251
checkpoint-500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:37bc7505e6dbd724f233985005737b9e69f310776c2ecf5b59f2ba32a03542e6
+ size 415617251
checkpoint-5000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-5000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:47f4f98b787d55ba1194a342c1b3c3ecff6dd943e1c1427ebd122aa6e412b029
+ size 415617251
checkpoint-5500/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-5500/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:85092259a4230d3b6daf19680943f4c618b5a3a45ee786326e7f2370a60643f6
+ size 415617251
checkpoint-6000/config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": false,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
checkpoint-6000/mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:8a805f45968a5783e279f3506cea2b7dcf036fcc828e59e48975dc0a4395b96b
+ size 415617251
config.json ADDED
@@ -0,0 +1,49 @@
+ {
+ "_name_or_path": "/mnt/workspace/workgroup/lyt/Bunny/Bunny/weight/Bunny-v1_0-3B/",
+ "architectures": [
+ "BunnyPhiForCausalLM"
+ ],
+ "attention_dropout": 0.0,
+ "auto_map": {
+ "AutoConfig": "configuration_bunny_phi.BunnyPhiConfig",
+ "AutoModelForCausalLM": "modeling_bunny_phi.BunnyPhiForCausalLM"
+ },
+ "bert_type": "qformer_pretrain_full",
+ "bos_token_id": 50256,
+ "compress_type": null,
+ "embd_pdrop": 0.0,
+ "eos_token_id": 50256,
+ "freeze_mm_mlp_adapter": false,
+ "hidden_act": "gelu_new",
+ "hidden_size": 2560,
+ "image_aspect_ratio": "pad",
+ "initializer_range": 0.02,
+ "intermediate_size": 10240,
+ "layer_norm_eps": 1e-05,
+ "max_position_embeddings": 2048,
+ "mm_hidden_size": 1152,
+ "mm_projector_lr": 2e-05,
+ "mm_projector_type": "mlp2x_gelu",
+ "mm_vision_tower": "/path/to/siglip-so400m-patch14-384",
+ "model_type": "bunny-qformer-phi_v3_bib",
+ "num_attention_heads": 32,
+ "num_hidden_layers": 32,
+ "num_key_value_heads": 32,
+ "num_query": 32,
+ "pad_token_id": 50256,
+ "partial_rotary_factor": 0.4,
+ "qk_layernorm": false,
+ "resid_pdrop": 0.1,
+ "rope_scaling": null,
+ "rope_theta": 10000.0,
+ "tie_word_embeddings": false,
+ "tokenizer_model_max_length": 2048,
+ "tokenizer_padding_side": "right",
+ "torch_dtype": "float16",
+ "transformers_version": "4.39.3",
+ "tune_mm_mlp_adapter": true,
+ "use_cache": true,
+ "use_mm_proj": true,
+ "vlm_layers": 28,
+ "vocab_size": 50295
+ }
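
Note: this top-level config.json matches the checkpoint configs field for field, except that use_cache is true here where the checkpoints have false. Disabling the KV cache during training and re-enabling it in the exported inference config is the typical pattern, so the flip appears intentional.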
log.log ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:c84beee90c99151db2b2f99e408a10d5ab447f6a8ede48b927887b423456dd1a
+ size 42119830
log_eval_-dev-a1.log ADDED
The diff for this file is too large to render.

log_eval_2-test-a1.log ADDED
The diff for this file is too large to render.

log_eval_dev1.log ADDED
The diff for this file is too large to render.

log_eval_est-a1.log ADDED
The diff for this file is too large to render.

log_eval_h_test1.log ADDED
The diff for this file is too large to render.

log_eval_trueloadlora_0.7719.log ADDED
The diff for this file is too large to render.

mm_projector.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f5e00a79058975233c5269aaaebf441e51f102ba37de695b4b8b2468114b7270
+ size 415617251
mme_res.txt ADDED
@@ -0,0 +1,24 @@
+ =========== Perception ===========
+ total score: 1343.7580032012806
+
+ existence score: 180.0
+ count score: 138.33333333333331
+ position score: 138.33333333333331
+ color score: 168.33333333333331
+ posters score: 93.53741496598639
+ celebrity score: 71.47058823529412
+ scene score: 148.5
+ landmark score: 141.0
+ artwork score: 109.25
+ OCR score: 155.0
+
+
+ =========== Cognition ===========
+ total score: 271.07142857142856
+
+ commonsense_reasoning score: 118.57142857142857
+ numerical_calculation score: 55.0
+ text_translation score: 50.0
+ code_reasoning score: 47.5
+
+
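
For reference, each section total in mme_res.txt is just the sum of its sub-scores; a quick check (mine, not part of the repo) reproduces both totals:

# Arithmetic check: each MME section total equals the sum of its sub-scores.
import math

perception = [180.0, 138.33333333333331, 138.33333333333331, 168.33333333333331,
              93.53741496598639, 71.47058823529412, 148.5, 141.0, 109.25, 155.0]
cognition = [118.57142857142857, 55.0, 50.0, 47.5]

assert math.isclose(sum(perception), 1343.7580032012806)
assert math.isclose(sum(cognition), 271.07142857142856)
print(sum(perception) + sum(cognition))  # combined MME score, about 1614.83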
trainer_state.json ADDED
The diff for this file is too large to render.