wonderwind271 committed
Commit 12d0bc6 · verified · 1 Parent(s): be11506
This view is limited to 50 files because it contains too many changes; see the raw diff for the full change set.
Files changed (50)
  1. .gitattributes +25 -0
  2. trabank_vl_dino_pretrain2/checkpoint-0/config.json +53 -0
  3. trabank_vl_dino_pretrain2/checkpoint-0/generation_config.json +7 -0
  4. trabank_vl_dino_pretrain2/checkpoint-0/model.safetensors +3 -0
  5. trabank_vl_dino_pretrain2/checkpoint-0/special_tokens_map.json +24 -0
  6. trabank_vl_dino_pretrain2/checkpoint-0/tokenizer_config.json +44 -0
  7. trabank_vl_dino_pretrain2/checkpoint-0/training_args.bin +3 -0
  8. trabank_vl_dino_pretrain2/checkpoint-0/vocab.json +0 -0
  9. trabank_vl_dino_pretrain2/checkpoint-10000/config.json +53 -0
  10. trabank_vl_dino_pretrain2/checkpoint-10000/generation_config.json +7 -0
  11. trabank_vl_dino_pretrain2/checkpoint-10000/model.safetensors +3 -0
  12. trabank_vl_dino_pretrain2/checkpoint-10000/optimizer.pt +3 -0
  13. trabank_vl_dino_pretrain2/checkpoint-10000/rng_state_0.pth +3 -0
  14. trabank_vl_dino_pretrain2/checkpoint-10000/rng_state_1.pth +3 -0
  15. trabank_vl_dino_pretrain2/checkpoint-10000/scheduler.pt +3 -0
  16. trabank_vl_dino_pretrain2/checkpoint-10000/special_tokens_map.json +24 -0
  17. trabank_vl_dino_pretrain2/checkpoint-10000/tokenizer_config.json +44 -0
  18. trabank_vl_dino_pretrain2/checkpoint-10000/trainer_state.json +0 -0
  19. trabank_vl_dino_pretrain2/checkpoint-10000/training_args.bin +3 -0
  20. trabank_vl_dino_pretrain2/checkpoint-10000/vocab.json +0 -0
  21. trabank_vl_dino_pretrain2/checkpoint-100000/config.json +53 -0
  22. trabank_vl_dino_pretrain2/checkpoint-100000/generation_config.json +7 -0
  23. trabank_vl_dino_pretrain2/checkpoint-100000/model.safetensors +3 -0
  24. trabank_vl_dino_pretrain2/checkpoint-100000/optimizer.pt +3 -0
  25. trabank_vl_dino_pretrain2/checkpoint-100000/rng_state_0.pth +3 -0
  26. trabank_vl_dino_pretrain2/checkpoint-100000/rng_state_1.pth +3 -0
  27. trabank_vl_dino_pretrain2/checkpoint-100000/scheduler.pt +3 -0
  28. trabank_vl_dino_pretrain2/checkpoint-100000/special_tokens_map.json +24 -0
  29. trabank_vl_dino_pretrain2/checkpoint-100000/tokenizer_config.json +44 -0
  30. trabank_vl_dino_pretrain2/checkpoint-100000/trainer_state.json +3 -0
  31. trabank_vl_dino_pretrain2/checkpoint-100000/training_args.bin +3 -0
  32. trabank_vl_dino_pretrain2/checkpoint-100000/vocab.json +0 -0
  33. trabank_vl_dino_pretrain2/checkpoint-110000/config.json +53 -0
  34. trabank_vl_dino_pretrain2/checkpoint-110000/generation_config.json +7 -0
  35. trabank_vl_dino_pretrain2/checkpoint-110000/model.safetensors +3 -0
  36. trabank_vl_dino_pretrain2/checkpoint-110000/optimizer.pt +3 -0
  37. trabank_vl_dino_pretrain2/checkpoint-110000/rng_state_0.pth +3 -0
  38. trabank_vl_dino_pretrain2/checkpoint-110000/rng_state_1.pth +3 -0
  39. trabank_vl_dino_pretrain2/checkpoint-110000/scheduler.pt +3 -0
  40. trabank_vl_dino_pretrain2/checkpoint-110000/special_tokens_map.json +24 -0
  41. trabank_vl_dino_pretrain2/checkpoint-110000/tokenizer_config.json +44 -0
  42. trabank_vl_dino_pretrain2/checkpoint-110000/trainer_state.json +3 -0
  43. trabank_vl_dino_pretrain2/checkpoint-110000/training_args.bin +3 -0
  44. trabank_vl_dino_pretrain2/checkpoint-110000/vocab.json +0 -0
  45. trabank_vl_dino_pretrain2/checkpoint-120000/config.json +53 -0
  46. trabank_vl_dino_pretrain2/checkpoint-120000/generation_config.json +7 -0
  47. trabank_vl_dino_pretrain2/checkpoint-120000/model.safetensors +3 -0
  48. trabank_vl_dino_pretrain2/checkpoint-120000/optimizer.pt +3 -0
  49. trabank_vl_dino_pretrain2/checkpoint-120000/rng_state_0.pth +3 -0
  50. trabank_vl_dino_pretrain2/checkpoint-120000/rng_state_1.pth +3 -0
.gitattributes CHANGED
@@ -33,3 +33,28 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-100000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-110000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-120000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-130000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-140000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-150000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-160000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-170000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-180000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-190000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-200000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-210000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-220000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-230000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-240000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-250000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-260000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-270000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-280000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-290000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-300000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-70000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-80000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/checkpoint-90000/trainer_state.json filter=lfs diff=lfs merge=lfs -text
+ trabank_vl_dino_pretrain2/trainer_state.json filter=lfs diff=lfs merge=lfs -text
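Each added line follows the standard Git LFS attribute form "pattern filter=lfs diff=lfs merge=lfs -text", so the listed trainer_state.json files are committed as small LFS pointers rather than full blobs. A minimal sketch (Python; the file path is an assumption) of scanning such a .gitattributes for its LFS-tracked patterns:

# Sketch: list which patterns a .gitattributes file routes through Git LFS.
# Assumes the standard "pattern filter=lfs diff=lfs merge=lfs -text" form used above;
# the file path is a placeholder.
from pathlib import Path

def lfs_tracked_patterns(path: str = ".gitattributes") -> list[str]:
    patterns = []
    for line in Path(path).read_text().splitlines():
        parts = line.split()
        if len(parts) >= 2 and "filter=lfs" in parts[1:]:
            patterns.append(parts[0])  # the glob or literal path handed to LFS
    return patterns

for pattern in lfs_tracked_patterns():
    print(pattern)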
trabank_vl_dino_pretrain2/checkpoint-0/config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "LlavaGPTForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "detect_loss": false,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "image_aspect_ratio": "pad",
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": true,
+   "layer_norm_epsilon": 1e-05,
+   "mm_projector_lr": null,
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_tower": "dino",
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "num_key_value_heads": 12,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "vision_tower_type": "dino",
+   "vocab_size": 25005
+ }
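The config describes a GPT-2-sized decoder (12 layers, 12 heads, 768-dimensional embeddings, 1024 positions, a 25005-token vocabulary) wrapped in a custom LlavaGPTForCausalLM class with a DINO vision tower, so instantiating the model needs the accompanying training code. The file itself is plain JSON, though, and the backbone shape can be read off directly; a minimal sketch, with the checkpoint path assumed:

# Sketch: summarize the language-model backbone from a checkpoint's config.json.
# The directory is a placeholder; building the model still requires the custom
# LlavaGPTForCausalLM implementation named under "architectures".
import json
from pathlib import Path

checkpoint_dir = Path("trabank_vl_dino_pretrain2/checkpoint-0")
cfg = json.loads((checkpoint_dir / "config.json").read_text())

print(cfg["architectures"])                          # ['LlavaGPTForCausalLM']
print(cfg["model_type"], cfg["mm_vision_tower"])     # gpt2 backbone + dino vision tower
print(cfg["n_layer"], cfg["n_head"], cfg["n_embd"])  # 12 layers, 12 heads, 768 dims
print(cfg["n_positions"], cfg["vocab_size"])         # 1024 positions, 25005 tokens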
trabank_vl_dino_pretrain2/checkpoint-0/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.38.0",
+   "use_cache": false
+ }
trabank_vl_dino_pretrain2/checkpoint-0/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:faa84e86541e3086ec685a513764937c1040a222727ce57d02a8e059fa87c929
+ size 670210056
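Because model.safetensors is tracked by Git LFS, what is committed here is only the three-line pointer above: the LFS spec version, the SHA-256 of the real payload, and its size in bytes (about 670 MB). Once the actual file has been fetched (for example with git lfs pull or the huggingface_hub client), the pointer can be used to verify the download; a minimal sketch, with both local paths assumed:

# Sketch: check a downloaded file against its Git LFS pointer.
# "oid sha256:..." is the SHA-256 of the full content and "size" its byte length;
# both file paths below are placeholders for a local checkout.
import hashlib
from pathlib import Path

def parse_lfs_pointer(text: str) -> dict:
    fields = dict(line.split(" ", 1) for line in text.strip().splitlines())
    return {"oid": fields["oid"].removeprefix("sha256:"), "size": int(fields["size"])}

pointer = parse_lfs_pointer(Path("model.safetensors.pointer").read_text())

digest, total = hashlib.sha256(), 0
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # hash in 1 MiB chunks
        digest.update(chunk)
        total += len(chunk)

assert total == pointer["size"] and digest.hexdigest() == pointer["oid"]
print("pointer matches downloaded file")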
trabank_vl_dino_pretrain2/checkpoint-0/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<SOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<EOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<UNK>",
+   "unk_token": {
+     "content": "<UNK>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
trabank_vl_dino_pretrain2/checkpoint-0/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<PAD>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<UNK>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<SOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<EOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<SOS>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<EOS>",
+   "model_max_length": 2048,
+   "pad_token": "<UNK>",
+   "padding_side": "right",
+   "tokenizer_class": "CustomWordTokenizer",
+   "unk_token": "<UNK>"
+ }
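The tokenizer_class is a custom CustomWordTokenizer, so AutoTokenizer will not resolve it without the project's own code; the JSON still pins down the special-token inventory, with IDs 0-3 mapped to <PAD>, <UNK>, <SOS> and <EOS>, <UNK> reused as the padding token, and a 2048-token model_max_length. A minimal sketch, reading only the file shown above, for recovering that ID map:

# Sketch: recover the special-token id map from tokenizer_config.json alone.
# No tokenizer implementation is needed; the path is a placeholder.
import json
from pathlib import Path

tok_cfg = json.loads(Path("tokenizer_config.json").read_text())

id_to_token = {int(i): t["content"] for i, t in tok_cfg["added_tokens_decoder"].items()}
print(id_to_token)                   # {0: '<PAD>', 1: '<UNK>', 2: '<SOS>', 3: '<EOS>'}
print(tok_cfg["pad_token"])          # '<UNK>' doubles as padding
print(tok_cfg["model_max_length"])   # 2048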
trabank_vl_dino_pretrain2/checkpoint-0/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2e5cac6962892ba51458c4ff730d1530be81b09903879daeaa8214d165a7d5fb
+ size 5905
trabank_vl_dino_pretrain2/checkpoint-0/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
trabank_vl_dino_pretrain2/checkpoint-10000/config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "LlavaGPTForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "detect_loss": false,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "image_aspect_ratio": "pad",
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": true,
+   "layer_norm_epsilon": 1e-05,
+   "mm_projector_lr": null,
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_tower": "dino",
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "num_key_value_heads": 12,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "vision_tower_type": "dino",
+   "vocab_size": 25005
+ }
trabank_vl_dino_pretrain2/checkpoint-10000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.38.0",
+   "use_cache": false
+ }
trabank_vl_dino_pretrain2/checkpoint-10000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:90ef9fa2011850327f84e64fb9db9c572d76cc4d952811b832f86fb7e3ff4a74
+ size 670210056
trabank_vl_dino_pretrain2/checkpoint-10000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:1c5ae70950b05fea1c2935aff21b1e19e999b02fbe44575fcf3c2314d0afd8b9
+ size 994126027
trabank_vl_dino_pretrain2/checkpoint-10000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:e800e27c87e3293aa81fb875383cedc75fbc8052937e1473bed31266c559561f
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-10000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:638e8f22db8db88620f5f0208f57f1bcb0b2b1565c8b9c04bd676d85e2a3d1ef
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-10000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4aeaedc1e6e4ac9ac4f28a8691779e4a906da7b051e36d0bbe385033f066178d
+ size 1465
trabank_vl_dino_pretrain2/checkpoint-10000/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<SOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<EOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<UNK>",
+   "unk_token": {
+     "content": "<UNK>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
trabank_vl_dino_pretrain2/checkpoint-10000/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<PAD>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<UNK>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<SOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<EOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<SOS>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<EOS>",
+   "model_max_length": 2048,
+   "pad_token": "<UNK>",
+   "padding_side": "right",
+   "tokenizer_class": "CustomWordTokenizer",
+   "unk_token": "<UNK>"
+ }
trabank_vl_dino_pretrain2/checkpoint-10000/trainer_state.json ADDED
The diff for this file is too large to render. See raw diff
 
trabank_vl_dino_pretrain2/checkpoint-10000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d9afc445078d7a0e08b66aa2a6a92038125de135bbe646908f84d8a45adb76c
+ size 5905
trabank_vl_dino_pretrain2/checkpoint-10000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
trabank_vl_dino_pretrain2/checkpoint-100000/config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "LlavaGPTForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "detect_loss": false,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "image_aspect_ratio": "pad",
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": true,
+   "layer_norm_epsilon": 1e-05,
+   "mm_projector_lr": null,
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_tower": "dino",
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "num_key_value_heads": 12,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "vision_tower_type": "dino",
+   "vocab_size": 25005
+ }
trabank_vl_dino_pretrain2/checkpoint-100000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.38.0",
+   "use_cache": false
+ }
trabank_vl_dino_pretrain2/checkpoint-100000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:00ca1d52fafd1ebb7155f384954fca2382ad880b08882a469f26305989683c44
+ size 670210056
trabank_vl_dino_pretrain2/checkpoint-100000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:fc1375d754822e018db4d5360d8b8139166bb624a0810a42081c9def61875b3e
+ size 994126027
trabank_vl_dino_pretrain2/checkpoint-100000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:4ab5a99960a2fc56c2309ba16e43bd3d1513a48678b37cd8816fb12463dbde78
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-100000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ca4afb4b6b62eddff7750304b1a91e9b4ebc1efbbcca96cf8995080f0a278d3e
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-100000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:ae0a341cc06fce0d21bf7169c4964d449cefbf6ff943711fe1a78667c32ac1df
+ size 1465
trabank_vl_dino_pretrain2/checkpoint-100000/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<SOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<EOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<UNK>",
+   "unk_token": {
+     "content": "<UNK>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
trabank_vl_dino_pretrain2/checkpoint-100000/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<PAD>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<UNK>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<SOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<EOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<SOS>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<EOS>",
+   "model_max_length": 2048,
+   "pad_token": "<UNK>",
+   "padding_side": "right",
+   "tokenizer_class": "CustomWordTokenizer",
+   "unk_token": "<UNK>"
+ }
trabank_vl_dino_pretrain2/checkpoint-100000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:80216fca02fe92705013c6dc436585ecffa30ba06861d7f7a95269b1c905c478
+ size 16064021
trabank_vl_dino_pretrain2/checkpoint-100000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d9afc445078d7a0e08b66aa2a6a92038125de135bbe646908f84d8a45adb76c
+ size 5905
trabank_vl_dino_pretrain2/checkpoint-100000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
trabank_vl_dino_pretrain2/checkpoint-110000/config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "LlavaGPTForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "detect_loss": false,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "image_aspect_ratio": "pad",
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": true,
+   "layer_norm_epsilon": 1e-05,
+   "mm_projector_lr": null,
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_tower": "dino",
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "num_key_value_heads": 12,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "vision_tower_type": "dino",
+   "vocab_size": 25005
+ }
trabank_vl_dino_pretrain2/checkpoint-110000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.38.0",
+   "use_cache": false
+ }
trabank_vl_dino_pretrain2/checkpoint-110000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:bd991a24cfcd991ee7a4cbdcf9c45f3ec1dd85b5e32b2b07349f4da70a02316f
+ size 670210056
trabank_vl_dino_pretrain2/checkpoint-110000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:0d727eb52ef6f54653b290538a3642a59995c84c9e5f0cd461c717d574b10d1d
+ size 994126027
trabank_vl_dino_pretrain2/checkpoint-110000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b9b970859d419f254ebd66499f32f0829a2c6ad668c2fa698c82a334b2ec740d
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-110000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d9591c12f58d7515ccb077714e9edbc56659e1783c5cec0bb5b4dde82022ded7
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-110000/scheduler.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:db6067b09a272acaf5c85ca7f6a9e4741d56ce0b4071a298c7b399949ec03a20
+ size 1465
trabank_vl_dino_pretrain2/checkpoint-110000/special_tokens_map.json ADDED
@@ -0,0 +1,24 @@
+ {
+   "bos_token": {
+     "content": "<SOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "eos_token": {
+     "content": "<EOS>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   },
+   "pad_token": "<UNK>",
+   "unk_token": {
+     "content": "<UNK>",
+     "lstrip": false,
+     "normalized": false,
+     "rstrip": false,
+     "single_word": false
+   }
+ }
trabank_vl_dino_pretrain2/checkpoint-110000/tokenizer_config.json ADDED
@@ -0,0 +1,44 @@
+ {
+   "added_tokens_decoder": {
+     "0": {
+       "content": "<PAD>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "1": {
+       "content": "<UNK>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "2": {
+       "content": "<SOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     },
+     "3": {
+       "content": "<EOS>",
+       "lstrip": false,
+       "normalized": false,
+       "rstrip": false,
+       "single_word": false,
+       "special": true
+     }
+   },
+   "bos_token": "<SOS>",
+   "clean_up_tokenization_spaces": true,
+   "eos_token": "<EOS>",
+   "model_max_length": 2048,
+   "pad_token": "<UNK>",
+   "padding_side": "right",
+   "tokenizer_class": "CustomWordTokenizer",
+   "unk_token": "<UNK>"
+ }
trabank_vl_dino_pretrain2/checkpoint-110000/trainer_state.json ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:337d569e2828dd3fd086a52ffa81af64d92a6dc9f24b17cbfde25747b84c4bc9
+ size 17683025
trabank_vl_dino_pretrain2/checkpoint-110000/training_args.bin ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:2d9afc445078d7a0e08b66aa2a6a92038125de135bbe646908f84d8a45adb76c
+ size 5905
trabank_vl_dino_pretrain2/checkpoint-110000/vocab.json ADDED
The diff for this file is too large to render. See raw diff
 
trabank_vl_dino_pretrain2/checkpoint-120000/config.json ADDED
@@ -0,0 +1,53 @@
+ {
+   "activation_function": "gelu_new",
+   "architectures": [
+     "LlavaGPTForCausalLM"
+   ],
+   "attention_bias": false,
+   "attention_dropout": 0.0,
+   "attn_pdrop": 0.1,
+   "bos_token_id": 1,
+   "detect_loss": false,
+   "embd_pdrop": 0.1,
+   "eos_token_id": 2,
+   "freeze_mm_mlp_adapter": false,
+   "hidden_act": "silu",
+   "image_aspect_ratio": "pad",
+   "initializer_range": 0.02,
+   "intermediate_size": 3072,
+   "is_decoder": true,
+   "layer_norm_epsilon": 1e-05,
+   "mm_projector_lr": null,
+   "mm_use_im_patch_token": false,
+   "mm_use_im_start_end": false,
+   "mm_vision_tower": "dino",
+   "model_type": "gpt2",
+   "n_embd": 768,
+   "n_head": 12,
+   "n_inner": null,
+   "n_layer": 12,
+   "n_positions": 1024,
+   "num_key_value_heads": 12,
+   "reorder_and_upcast_attn": false,
+   "resid_pdrop": 0.1,
+   "rms_norm_eps": 1e-05,
+   "rope_scaling": null,
+   "rope_theta": 10000.0,
+   "scale_attn_by_inverse_layer_idx": false,
+   "scale_attn_weights": true,
+   "summary_activation": null,
+   "summary_first_dropout": 0.1,
+   "summary_proj_to_labels": true,
+   "summary_type": "cls_index",
+   "summary_use_proj": true,
+   "tie_word_embeddings": false,
+   "tokenizer_class": "LlamaTokenizer",
+   "tokenizer_model_max_length": 2048,
+   "tokenizer_padding_side": "right",
+   "torch_dtype": "float32",
+   "transformers_version": "4.38.0",
+   "tune_mm_mlp_adapter": false,
+   "use_cache": false,
+   "vision_tower_type": "dino",
+   "vocab_size": 25005
+ }
trabank_vl_dino_pretrain2/checkpoint-120000/generation_config.json ADDED
@@ -0,0 +1,7 @@
+ {
+   "_from_model_config": true,
+   "bos_token_id": 1,
+   "eos_token_id": 2,
+   "transformers_version": "4.38.0",
+   "use_cache": false
+ }
trabank_vl_dino_pretrain2/checkpoint-120000/model.safetensors ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d45552ece60140f6e63778efc55f311bbd80fc80c89272cd86016f90db301d67
+ size 670210056
trabank_vl_dino_pretrain2/checkpoint-120000/optimizer.pt ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:f645138e83688a85932b1e92166f38000c3cf17d7ad9d6a1b00ac2ddecec8288
+ size 994126027
trabank_vl_dino_pretrain2/checkpoint-120000/rng_state_0.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:50b0d8fa412a6cc7161325ac315c1e403a306eb94fd435484bdf388232dd33d7
+ size 14917
trabank_vl_dino_pretrain2/checkpoint-120000/rng_state_1.pth ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:d7304e2a27c7dba656585f92022f096860b4dc88b1503d5dcb71e348722b3a5c
+ size 14917