Commit 990edf1 (verified) · Parent(s): 62168bd
lbasile committed

Port weights and add standard config

config.json CHANGED
@@ -2,10 +2,12 @@
   "architectures": [
     "LlavaForConditionalGeneration"
   ],
+  "dtype": "bfloat16",
   "ignore_index": -100,
-  "image_token_id": 32000,
+  "image_seq_length": 576,
   "image_token_index": 32000,
   "model_type": "llava",
+  "multimodal_projector_bias": true,
   "pad_token_id": 32001,
   "projector_hidden_act": "gelu",
   "text_config": {
@@ -13,21 +15,42 @@
     "architectures": [
       "LlamaForCausalLM"
     ],
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "head_dim": 128,
+    "hidden_act": "silu",
+    "hidden_size": 4096,
+    "initializer_range": 0.02,
+    "intermediate_size": 11008,
     "max_position_embeddings": 4096,
+    "mlp_bias": false,
     "model_type": "llama",
-    "pad_token_id": 0,
+    "num_attention_heads": 32,
+    "num_hidden_layers": 32,
+    "num_key_value_heads": 32,
+    "pretraining_tp": 1,
     "rms_norm_eps": 1e-05,
-    "torch_dtype": "float16",
+    "rope_scaling": null,
+    "rope_theta": 10000.0,
+    "use_cache": true,
     "vocab_size": 32064
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.2",
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.3",
   "vision_config": {
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "hidden_act": "quick_gelu",
     "hidden_size": 1024,
     "image_size": 336,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
     "intermediate_size": 4096,
+    "layer_norm_eps": 1e-05,
     "model_type": "clip_vision_model",
     "num_attention_heads": 16,
+    "num_channels": 3,
     "num_hidden_layers": 24,
     "patch_size": 14,
     "projection_dim": 768,
generation_config.json CHANGED
@@ -3,5 +3,5 @@
   "bos_token_id": 1,
   "eos_token_id": 2,
   "pad_token_id": 32001,
-  "transformers_version": "4.37.2"
+  "transformers_version": "4.57.3"
 }
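Only the transformers_version stamp changes here; the generation defaults are untouched. A quick check, under the same placeholder repo id as above:

from transformers import GenerationConfig

gen = GenerationConfig.from_pretrained("your-org/llava-7b")
# The functional settings survive the re-export unchanged.
assert (gen.bos_token_id, gen.eos_token_id, gen.pad_token_id) == (1, 2, 32001)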
model-00003-of-00003.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:5c039d9a43a5ad55f5e8e933a569c2f5b246fbbb255378b77fabfbbd43eccd1d
+oid sha256:78a4eabbc4836520030be8f5609f09e8fb2845d524476ac716e35060200b4c6c
 size 4176137496
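The shard's LFS pointer gets a new sha256 oid while the byte size stays identical, which is consistent with re-serializing the same tensors in bfloat16 (the same 2 bytes per element as the old float16). A sketch for verifying a downloaded shard against the pointer; the local path is an assumption:

import hashlib

EXPECTED = "78a4eabbc4836520030be8f5609f09e8fb2845d524476ac716e35060200b4c6c"

h = hashlib.sha256()
# Assumed local path: wherever the shard was downloaded.
with open("model-00003-of-00003.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        h.update(chunk)
assert h.hexdigest() == EXPECTED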
model.safetensors.index.json CHANGED
@@ -1,5 +1,6 @@
 {
   "metadata": {
+    "total_parameters": 7063427072,
     "total_size": 14126854144
   },
   "weight_map": {