lbasile committed (verified)
Commit cfbb8c2 · 1 Parent(s): c116cba

Port weights and add standard config
config.json CHANGED
@@ -2,38 +2,55 @@
   "architectures": [
     "LlavaForConditionalGeneration"
   ],
+  "dtype": "bfloat16",
   "ignore_index": -100,
-  "image_token_id": 32000,
+  "image_seq_length": 576,
   "image_token_index": 32000,
   "model_type": "llava",
+  "multimodal_projector_bias": true,
   "pad_token_id": 32001,
   "projector_hidden_act": "gelu",
   "text_config": {
-    "_name_or_path": "lmsys/vicuna-13b-v1.5",
+    "_name_or_path": "meta-llama/Llama-2-13b-hf",
     "architectures": [
       "LlamaForCausalLM"
     ],
+    "attention_bias": false,
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "head_dim": 128,
+    "hidden_act": "silu",
     "hidden_size": 5120,
+    "initializer_range": 0.02,
     "intermediate_size": 13824,
-    "max_length": 4096,
     "max_position_embeddings": 4096,
+    "mlp_bias": false,
     "model_type": "llama",
     "num_attention_heads": 40,
     "num_hidden_layers": 40,
     "num_key_value_heads": 40,
-    "pad_token_id": 0,
+    "pretraining_tp": 1,
     "rms_norm_eps": 1e-05,
-    "torch_dtype": "float16",
+    "rope_scaling": null,
+    "rope_theta": 10000.0,
+    "use_cache": true,
     "vocab_size": 32064
   },
-  "torch_dtype": "bfloat16",
-  "transformers_version": "4.37.2",
+  "tie_word_embeddings": false,
+  "transformers_version": "4.57.3",
   "vision_config": {
+    "attention_dropout": 0.0,
+    "dtype": "bfloat16",
+    "hidden_act": "quick_gelu",
     "hidden_size": 1024,
     "image_size": 336,
+    "initializer_factor": 1.0,
+    "initializer_range": 0.02,
     "intermediate_size": 4096,
+    "layer_norm_eps": 1e-05,
     "model_type": "clip_vision_model",
     "num_attention_heads": 16,
+    "num_channels": 3,
     "num_hidden_layers": 24,
     "patch_size": 14,
     "projection_dim": 768,
generation_config.json CHANGED
@@ -2,7 +2,6 @@
   "_from_model_config": true,
   "bos_token_id": 1,
   "eos_token_id": 2,
-  "max_length": 4096,
   "pad_token_id": 32001,
-  "transformers_version": "4.37.2"
+  "transformers_version": "4.57.3"
 }
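
Note: with "max_length": 4096 dropped from generation_config.json, the checkpoint no longer caps generation length on its own, so callers should bound it at call time. A hedged usage sketch; the "." paths, dummy image, and prompt are placeholders and assumptions, not part of this commit:

from PIL import Image
from transformers import AutoProcessor, LlavaForConditionalGeneration

# Placeholder paths: a local checkout of this repository.
model = LlavaForConditionalGeneration.from_pretrained(".")
processor = AutoProcessor.from_pretrained(".")

image = Image.new("RGB", (336, 336))  # dummy image, for the sketch only
prompt = "USER: <image>\nDescribe the image. ASSISTANT:"
inputs = processor(images=image, text=prompt, return_tensors="pt")

# max_length is no longer inherited from the generation config, so cap output explicitly.
output_ids = model.generate(**inputs, max_new_tokens=256)
print(processor.batch_decode(output_ids, skip_special_tokens=True)[0])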
model-00006-of-00006.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:250f6743557e5cecceb103b19ea29530deac0000d927ad297d46331d20e2cb67
+oid sha256:b2fbca5c630b1101b0438665c3bbc3dba65b4b7ef26bfe8f0e55aa5fa8bf537e
 size 2021860536
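
Note: the Git LFS pointer records a new SHA-256 for the re-serialized shard while the byte size stays the same; both can be checked against a downloaded copy. A small sketch, assuming the shard has been fetched into the working directory:

import hashlib
from pathlib import Path

path = Path("model-00006-of-00006.safetensors")  # assumes the shard was downloaded locally
sha = hashlib.sha256()
with path.open("rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # stream in 1 MiB chunks
        sha.update(chunk)

assert path.stat().st_size == 2021860536
assert sha.hexdigest() == "b2fbca5c630b1101b0438665c3bbc3dba65b4b7ef26bfe8f0e55aa5fa8bf537e"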
model.safetensors.index.json CHANGED
@@ -1,5 +1,6 @@
 {
   "metadata": {
+    "total_parameters": 13351494656,
     "total_size": 26702989312
   },
   "weight_map": {