English
naveensp committed on
Commit
8f276b6
·
verified ·
1 Parent(s): 66f3ac2

Delete config.json

Browse files
Files changed (1) hide show
  1. config.json +0 -72
config.json DELETED
@@ -1,72 +0,0 @@
1
- {
2
- "activation_type": "swiglu",
3
- "alibi": false,
4
- "alibi_bias_max": 8.0,
5
- "architectures": [
6
- "LlavaOLMoBitnet1BForCausalLM"
7
- ],
8
- "attention_dropout": 0.0,
9
- "attention_layer_norm": false,
10
- "attention_layer_norm_with_affine": false,
11
- "auto_map": {
12
- "AutoConfig": "configuration_olmo.OLMoConfig",
13
- "AutoModelForCausalLM": "modeling_olmo.OLMoForCausalLM"
14
- },
15
- "bias_for_layer_norm": false,
16
- "block_group_size": 1,
17
- "block_type": "sequential",
18
- "bos_token_id": 50279,
19
- "clip_qkv": null,
20
- "d_model": 2048,
21
- "embedding_dropout": 0.0,
22
- "embedding_size": 50304,
23
- "eos_token_id": 50279,
24
- "flash_attention": true,
25
- "freeze_mm_mlp_adapter": false,
26
- "freeze_mm_vision_resampler": false,
27
- "image_aspect_ratio": "pad",
28
- "include_bias": false,
29
- "inference_mode": false,
30
- "init_cutoff_factor": null,
31
- "init_device": "cpu",
32
- "init_fn": "mitchell",
33
- "init_std": 0.02,
34
- "layer_norm_type": "rms",
35
- "layer_norm_with_affine": true,
36
- "max_sequence_length": 2048,
37
- "mlp_hidden_size": null,
38
- "mlp_ratio": 8,
39
- "mm_hidden_size": 1024,
40
- "mm_patch_merge_type": "flat",
41
- "mm_projector_lr": null,
42
- "mm_projector_type": "mlp2x_gelu",
43
- "mm_resampler_type": null,
44
- "mm_use_im_patch_token": false,
45
- "mm_use_im_start_end": false,
46
- "mm_vision_select_feature": "patch",
47
- "mm_vision_select_layer": -2,
48
- "mm_vision_tower": "openai/clip-vit-large-patch14-336",
49
- "model_type": "IntelLabs/LlavaOLMoBitnet1B",
50
- "multi_query_attention": false,
51
- "n_heads": 16,
52
- "n_kv_heads": null,
53
- "n_layers": 16,
54
- "pad_token_id": 1,
55
- "precision": "amp_bf16",
56
- "residual_dropout": 0.0,
57
- "rope": true,
58
- "rope_full_precision": true,
59
- "scale_logits": false,
60
- "ternary": true,
61
- "tokenizer_model_max_length": 2048,
62
- "tokenizer_padding_side": "right",
63
- "torch_dtype": "float32",
64
- "transformers_version": "4.37.2",
65
- "tune_mm_mlp_adapter": false,
66
- "tune_mm_vision_resampler": false,
67
- "unfreeze_mm_vision_tower": false,
68
- "use_cache": true,
69
- "use_mm_proj": true,
70
- "vocab_size": 50280,
71
- "weight_tying": true
72
- }