ranggafermata committed
Commit efb2fe9 · verified · 1 parent: fc85c99

Delete config.json

Files changed (1)
  1. config.json +0 -47
config.json DELETED
@@ -1,47 +0,0 @@
- {
-   "architectures": [
-     "GemmaForCausalLM"
-   ],
-   "auto_map": {
-     "AutoModelForCausalLM": "gemma.modeling_gemma.GemmaForCausalLM",
-     "AutoTokenizer": "tokenization_gemma.GemmaTokenizer"
-   },
-   "model_type": "gemma",
-   "torch_dtype": "bfloat16",
-   "transformers_version": "4.50.0",
-   "use_cache": true,
-   "vocab_size": 256000,
-   "hidden_size": 3072,
-   "intermediate_size": 8192,
-   "num_hidden_layers": 28,
-   "num_attention_heads": 16,
-   "max_position_embeddings": 8192,
-   "tie_word_embeddings": false,
-   "rope_scaling": null
- }
- {
-   "rope_theta": 1000000.0,
-   "sliding_window": 1024,
-   "sliding_window_pattern": 6,
-   "torch_dtype": "bfloat16",
-   "use_cache": true,
-   "vocab_size": 262208
- },
- "torch_dtype": "bfloat16",
- "transformers_version": "4.50.0.dev0",
- "vision_config": {
-   "attention_dropout": 0.0,
-   "hidden_act": "gelu_pytorch_tanh",
-   "hidden_size": 1152,
-   "image_size": 896,
-   "intermediate_size": 4304,
-   "layer_norm_eps": 1e-06,
-   "model_type": "siglip_vision_model",
-   "num_attention_heads": 16,
-   "num_channels": 3,
-   "num_hidden_layers": 27,
-   "patch_size": 14,
-   "torch_dtype": "bfloat16",
-   "vision_use_head": false
- }
- }
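
For context, the removed content is not a single well-formed JSON document: one complete object is followed by a second top-level object and then bare key/value pairs, so a strict JSON parser (such as the one used when transformers reads config.json) stops after the first closing brace. A minimal sketch with Python's standard json module, run on a shortened excerpt of the deleted content (only a few representative keys are kept), illustrates the failure mode:

import json

# Shortened excerpt of the deleted config.json: a complete object,
# then a second top-level object, then a dangling key.
excerpt = """
{
  "model_type": "gemma",
  "rope_scaling": null
}
{
  "rope_theta": 1000000.0
},
"torch_dtype": "bfloat16"
"""

try:
    json.loads(excerpt)
except json.JSONDecodeError as err:
    # json.loads parses the first object, then reports "Extra data"
    # for everything that follows it.
    print(f"config.json is not valid JSON: {err}")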