jonatanklosko committed on
Commit
e075e7c
·
verified ·
1 Parent(s): 3f1b14e

Upload Gemma3ForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. model.safetensors +1 -1
config.json CHANGED
@@ -20,7 +20,7 @@
20
  "intermediate_size": 37,
21
  "layer_types": [
22
  "sliding_attention",
23
- "sliding_attention"
24
  ],
25
  "max_position_embeddings": 512,
26
  "model_type": "gemma3_text",
@@ -28,7 +28,7 @@
28
  "num_hidden_layers": 2,
29
  "num_key_value_heads": 2,
30
  "pad_token_id": 0,
31
- "query_pre_attn_scalar": 16,
32
  "rms_norm_eps": 1e-06,
33
  "rope_local_base_freq": 10000.0,
34
  "rope_scaling": null,
 
20
  "intermediate_size": 37,
21
  "layer_types": [
22
  "sliding_attention",
23
+ "full_attention"
24
  ],
25
  "max_position_embeddings": 512,
26
  "model_type": "gemma3_text",
 
28
  "num_hidden_layers": 2,
29
  "num_key_value_heads": 2,
30
  "pad_token_id": 0,
31
+ "query_pre_attn_scalar": 8,
32
  "rms_norm_eps": 1e-06,
33
  "rope_local_base_freq": 10000.0,
34
  "rope_scaling": null,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:5ba20d857d41f79ae75bf121cee3a0c03be141d2946b5c1c893947259656f7db
3
  size 954096
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c6d80df4c7423e5a75e37a82fa00d142f0bab2335e558ef40833b6b908b4bd9f
3
  size 954096