boopathiraj committed (verified)
Commit 65d9cc9 · Parent: d1a50fe

Upload Gemma3ForCausalLM

Files changed (2):
  config.json        +1 -1
  model.safetensors  +2 -2
config.json CHANGED
@@ -7,7 +7,7 @@
   "attention_dropout": 0.0,
   "attn_logit_softcapping": null,
   "bos_token_id": 2,
-  "dtype": "float16",
+  "dtype": "float32",
   "eos_token_id": 1,
   "final_logit_softcapping": null,
   "head_dim": 256,
model.safetensors CHANGED
@@ -1,3 +1,3 @@
  version https://git-lfs.github.com/spec/v1
- oid sha256:23c9828a4ba86e5a2445aecaadfa7c200bb5f593a6f06c1654e3a32c84ad7cac
- size 536222816
+ oid sha256:2ce9b58c570eb52fc0dbfb6fe99349d72f864f163912196bcb3df3d80d1a5652
+ size 1072419256
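The safetensors file roughly doubles in size (536,222,816 → 1,072,419,256 bytes), which is consistent with each parameter growing from 2 bytes in float16 to 4 bytes in float32.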