kaizen9 committed · verified
Commit 4d305c3 · Parent(s): 7c0b297

Upload PhiRotForCausalLM

Files changed (2):
  1. config.json (+1, -1)
  2. model.safetensors (+2, -2)
config.json CHANGED
@@ -23,7 +23,7 @@
   "rope_scaling": null,
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
-  "torch_dtype": "float16",
+  "torch_dtype": "bfloat16",
   "transformers_version": "4.47.0",
   "use_cache": true,
   "vocab_size": 51200
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:c04d3f5274fbe61537d122cd9a669cf1edd18a72411fdfb742f209f1e48727aa
-size 2836578696
+oid sha256:2d98bce47248062ccbb254b86ee037444ef6411aa0b27897d453a76e3e75f07a
+size 2836579040
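
model.safetensors is stored via Git LFS, so the diff only touches the pointer file: the sha256 oid and the byte size. The size grows by 344 bytes even though float16 and bfloat16 weights occupy the same space, which is presumably a change in the safetensors header metadata. A minimal integrity check against the new pointer, assuming the weights have been downloaded locally (the path is an assumption):

```python
# Minimal sketch: verify a local download against the LFS pointer above.
import hashlib
import os

def sha256_of(path: str, chunk_size: int = 1 << 20) -> str:
    """Stream the file so multi-GB weights don't need to fit in memory."""
    h = hashlib.sha256()
    with open(path, "rb") as f:
        for chunk in iter(lambda: f.read(chunk_size), b""):
            h.update(chunk)
    return h.hexdigest()

expected_oid = "2d98bce47248062ccbb254b86ee037444ef6411aa0b27897d453a76e3e75f07a"
expected_size = 2836579040

path = "model.safetensors"  # assumed local path
assert os.path.getsize(path) == expected_size, "size mismatch vs LFS pointer"
assert sha256_of(path) == expected_oid, "sha256 mismatch vs LFS pointer"
```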