Sayan01 committed on
Commit 7b33b1f · verified · 1 Parent(s): 913dbe4

Upload PhiForCausalLM

Files changed (3)
  1. config.json +1 -1
  2. generation_config.json +1 -1
  3. model.safetensors +1 -1
config.json CHANGED
@@ -26,7 +26,7 @@
   "rope_theta": 10000.0,
   "tie_word_embeddings": false,
   "torch_dtype": "bfloat16",
-  "transformers_version": "4.45.2",
+  "transformers_version": "4.46.3",
   "use_cache": true,
   "vocab_size": 51200
 }
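
The only change to config.json is the transformers_version bump from 4.45.2 to 4.46.3; the architecture settings are untouched. As a quick sanity check, the checkpoint can be loaded roughly as sketched below (the repo id is a placeholder, not taken from this commit):

```python
# Minimal sketch (not part of the commit): load the checkpoint after the
# transformers version bump recorded in config.json.
import torch
from transformers import AutoModelForCausalLM, AutoTokenizer

repo_id = "Sayan01/<model-repo>"  # hypothetical repo id, substitute the real one

# config.json declares torch_dtype: bfloat16, so load the weights in that dtype.
model = AutoModelForCausalLM.from_pretrained(repo_id, torch_dtype=torch.bfloat16)
tokenizer = AutoTokenizer.from_pretrained(repo_id)
```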
generation_config.json CHANGED
@@ -4,5 +4,5 @@
   "early_stopping": true,
   "eos_token_id": 50256,
   "num_beams": 4,
-  "transformers_version": "4.45.2"
+  "transformers_version": "4.46.3"
 }
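
generation_config.json likewise only records the new transformers version; the decoding defaults (beam search with num_beams=4, early_stopping, eos_token_id=50256) are unchanged. A minimal sketch of what those defaults amount to when calling generate(), assuming the model and tokenizer loaded above:

```python
# Passing the generation settings explicitly is equivalent to relying on the
# values stored in generation_config.json.
output_ids = model.generate(
    **tokenizer("Hello, world", return_tensors="pt"),
    num_beams=4,          # beam search, as in generation_config.json
    early_stopping=True,  # stop once num_beams finished candidates exist
    eos_token_id=50256,   # end-of-text token declared in the config
    max_new_tokens=64,    # not in the config; added here so the call terminates
)
print(tokenizer.decode(output_ids[0], skip_special_tokens=True))
```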
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:d8f26762b2fd74994c145bd4abba4c57d46cc4d0038a0c078aa9b615c6b03490
+oid sha256:aa8996d99a56729ff915c1f8871ac07b738c5c66f04f82c9d4b57000ee37e9ca
 size 2202884688
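
The model.safetensors pointer shows a new sha256 oid at an identical size (2,202,884,688 bytes), i.e. the weights were re-uploaded. A downloaded copy can be checked against the oid with a short hash, assuming model.safetensors sits in the working directory:

```python
# Verify that a downloaded model.safetensors matches the sha256 oid recorded
# in the Git LFS pointer above (file path is an assumption).
import hashlib

expected = "aa8996d99a56729ff915c1f8871ac07b738c5c66f04f82c9d4b57000ee37e9ca"

h = hashlib.sha256()
with open("model.safetensors", "rb") as f:
    for chunk in iter(lambda: f.read(1 << 20), b""):  # read in 1 MiB chunks
        h.update(chunk)

assert h.hexdigest() == expected, "checksum mismatch: file differs from this commit"
```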