cipher982 committed on
Commit
9c7a101
·
1 Parent(s): 998b96c

Upload LlamaForCausalLM

Browse files
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "meta-llama/Llama-2-7b-hf",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
@@ -22,5 +22,5 @@
22
  "torch_dtype": "bfloat16",
23
  "transformers_version": "4.31.0",
24
  "use_cache": true,
25
- "vocab_size": 32000
26
  }
 
1
  {
2
+ "_name_or_path": "resized/llama-2-7b-hf-4tokens",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
22
  "torch_dtype": "bfloat16",
23
  "transformers_version": "4.31.0",
24
  "use_cache": true,
25
+ "vocab_size": 32004
26
  }
model-00001-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:e54e0ceac67dad811214504771b6fffa180897ec59e579b66a6a04753b9dd9fc
3
- size 9976579144
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:7a801c432d7f9df00d5e0e78a871dce85b032def7783cbde2a9d67132fc603ea
3
+ size 9976611912
model-00002-of-00002.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:82132acda9e1eecc9c313ff0205ba0c66a4133186cbe7529b9f74f4f08567ff6
3
- size 3500297424
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c18c487c96ed880a09a8289264552801b1e490235e78cad449d72b6cc9b1e374
3
+ size 3500330192
model.safetensors.index.json CHANGED
@@ -1,6 +1,6 @@
1
  {
2
  "metadata": {
3
- "total_size": 13476839424
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00002-of-00002.safetensors",
 
1
  {
2
  "metadata": {
3
+ "total_size": 13476904960
4
  },
5
  "weight_map": {
6
  "lm_head.weight": "model-00002-of-00002.safetensors",