gnokit committed on
Commit
fab0690
·
verified ·
1 Parent(s): 71e8b20

Upload LlamaForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +2 -2
  2. model.safetensors +2 -2
config.json CHANGED
@@ -1,5 +1,5 @@
1
  {
2
- "_name_or_path": "HuggingFaceTB/SmolLM2-135M",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
@@ -26,7 +26,7 @@
26
  "rope_scaling": null,
27
  "rope_theta": 100000,
28
  "tie_word_embeddings": true,
29
- "torch_dtype": "float32",
30
  "transformers_version": "4.49.0",
31
  "use_cache": true,
32
  "vocab_size": 49152
 
1
  {
2
+ "_name_or_path": "gnokit/SmolLM2-135M-Chat-v2",
3
  "architectures": [
4
  "LlamaForCausalLM"
5
  ],
 
26
  "rope_scaling": null,
27
  "rope_theta": 100000,
28
  "tie_word_embeddings": true,
29
+ "torch_dtype": "bfloat16",
30
  "transformers_version": "4.49.0",
31
  "use_cache": true,
32
  "vocab_size": 49152
model.safetensors CHANGED
@@ -1,3 +1,3 @@
1
  version https://git-lfs.github.com/spec/v1
2
- oid sha256:b1c30dd64b5450c23826f06249ee98b19e2b2e23b7997951be834ce1b454603e
3
- size 538090408
 
1
  version https://git-lfs.github.com/spec/v1
2
+ oid sha256:c128f9262c8d1bd882ce0b84fcf715d9eb29f6ddaaf792a9369a7c0ee1821c9b
3
+ size 269060552