qgallouedec (HF Staff) committed
Commit 2e6197e · verified · 1 Parent(s): 40456c7

Upload model

Files changed (2):
  1. config.json +3 -8
  2. model.safetensors +2 -2
config.json CHANGED
@@ -5,18 +5,19 @@
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 50256,
-  "dtype": "bfloat16",
+  "dtype": "float32",
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
+  "intermediate_size": 32,
   "layer_norm_epsilon": 1e-05,
   "model_type": "gpt2",
-  "n_ctx": 1024,
   "n_embd": 8,
   "n_head": 4,
   "n_inner": null,
   "n_layer": 2,
   "n_positions": 1024,
+  "num_key_value_heads": 2,
   "reorder_and_upcast_attn": false,
   "resid_pdrop": 0.1,
   "scale_attn_by_inverse_layer_idx": false,
@@ -26,12 +27,6 @@
   "summary_proj_to_labels": true,
   "summary_type": "cls_index",
   "summary_use_proj": true,
-  "task_specific_params": {
-    "text-generation": {
-      "do_sample": true,
-      "max_length": 50
-    }
-  },
   "transformers_version": "4.57.0.dev0",
   "use_cache": true,
   "vocab_size": 50257
model.safetensors CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:07b862c46681bcbf918b437645266488f0de4c38eba6b7417116b384ff0c38a9
-size 826536
+oid sha256:7be33b67605f25d79fc603526434e8ca451c0a4dd743ddda54a90c9804aa2e72
+size 1650536
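The weight file roughly doubles (826,536 → 1,650,536 bytes), consistent with the dtype change above: bfloat16 stores 2 bytes per parameter, float32 stores 4. A small sketch to check this against a local copy of the file (the path is an assumption):

# Sanity-check sketch: count the elements in the checkpoint and compare the
# expected float32 payload against the file size (the safetensors JSON header
# adds a small amount on top of the raw tensor bytes).
import math
from safetensors import safe_open

with safe_open("model.safetensors", framework="pt") as f:  # assumed local path
    n_params = sum(math.prod(f.get_slice(name).get_shape()) for name in f.keys())

print(n_params)      # total element count across all tensors
print(n_params * 4)  # expected payload bytes at 4 bytes/param (float32)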