ErrickMiron committed on
Commit
691b6d6
·
verified ·
1 Parent(s): 5acdd99

Fix config.json to match GPT-2 architecture

Browse files
Files changed (1) hide show
  1. config.json +7 -6
config.json CHANGED
@@ -16,7 +16,6 @@
16
  "n_inner": null,
17
  "n_layer": 12,
18
  "n_positions": 1024,
19
- "pad_token_id": 50256,
20
  "reorder_and_upcast_attn": false,
21
  "resid_pdrop": 0.1,
22
  "scale_attn_by_inverse_layer_idx": false,
@@ -29,11 +28,13 @@
29
  "task_specific_params": {
30
  "text-generation": {
31
  "do_sample": true,
32
- "max_length": 50
 
33
  }
34
  },
35
- "torch_dtype": "float32",
36
- "transformers_version": "4.52.4",
37
  "use_cache": true,
38
- "vocab_size": 50257
39
- }
 
 
 
16
  "n_inner": null,
17
  "n_layer": 12,
18
  "n_positions": 1024,
 
19
  "reorder_and_upcast_attn": false,
20
  "resid_pdrop": 0.1,
21
  "scale_attn_by_inverse_layer_idx": false,
 
28
  "task_specific_params": {
29
  "text-generation": {
30
  "do_sample": true,
31
+ "max_length": 200,
32
+ "temperature": 0.7
33
  }
34
  },
35
+ "transformers_version": "4.46.3",
 
36
  "use_cache": true,
37
+ "vocab_size": 50257,
38
+ "_name_or_path": "gpt2",
39
+ "torch_dtype": "float32"
40
+ }