ChunB1 committed on
Commit
3ee98a9
·
1 Parent(s): eba5c6a

Upload GPTNeoForCausalLM

Browse files
Files changed (2) hide show
  1. config.json +1 -0
  2. generation_config.json +1 -0
config.json CHANGED
@@ -37,6 +37,7 @@
37
  "model_type": "gpt_neo",
38
  "num_heads": 16,
39
  "num_layers": 8,
 
40
  "resid_dropout": 0,
41
  "summary_activation": null,
42
  "summary_first_dropout": 0.1,
 
37
  "model_type": "gpt_neo",
38
  "num_heads": 16,
39
  "num_layers": 8,
40
+ "pad_token_id": 50256,
41
  "resid_dropout": 0,
42
  "summary_activation": null,
43
  "summary_first_dropout": 0.1,
generation_config.json CHANGED
@@ -2,5 +2,6 @@
2
  "_from_model_config": true,
3
  "bos_token_id": 50256,
4
  "eos_token_id": 50256,
 
5
  "transformers_version": "4.36.1"
6
  }
 
2
  "_from_model_config": true,
3
  "bos_token_id": 50256,
4
  "eos_token_id": 50256,
5
+ "pad_token_id": 50256,
6
  "transformers_version": "4.36.1"
7
  }