DavidLanz committed
Commit e04ca8a · 1 Parent(s): f51e78d

Upload folder using huggingface_hub

Files changed (5)
  1. config.json +3 -1
  2. generation_config.json +2 -0
  3. pytorch_model.bin +1 -1
  4. tokenizer.json +0 -0
  5. vocab.json +0 -0
config.json CHANGED
@@ -1,15 +1,17 @@
 {
-  "_name_or_path": "gpt2",
+  "_name_or_path": "DavidLanz/tcp2023",
   "activation_function": "gelu_new",
   "architectures": [
     "GPT2LMHeadModel"
   ],
   "attn_pdrop": 0.1,
   "bos_token_id": 50256,
+  "do_sample": true,
   "embd_pdrop": 0.1,
   "eos_token_id": 50256,
   "initializer_range": 0.02,
   "layer_norm_epsilon": 1e-05,
+  "max_length": 50,
   "model_type": "gpt2",
   "n_ctx": 1024,
   "n_embd": 768,
generation_config.json CHANGED
@@ -1,6 +1,8 @@
 {
   "_from_model_config": true,
   "bos_token_id": 50256,
+  "do_sample": true,
   "eos_token_id": 50256,
+  "max_length": 50,
   "transformers_version": "4.32.1"
 }
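
The same two defaults are duplicated here in `generation_config.json`, which is where recent `transformers` versions look first when resolving generation settings. A quick way to confirm what the checkpoint advertises, again assuming the repo id from this commit:

```python
from transformers import GenerationConfig

# Fetch the generation defaults shipped with the checkpoint;
# after this commit the two added keys should be visible.
gen_config = GenerationConfig.from_pretrained("DavidLanz/tcp2023")
print(gen_config.do_sample)   # expected: True
print(gen_config.max_length)  # expected: 50
```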
pytorch_model.bin CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:8d209c0c02de777165b13df4ff4b86b7d3266252b3ef98d039b3a5c65f29f089
+oid sha256:34c336e3480ae79100d9e50e8c4140ff338ab58b841e05e203cd646711727210
 size 497807197
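
Only the Git LFS pointer changed here: identical byte size, new content hash, i.e. the weights were re-saved at the same serialized length. The `oid` is the SHA-256 of the actual file, so a downloaded copy can be checked against the pointer; a small sketch, where the local filename is an assumption:

```python
import hashlib

# Expected oid from the LFS pointer in this commit.
expected = "34c336e3480ae79100d9e50e8c4140ff338ab58b841e05e203cd646711727210"

# Hash the downloaded weights in 1 MiB chunks to avoid loading ~500 MB at once.
sha256 = hashlib.sha256()
with open("pytorch_model.bin", "rb") as f:  # hypothetical local path
    for chunk in iter(lambda: f.read(1 << 20), b""):
        sha256.update(chunk)

assert sha256.hexdigest() == expected, "checksum mismatch"
print("pytorch_model.bin matches the LFS pointer oid")
```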
tokenizer.json ADDED
The diff for this file is too large to render. See raw diff
 
vocab.json CHANGED
The diff for this file is too large to render. See raw diff