jonasknobloch commited on
Commit
aec7599
·
verified ·
1 Parent(s): d1a3500

Upload folder using huggingface_hub

Browse files
README.md ADDED
@@ -0,0 +1,89 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ ---
2
+ tags:
3
+ - generated_from_trainer
4
+ datasets:
5
+ - roneneldan/TinyStories
6
+ metrics:
7
+ - accuracy
8
+ model-index:
9
+ - name: gpt2_m020_tiny-stories_1024
10
+ results:
11
+ - task:
12
+ name: Causal Language Modeling
13
+ type: text-generation
14
+ dataset:
15
+ name: roneneldan/TinyStories
16
+ type: roneneldan/TinyStories
17
+ metrics:
18
+ - name: Accuracy
19
+ type: accuracy
20
+ value: 0.6759859395777024
21
+ ---
22
+
23
+ <!-- This model card has been generated automatically according to the information the Trainer had access to. You
24
+ should probably proofread and complete it, then remove this comment. -->
25
+
26
+ [<img src="https://raw.githubusercontent.com/wandb/assets/main/wandb-github-badge-28.svg" alt="Visualize in Weights & Biases" width="200" height="32"/>](https://wandb.ai/scads-nlp/morph-gpt_gpt2_tiny-stories/runs/43jyrhid)
27
+ # gpt2_m020_tiny-stories_1024
28
+
29
+ This model was trained on the roneneldan/TinyStories dataset (no base model checkpoint is specified in the auto-generated card — fill in the base model link, or state that it was trained from scratch).
30
+ It achieves the following results on the evaluation set:
31
+ - Loss: 1.2217
32
+ - Accuracy: 0.6760
33
+
34
+ ## Model description
35
+
36
+ More information needed
37
+
38
+ ## Intended uses & limitations
39
+
40
+ More information needed
41
+
42
+ ## Training and evaluation data
43
+
44
+ More information needed
45
+
46
+ ## Training procedure
47
+
48
+ ### Training hyperparameters
49
+
50
+ The following hyperparameters were used during training:
51
+ - learning_rate: 5e-05
52
+ - train_batch_size: 32
53
+ - eval_batch_size: 32
54
+ - seed: 42
55
+ - optimizer: Adam with betas=(0.9,0.999) and epsilon=1e-08
56
+ - lr_scheduler_type: linear
57
+ - num_epochs: 1.0
58
+
59
+ ### Training results
60
+
61
+ | Training Loss | Epoch | Step | Validation Loss | Accuracy |
62
+ |:-------------:|:------:|:-----:|:---------------:|:--------:|
63
+ | 2.9424 | 0.0525 | 1000 | 2.4920 | 0.4375 |
64
+ | 1.9972 | 0.1050 | 2000 | 1.8161 | 0.5644 |
65
+ | 1.7424 | 0.1575 | 3000 | 1.6275 | 0.5979 |
66
+ | 1.6233 | 0.2100 | 4000 | 1.5269 | 0.6162 |
67
+ | 1.5467 | 0.2625 | 5000 | 1.4625 | 0.6284 |
68
+ | 1.4918 | 0.3150 | 6000 | 1.4187 | 0.6369 |
69
+ | 1.4537 | 0.3675 | 7000 | 1.3823 | 0.6438 |
70
+ | 1.4228 | 0.4200 | 8000 | 1.3529 | 0.6496 |
71
+ | 1.3987 | 0.4725 | 9000 | 1.3283 | 0.6543 |
72
+ | 1.378 | 0.5250 | 10000 | 1.3099 | 0.6578 |
73
+ | 1.3567 | 0.5775 | 11000 | 1.2938 | 0.6611 |
74
+ | 1.3436 | 0.6300 | 12000 | 1.2784 | 0.6643 |
75
+ | 1.328 | 0.6825 | 13000 | 1.2664 | 0.6666 |
76
+ | 1.3157 | 0.7350 | 14000 | 1.2546 | 0.6690 |
77
+ | 1.3052 | 0.7875 | 15000 | 1.2462 | 0.6707 |
78
+ | 1.2966 | 0.8400 | 16000 | 1.2370 | 0.6726 |
79
+ | 1.2943 | 0.8925 | 17000 | 1.2296 | 0.6742 |
80
+ | 1.2837 | 0.9450 | 18000 | 1.2244 | 0.6754 |
81
+ | 1.2825 | 0.9975 | 19000 | 1.2217 | 0.6760 |
82
+
83
+
84
+ ### Framework versions
85
+
86
+ - Transformers 4.42.3
87
+ - Pytorch 2.2.2+cu121
88
+ - Datasets 2.20.0
89
+ - Tokenizers 0.19.1
all_results.json ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 1.0,
3
+ "eval_accuracy": 0.6759859395777024,
4
+ "eval_loss": 1.221665620803833,
5
+ "eval_runtime": 52.5898,
6
+ "eval_samples": 6128,
7
+ "eval_samples_per_second": 116.524,
8
+ "eval_steps_per_second": 3.651,
9
+ "perplexity": 3.39283420511172,
10
+ "total_flos": 3.18502967574528e+17,
11
+ "train_loss": 1.5702329817773064,
12
+ "train_runtime": 7426.3864,
13
+ "train_samples": 609477,
14
+ "train_samples_per_second": 82.069,
15
+ "train_steps_per_second": 2.565
16
+ }
config.json ADDED
@@ -0,0 +1,31 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "activation_function": "gelu_new",
3
+ "architectures": [
4
+ "GPT2LMHeadModel"
5
+ ],
6
+ "attn_pdrop": 0.1,
7
+ "bos_token_id": 1024,
8
+ "embd_pdrop": 0.1,
9
+ "eos_token_id": 1024,
10
+ "initializer_range": 0.02,
11
+ "layer_norm_epsilon": 1e-05,
12
+ "model_type": "gpt2",
13
+ "n_embd": 768,
14
+ "n_head": 12,
15
+ "n_inner": null,
16
+ "n_layer": 12,
17
+ "n_positions": 1024,
18
+ "reorder_and_upcast_attn": false,
19
+ "resid_pdrop": 0.1,
20
+ "scale_attn_by_inverse_layer_idx": false,
21
+ "scale_attn_weights": true,
22
+ "summary_activation": null,
23
+ "summary_first_dropout": 0.1,
24
+ "summary_proj_to_labels": true,
25
+ "summary_type": "cls_index",
26
+ "summary_use_proj": true,
27
+ "torch_dtype": "float32",
28
+ "transformers_version": "4.42.3",
29
+ "use_cache": true,
30
+ "vocab_size": 50257
31
+ }
eval_results.json ADDED
@@ -0,0 +1,10 @@
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 1.0,
3
+ "eval_accuracy": 0.6759859395777024,
4
+ "eval_loss": 1.221665620803833,
5
+ "eval_runtime": 52.5898,
6
+ "eval_samples": 6128,
7
+ "eval_samples_per_second": 116.524,
8
+ "eval_steps_per_second": 3.651,
9
+ "perplexity": 3.39283420511172
10
+ }
generation_config.json ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
+ {
2
+ "_from_model_config": true,
3
+ "bos_token_id": 1024,
4
+ "eos_token_id": 1024,
5
+ "transformers_version": "4.42.3"
6
+ }
model.safetensors ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:1b4a4fef21424803410f39f7f7216be711d2d179a3baa1c00f15f1f4d4af9a66
3
+ size 497774208
special_tokens_map.json ADDED
@@ -0,0 +1 @@
 
 
1
+ {}
tokenizer.json ADDED
@@ -0,0 +1,1919 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "version": "1.0",
3
+ "truncation": null,
4
+ "padding": null,
5
+ "added_tokens": [
6
+ {
7
+ "id": 1024,
8
+ "content": "<|endoftext|>",
9
+ "single_word": false,
10
+ "lstrip": false,
11
+ "rstrip": false,
12
+ "normalized": true,
13
+ "special": true
14
+ }
15
+ ],
16
+ "normalizer": null,
17
+ "pre_tokenizer": {
18
+ "type": "ByteLevel",
19
+ "add_prefix_space": false,
20
+ "trim_offsets": true,
21
+ "use_regex": true
22
+ },
23
+ "post_processor": {
24
+ "type": "ByteLevel",
25
+ "add_prefix_space": true,
26
+ "trim_offsets": false,
27
+ "use_regex": true
28
+ },
29
+ "decoder": {
30
+ "type": "ByteLevel",
31
+ "add_prefix_space": true,
32
+ "trim_offsets": true,
33
+ "use_regex": true
34
+ },
35
+ "model": {
36
+ "type": "BPE",
37
+ "dropout": null,
38
+ "unk_token": null,
39
+ "continuing_subword_prefix": null,
40
+ "end_of_word_suffix": null,
41
+ "fuse_unk": false,
42
+ "byte_fallback": false,
43
+ "ignore_merges": false,
44
+ "vocab": {
45
+ "!": 0,
46
+ "\"": 1,
47
+ "#": 2,
48
+ "$": 3,
49
+ "%": 4,
50
+ "&": 5,
51
+ "'": 6,
52
+ "(": 7,
53
+ ")": 8,
54
+ "*": 9,
55
+ "+": 10,
56
+ ",": 11,
57
+ "-": 12,
58
+ ".": 13,
59
+ "/": 14,
60
+ "0": 15,
61
+ "1": 16,
62
+ "2": 17,
63
+ "3": 18,
64
+ "4": 19,
65
+ "5": 20,
66
+ "6": 21,
67
+ "7": 22,
68
+ "8": 23,
69
+ "9": 24,
70
+ ":": 25,
71
+ ";": 26,
72
+ "<": 27,
73
+ "=": 28,
74
+ ">": 29,
75
+ "?": 30,
76
+ "@": 31,
77
+ "A": 32,
78
+ "B": 33,
79
+ "C": 34,
80
+ "D": 35,
81
+ "E": 36,
82
+ "F": 37,
83
+ "G": 38,
84
+ "H": 39,
85
+ "I": 40,
86
+ "J": 41,
87
+ "K": 42,
88
+ "L": 43,
89
+ "M": 44,
90
+ "N": 45,
91
+ "O": 46,
92
+ "P": 47,
93
+ "Q": 48,
94
+ "R": 49,
95
+ "S": 50,
96
+ "T": 51,
97
+ "U": 52,
98
+ "V": 53,
99
+ "W": 54,
100
+ "X": 55,
101
+ "Y": 56,
102
+ "Z": 57,
103
+ "[": 58,
104
+ "\\": 59,
105
+ "]": 60,
106
+ "_": 61,
107
+ "`": 62,
108
+ "a": 63,
109
+ "b": 64,
110
+ "c": 65,
111
+ "d": 66,
112
+ "e": 67,
113
+ "f": 68,
114
+ "g": 69,
115
+ "h": 70,
116
+ "i": 71,
117
+ "j": 72,
118
+ "k": 73,
119
+ "l": 74,
120
+ "m": 75,
121
+ "n": 76,
122
+ "o": 77,
123
+ "p": 78,
124
+ "q": 79,
125
+ "r": 80,
126
+ "s": 81,
127
+ "t": 82,
128
+ "u": 83,
129
+ "v": 84,
130
+ "w": 85,
131
+ "x": 86,
132
+ "y": 87,
133
+ "z": 88,
134
+ "{": 89,
135
+ "|": 90,
136
+ "}": 91,
137
+ "~": 92,
138
+ "¡": 93,
139
+ "¢": 94,
140
+ "£": 95,
141
+ "¤": 96,
142
+ "¥": 97,
143
+ "¦": 98,
144
+ "§": 99,
145
+ "¨": 100,
146
+ "©": 101,
147
+ "ª": 102,
148
+ "«": 103,
149
+ "¬": 104,
150
+ "®": 105,
151
+ "¯": 106,
152
+ "°": 107,
153
+ "±": 108,
154
+ "³": 109,
155
+ "´": 110,
156
+ "µ": 111,
157
+ "¶": 112,
158
+ "·": 113,
159
+ "¸": 114,
160
+ "¹": 115,
161
+ "º": 116,
162
+ "»": 117,
163
+ "¼": 118,
164
+ "½": 119,
165
+ "¾": 120,
166
+ "¿": 121,
167
+ "Â": 122,
168
+ "Ã": 123,
169
+ "Ä": 124,
170
+ "Å": 125,
171
+ "É": 126,
172
+ "Ê": 127,
173
+ "Ñ": 128,
174
+ "Ò": 129,
175
+ "á": 130,
176
+ "â": 131,
177
+ "ã": 132,
178
+ "ä": 133,
179
+ "å": 134,
180
+ "æ": 135,
181
+ "ç": 136,
182
+ "è": 137,
183
+ "é": 138,
184
+ "î": 139,
185
+ "ï": 140,
186
+ "ð": 141,
187
+ "ĉ": 142,
188
+ "Ċ": 143,
189
+ "Ġ": 144,
190
+ "Ģ": 145,
191
+ "ģ": 146,
192
+ "Ĥ": 147,
193
+ "ĥ": 148,
194
+ "Ħ": 149,
195
+ "ħ": 150,
196
+ "Ĩ": 151,
197
+ "ĩ": 152,
198
+ "Ī": 153,
199
+ "ī": 154,
200
+ "Ĭ": 155,
201
+ "ĭ": 156,
202
+ "Į": 157,
203
+ "į": 158,
204
+ "İ": 159,
205
+ "ı": 160,
206
+ "IJ": 161,
207
+ "ij": 162,
208
+ "Ĵ": 163,
209
+ "ĵ": 164,
210
+ "Ķ": 165,
211
+ "ķ": 166,
212
+ "ĸ": 167,
213
+ "ĺ": 168,
214
+ "Ļ": 169,
215
+ "ļ": 170,
216
+ "Ľ": 171,
217
+ "ľ": 172,
218
+ "Ŀ": 173,
219
+ "ŀ": 174,
220
+ "Ł": 175,
221
+ "ł": 176,
222
+ "Ń": 177,
223
+ "he": 178,
224
+ "Ġt": 179,
225
+ "Ġa": 180,
226
+ "Ġs": 181,
227
+ "nd": 182,
228
+ "Ġw": 183,
229
+ "Ġthe": 184,
230
+ "ed": 185,
231
+ "Ġb": 186,
232
+ "Ġto": 187,
233
+ "ĠT": 188,
234
+ "Ġand": 189,
235
+ "Ġh": 190,
236
+ "Ġf": 191,
237
+ "in": 192,
238
+ "Ġwa": 193,
239
+ "re": 194,
240
+ "it": 195,
241
+ "ou": 196,
242
+ "Ġl": 197,
243
+ "Ġd": 198,
244
+ "Ġc": 199,
245
+ "Ġp": 200,
246
+ "ay": 201,
247
+ "Ġm": 202,
248
+ "er": 203,
249
+ "Ġwas": 204,
250
+ "om": 205,
251
+ "Ġhe": 206,
252
+ "ĠThe": 207,
253
+ "is": 208,
254
+ "im": 209,
255
+ "ar": 210,
256
+ "Ġn": 211,
257
+ "on": 212,
258
+ "Ġsa": 213,
259
+ "ĠS": 214,
260
+ "id": 215,
261
+ "ll": 216,
262
+ "Ġha": 217,
263
+ "Ġg": 218,
264
+ "at": 219,
265
+ "ing": 220,
266
+ "ot": 221,
267
+ "en": 222,
268
+ "an": 223,
269
+ "le": 224,
270
+ "end": 225,
271
+ "or": 226,
272
+ "Ġ\"": 227,
273
+ "ir": 228,
274
+ "of": 229,
275
+ "ĠH": 230,
276
+ "am": 231,
277
+ "et": 232,
278
+ "Ġit": 233,
279
+ "Ġth": 234,
280
+ "ig": 235,
281
+ "il": 236,
282
+ "ĠHe": 237,
283
+ "Ġin": 238,
284
+ "Ġpl": 239,
285
+ "ow": 240,
286
+ "ver": 241,
287
+ "ri": 242,
288
+ "ĠO": 243,
289
+ "ut": 244,
290
+ "ĠThey": 245,
291
+ "Ġbe": 246,
292
+ "Ġu": 247,
293
+ "Ġplay": 248,
294
+ "Ġsaid": 249,
295
+ "ith": 250,
296
+ "pp": 251,
297
+ "Ġwith": 252,
298
+ "Ġday": 253,
299
+ "Ġy": 254,
300
+ "oo": 255,
301
+ "ex": 256,
302
+ "Ġr": 257,
303
+ "ce": 258,
304
+ "ĠI": 259,
305
+ "ck": 260,
306
+ "Ġher": 261,
307
+ "ld": 262,
308
+ "Ġhis": 263,
309
+ "ĠL": 264,
310
+ "ke": 265,
311
+ "Ġst": 266,
312
+ "Ġbig": 267,
313
+ "nt": 268,
314
+ "very": 269,
315
+ "Ġyou": 270,
316
+ "st": 271,
317
+ "ĠB": 272,
318
+ "ĠTim": 273,
319
+ "ne": 274,
320
+ "ve": 275,
321
+ "ked": 276,
322
+ "ext": 277,
323
+ "Ġhapp": 278,
324
+ "Ġon": 279,
325
+ "ĠShe": 280,
326
+ "un": 281,
327
+ "ĠM": 282,
328
+ "ily": 283,
329
+ "text": 284,
330
+ "all": 285,
331
+ "riend": 286,
332
+ "Ġfriend": 287,
333
+ "Ġthey": 288,
334
+ "Ġli": 289,
335
+ "Ġwe": 290,
336
+ "Ġhad": 291,
337
+ "Ġnot": 292,
338
+ "Ġup": 293,
339
+ "her": 294,
340
+ "Ġwant": 295,
341
+ "Ġof": 296,
342
+ "ad": 297,
343
+ "Ġ<": 298,
344
+ "se": 299,
345
+ "|>": 300,
346
+ "Ġ<|": 301,
347
+ "itt": 302,
348
+ "Ġe": 303,
349
+ "Ġdo": 304,
350
+ "Ġhappy": 305,
351
+ "ĠA": 306,
352
+ "ent": 307,
353
+ "Ġvery": 308,
354
+ "Ġthat": 309,
355
+ "Ġsaw": 310,
356
+ "oftext": 311,
357
+ "'s": 312,
358
+ "ould": 313,
359
+ "Ġmom": 314,
360
+ "Ġfor": 315,
361
+ "Ġsh": 316,
362
+ "es": 317,
363
+ "ittle": 318,
364
+ "Ġlittle": 319,
365
+ "Ġshe": 320,
366
+ "ime": 321,
367
+ "ch": 322,
368
+ "Ġk": 323,
369
+ "endoftext": 324,
370
+ "Ġnam": 325,
371
+ ".\"": 326,
372
+ "Ġtime": 327,
373
+ "ound": 328,
374
+ "Ġso": 329,
375
+ "Ġthere": 330,
376
+ "Ġnamed": 331,
377
+ "Ġbo": 332,
378
+ "Ġwere": 333,
379
+ "Ġne": 334,
380
+ "ĠLily": 335,
381
+ "Ġwanted": 336,
382
+ "out": 337,
383
+ "Ġbut": 338,
384
+ "ĠOne": 339,
385
+ "nce": 340,
386
+ "Ġfriends": 341,
387
+ "!\"": 342,
388
+ "ht": 343,
389
+ "ird": 344,
390
+ "ved": 345,
391
+ "Ġsm": 346,
392
+ "Ġan": 347,
393
+ "al": 348,
394
+ "Ġbird": 349,
395
+ "ĠTom": 350,
396
+ "el": 351,
397
+ "ake": 352,
398
+ "ue": 353,
399
+ "ug": 354,
400
+ "ome": 355,
401
+ "ĠIt": 356,
402
+ "Ġtoo": 357,
403
+ "ĠĊ": 358,
404
+ "ide": 359,
405
+ "Ġhel": 360,
406
+ "Ġwent": 361,
407
+ "Ġwh": 362,
408
+ "Ġhelp": 363,
409
+ "Ġis": 364,
410
+ "Ġall": 365,
411
+ "Ġloo": 366,
412
+ "Ġlo": 367,
413
+ "ry": 368,
414
+ "ter": 369,
415
+ "Ġupon": 370,
416
+ "ore": 371,
417
+ "ill": 372,
418
+ "ame": 373,
419
+ "ind": 374,
420
+ "Ġfun": 375,
421
+ "Ġtoy": 376,
422
+ "get": 377,
423
+ "ra": 378,
424
+ "Ġas": 379,
425
+ "Ġdid": 380,
426
+ "Ġat": 381,
427
+ "Ġj": 382,
428
+ "Ġre": 383,
429
+ "gether": 384,
430
+ "ur": 385,
431
+ "Ġo": 386,
432
+ "ack": 387,
433
+ "Ġse": 388,
434
+ "ly": 389,
435
+ "Ġtogether": 390,
436
+ "Ġtre": 391,
437
+ "Ġcat": 392,
438
+ "ĠOnce": 393,
439
+ "ood": 394,
440
+ "Ġcould": 395,
441
+ "ted": 396,
442
+ "Ġdog": 397,
443
+ "Ġcan": 398,
444
+ "Ġtheir": 399,
445
+ "ec": 400,
446
+ "ard": 401,
447
+ "ark": 402,
448
+ "my": 403,
449
+ "Ġgir": 404,
450
+ "Ġplayed": 405,
451
+ "Ġhim": 406,
452
+ "Ġball": 407,
453
+ "Ġro": 408,
454
+ "?\"": 409,
455
+ "Ġgirl": 410,
456
+ "ax": 411,
457
+ "way": 412,
458
+ "Ġgo": 413,
459
+ "um": 414,
460
+ "Ġare": 415,
461
+ "Ġout": 416,
462
+ "Ġle": 417,
463
+ "Ġfr": 418,
464
+ "ain": 419,
465
+ "hen": 420,
466
+ "ĠW": 421,
467
+ "'t": 422,
468
+ "Ġthem": 423,
469
+ "Ġsad": 424,
470
+ "ul": 425,
471
+ "Ġboy": 426,
472
+ "Ġtree": 427,
473
+ "other": 428,
474
+ "Ġhave": 429,
475
+ "Ġman": 430,
476
+ "hed": 431,
477
+ "Ġcl": 432,
478
+ "oug": 433,
479
+ "Ġloved": 434,
480
+ "Ġlooked": 435,
481
+ "ic": 436,
482
+ "Ġfound": 437,
483
+ "Ġsp": 438,
484
+ "one": 439,
485
+ "Ġstar": 440,
486
+ "Ġback": 441,
487
+ "Ġsc": 442,
488
+ "hing": 443,
489
+ "own": 444,
490
+ "Ġlike": 445,
491
+ "are": 446,
492
+ "ful": 447,
493
+ "ĠJ": 448,
494
+ "side": 449,
495
+ "Ġme": 450,
496
+ "ĠBut": 451,
497
+ "ight": 452,
498
+ "Ġla": 453,
499
+ "ong": 454,
500
+ "Ġcar": 455,
501
+ "Ġpark": 456,
502
+ "op": 457,
503
+ "ick": 458,
504
+ "Ġwould": 459,
505
+ "elt": 460,
506
+ "ĠSue": 461,
507
+ "Ġmake": 462,
508
+ "ell": 463,
509
+ "round": 464,
510
+ "Ġfa": 465,
511
+ "Ġfelt": 466,
512
+ "Ġsee": 467,
513
+ "Ġno": 468,
514
+ "Ġasked": 469,
515
+ "ag": 470,
516
+ "omet": 471,
517
+ "ĠMax": 472,
518
+ "Ġnew": 473,
519
+ "ice": 474,
520
+ "ĠF": 475,
521
+ "ouse": 476,
522
+ "Ġstarted": 477,
523
+ "Ġcame": 478,
524
+ "Ġother": 479,
525
+ "ared": 480,
526
+ "Ġsay": 481,
527
+ "Ġal": 482,
528
+ "ss": 483,
529
+ "pot": 484,
530
+ "ob": 485,
531
+ "ought": 486,
532
+ "Ġsomet": 487,
533
+ "Ġag": 488,
534
+ "Ġgood": 489,
535
+ "Ġsmall": 490,
536
+ "Ġbr": 491,
537
+ "ia": 492,
538
+ "ĠBen": 493,
539
+ "ade": 494,
540
+ "Ġbec": 495,
541
+ "ĠSam": 496,
542
+ "ings": 497,
543
+ "ried": 498,
544
+ "Ġwor": 499,
545
+ "Ġfind": 500,
546
+ "Ġsmil": 501,
547
+ "Ġex": 502,
548
+ "Ġwal": 503,
549
+ "Ġaway": 504,
550
+ "Ġput": 505,
551
+ "Ġliked": 506,
552
+ "ty": 507,
553
+ "ook": 508,
554
+ "Ġwhat": 509,
555
+ "Ġco": 510,
556
+ "Ġsomething": 511,
557
+ "Ġfrom": 512,
558
+ "Ġmade": 513,
559
+ "Ġthought": 514,
560
+ "Ġhome": 515,
561
+ "Ġevery": 516,
562
+ "You": 517,
563
+ "Ġplaying": 518,
564
+ "Ġlaug": 519,
565
+ "Ġmu": 520,
566
+ "ach": 521,
567
+ "ile": 522,
568
+ "uc": 523,
569
+ "arn": 524,
570
+ "ie": 525,
571
+ "Ġran": 526,
572
+ "ave": 527,
573
+ "Ġsome": 528,
574
+ "Ġagain": 529,
575
+ "now": 530,
576
+ "Ġfl": 531,
577
+ "ny": 532,
578
+ "Ġhouse": 533,
579
+ "Ġdown": 534,
580
+ "king": 535,
581
+ "ure": 536,
582
+ "Ġtook": 537,
583
+ "Ġscared": 538,
584
+ "Ġpr": 539,
585
+ "Ġtoys": 540,
586
+ "Ġlearn": 541,
587
+ "Ġsmiled": 542,
588
+ "if": 543,
589
+ "ened": 544,
590
+ "Ġwill": 545,
591
+ "Ġbox": 546,
592
+ "dd": 547,
593
+ "ret": 548,
594
+ "ab": 549,
595
+ "ep": 550,
596
+ "Ġmy": 551,
597
+ "uck": 552,
598
+ "Ġbl": 553,
599
+ "Ġthings": 554,
600
+ "Ġaround": 555,
601
+ "Ġyour": 556,
602
+ "oud": 557,
603
+ "ish": 558,
604
+ "Ġfe": 559,
605
+ "Ġlived": 560,
606
+ "Ġsun": 561,
607
+ "Ġthen": 562,
608
+ "ank": 563,
609
+ "as": 564,
610
+ "ĠSpot": 565,
611
+ ",\"": 566,
612
+ "us": 567,
613
+ "Ġwhen": 568,
614
+ "Ġch": 569,
615
+ "hat": 570,
616
+ "Ġlot": 571,
617
+ "Ġsw": 572,
618
+ "ump": 573,
619
+ "Ġab": 574,
620
+ "ĠSo": 575,
621
+ "ĠD": 576,
622
+ "pped": 577,
623
+ "Ġget": 578,
624
+ "ist": 579,
625
+ "ucy": 580,
626
+ "ĠLucy": 581,
627
+ "ap": 582,
628
+ "oth": 583,
629
+ "ust": 584,
630
+ "Ġtried": 585,
631
+ "Ġknow": 586,
632
+ "Ġgot": 587,
633
+ "Ġdec": 588,
634
+ "Ġwho": 589,
635
+ "ĠMia": 590,
636
+ "new": 591,
637
+ "Ġmany": 592,
638
+ "nder": 593,
639
+ "Ġany": 594,
640
+ "Ġint": 595,
641
+ "Ġabout": 596,
642
+ "ited": 597,
643
+ "Ġpret": 598,
644
+ "au": 599,
645
+ "ause": 600,
646
+ "ally": 601,
647
+ "ive": 602,
648
+ "ĠYou": 603,
649
+ "Ġdecid": 604,
650
+ "Ġred": 605,
651
+ "ace": 606,
652
+ "fter": 607,
653
+ "Ġmore": 608,
654
+ "ous": 609,
655
+ "ise": 610,
656
+ "ĠMom": 611,
657
+ "Ġv": 612,
658
+ "ĠE": 613,
659
+ "qu": 614,
660
+ "Ġcare": 615,
661
+ "Ġhug": 616,
662
+ "Ġpo": 617,
663
+ "Ġshow": 618,
664
+ "Ġlearned": 619,
665
+ "Ġwater": 620,
666
+ "urp": 621,
667
+ "Ġbest": 622,
668
+ "Ġlook": 623,
669
+ "Ġjump": 624,
670
+ "Ġun": 625,
671
+ "Ġop": 626,
672
+ "Ġgre": 627,
673
+ "Ġexc": 628,
674
+ "ways": 629,
675
+ "âĢ": 630,
676
+ "Ġoutside": 631,
677
+ "Ġalways": 632,
678
+ "ant": 633,
679
+ "fe": 634,
680
+ "Ġroom": 635,
681
+ "Ġinto": 636,
682
+ "Ġho": 637,
683
+ "Ġeat": 638,
684
+ "nn": 639,
685
+ "ite": 640,
686
+ "Ġpe": 641,
687
+ "Ġboth": 642,
688
+ "Ġdad": 643,
689
+ "Ġke": 644,
690
+ "ower": 645,
691
+ "Ġsays": 646,
692
+ "Ġone": 647,
693
+ "Ġfeel": 648,
694
+ "Ġexcited": 649,
695
+ "Ġnice": 650,
696
+ "Ġthis": 651,
697
+ "ĠBob": 652,
698
+ "Ġfast": 653,
699
+ "udd": 654,
700
+ "Ġrun": 655,
701
+ "urpr": 656,
702
+ "Ġlong": 657,
703
+ "Ġsurpr": 658,
704
+ "nna": 659,
705
+ "Ġsor": 660,
706
+ "Ġam": 661,
707
+ "Ġtold": 662,
708
+ "Ġsk": 663,
709
+ "our": 664,
710
+ "Ġmo": 665,
711
+ "ĠC": 666,
712
+ "ara": 667,
713
+ "Ġinside": 668,
714
+ "Ġtr": 669,
715
+ "ull": 670,
716
+ "ink": 671,
717
+ "Ġknew": 672,
718
+ "imal": 673,
719
+ "Ġanimal": 674,
720
+ "Ġthan": 675,
721
+ "Ġpretty": 676,
722
+ "iny": 677,
723
+ "og": 678,
724
+ "Ġrock": 679,
725
+ "Ġgra": 680,
726
+ "Ġpick": 681,
727
+ "Ġtake": 682,
728
+ "hank": 683,
729
+ "ĠAnd": 684,
730
+ "Ġeach": 685,
731
+ "Ġgave": 686,
732
+ "Ġbecame": 687,
733
+ "Ġsl": 688,
734
+ "Ġmuch": 689,
735
+ "Ġstr": 690,
736
+ "Ġhow": 691,
737
+ "Ġneed": 692,
738
+ "Ġlaughed": 693,
739
+ "Ġtow": 694,
740
+ "ven": 695,
741
+ "etter": 696,
742
+ "Ġor": 697,
743
+ "ĠAs": 698,
744
+ "Ġdecided": 699,
745
+ "Ġbecause": 700,
746
+ "Ġunder": 701,
747
+ "lew": 702,
748
+ "ged": 703,
749
+ "Ġold": 704,
750
+ "ĠThen": 705,
751
+ "ge": 706,
752
+ "ro": 707,
753
+ "here": 708,
754
+ "rom": 709,
755
+ "urt": 710,
756
+ "Ġcle": 711,
757
+ "Ġfish": 712,
758
+ "ase": 713,
759
+ "and": 714,
760
+ "Ġflower": 715,
761
+ "Ġclo": 716,
762
+ "ast": 717,
763
+ "Ġbear": 718,
764
+ "ĠWhen": 719,
765
+ "ess": 720,
766
+ "Yes": 721,
767
+ "Ġlist": 722,
768
+ "Ġhand": 723,
769
+ "pl": 724,
770
+ "urn": 725,
771
+ "Ġkind": 726,
772
+ "ĠWe": 727,
773
+ "Ġtry": 728,
774
+ "Ġjust": 729,
775
+ "Ġte": 730,
776
+ "Ġfood": 731,
777
+ "Ġnear": 732,
778
+ "Ġhig": 733,
779
+ "ine": 734,
780
+ "Ġwat": 735,
781
+ "Ġide": 736,
782
+ "ĠIn": 737,
783
+ "Ġsky": 738,
784
+ "Ġfi": 739,
785
+ "ĠHis": 740,
786
+ "ving": 741,
787
+ "pec": 742,
788
+ "Ġus": 743,
789
+ "more": 744,
790
+ "Ġidea": 745,
791
+ "Ġbetter": 746,
792
+ "Ġtw": 747,
793
+ "Ġbug": 748,
794
+ "Ġheard": 749,
795
+ "gry": 750,
796
+ "Ġits": 751,
797
+ "ate": 752,
798
+ "Ġen": 753,
799
+ "able": 754,
800
+ "Ġlet": 755,
801
+ "ff": 756,
802
+ "ber": 757,
803
+ "Ġcareful": 758,
804
+ "Ġshare": 759,
805
+ "Ġsurpris": 760,
806
+ "lf": 761,
807
+ "Ġif": 762,
808
+ "Ġfly": 763,
809
+ "ĠAnna": 764,
810
+ "Ġstor": 765,
811
+ "ĠAmy": 766,
812
+ "ial": 767,
813
+ "Ġflew": 768,
814
+ "ion": 769,
815
+ "ĠK": 770,
816
+ "Ġcom": 771,
817
+ "Ġspec": 772,
818
+ "Ġdan": 773,
819
+ "Ġspecial": 774,
820
+ "ĠSara": 775,
821
+ "his": 776,
822
+ "ream": 777,
823
+ "Ġfo": 778,
824
+ "Ġnever": 779,
825
+ "ched": 780,
826
+ "Ġby": 781,
827
+ "Ġsorry": 782,
828
+ "Ġbu": 783,
829
+ "Ġgr": 784,
830
+ "Ġwind": 785,
831
+ "Ġlove": 786,
832
+ "Ġdon": 787,
833
+ "Ġcol": 788,
834
+ "ort": 789,
835
+ "rm": 790,
836
+ "Ġtal": 791,
837
+ "Ġclean": 792,
838
+ "Ġend": 793,
839
+ "pected": 794,
840
+ "expected": 795,
841
+ "Ġeven": 796,
842
+ "ople": 797,
843
+ "Ġmag": 798,
844
+ "Ġwalked": 799,
845
+ "Ġhard": 800,
846
+ "Ġshiny": 801,
847
+ "Ġover": 802,
848
+ "Ġcolor": 803,
849
+ "Ġcake": 804,
850
+ "ak": 805,
851
+ "Ġbook": 806,
852
+ "Ġturn": 807,
853
+ "Ġfore": 808,
854
+ "hy": 809,
855
+ "Ġfam": 810,
856
+ "Ġbad": 811,
857
+ "imb": 812,
858
+ "Ġclimb": 813,
859
+ "Ġsafe": 814,
860
+ "Ġafter": 815,
861
+ "ady": 816,
862
+ "Ġproud": 817,
863
+ "bb": 818,
864
+ "Ġpeople": 819,
865
+ "Ġhurt": 820,
866
+ "Ġhappened": 821,
867
+ "Ġcu": 822,
868
+ "Ġhigh": 823,
869
+ "ĠHer": 824,
870
+ "udden": 825,
871
+ "Ġdidn": 826,
872
+ "Ġloud": 827,
873
+ "Ġcome": 828,
874
+ "Ġche": 829,
875
+ "arden": 830,
876
+ "Ġdoor": 831,
877
+ "Ġgarden": 832,
878
+ "Ġanimals": 833,
879
+ "Ġgl": 834,
880
+ "Ġopened": 835,
881
+ "Ġground": 836,
882
+ "Ġpicked": 837,
883
+ "ĠP": 838,
884
+ "Ġim": 839,
885
+ "Ġgive": 840,
886
+ "ail": 841,
887
+ "ild": 842,
888
+ "ĠFrom": 843,
889
+ "Ġway": 844,
890
+ "Ġblue": 845,
891
+ "'m": 846,
892
+ "Ġstill": 847,
893
+ "Ġever": 848,
894
+ "ĠN": 849,
895
+ "Ġhugged": 850,
896
+ "iz": 851,
897
+ "Ġcall": 852,
898
+ "Ġfar": 853,
899
+ "ip": 854,
900
+ "age": 855,
901
+ "Ġoff": 856,
902
+ "Ġmagic": 857,
903
+ "ough": 858,
904
+ "Thank": 859,
905
+ "kay": 860,
906
+ "ĠEvery": 861,
907
+ "Ġshould": 862,
908
+ "Ġanymore": 863,
909
+ "ane": 864,
910
+ "Ġpar": 865,
911
+ "Ġfamily": 866,
912
+ "ool": 867,
913
+ "Ġstay": 868,
914
+ "Ġplace": 869,
915
+ "Ġkid": 870,
916
+ "Ġpic": 871,
917
+ "uddenly": 872,
918
+ "Ġlots": 873,
919
+ "Ġqu": 874,
920
+ "uff": 875,
921
+ "ock": 876,
922
+ "ct": 877,
923
+ "Ġwalk": 878,
924
+ "Ġnow": 879,
925
+ "Ġgreat": 880,
926
+ "em": 881,
927
+ "ture": 882,
928
+ "Ġstrong": 883,
929
+ "be": 884,
930
+ "dy": 885,
931
+ "Ġunt": 886,
932
+ "aut": 887,
933
+ "Ġforest": 888,
934
+ "lease": 889,
935
+ "No": 890,
936
+ "Ġbra": 891,
937
+ "Ġfrog": 892,
938
+ "ers": 893,
939
+ "Ġbro": 894,
940
+ "Ġuntil": 895,
941
+ "Ġbeaut": 896,
942
+ "opped": 897,
943
+ "Ġsqu": 898,
944
+ "Ġstick": 899,
945
+ "Ġnext": 900,
946
+ "Ġboat": 901,
947
+ "Ġapp": 902,
948
+ "ning": 903,
949
+ "Ġlisten": 904,
950
+ "oy": 905,
951
+ "Ġtra": 906,
952
+ "hes": 907,
953
+ "Ġpicture": 908,
954
+ "Ġkids": 909,
955
+ "Ġyum": 910,
956
+ "aking": 911,
957
+ "Ġbeauti": 912,
958
+ "Ġbeautiful": 913,
959
+ "ary": 914,
960
+ "Ġsurprised": 915,
961
+ "Ġdra": 916,
962
+ "Let": 917,
963
+ "Ġclos": 918,
964
+ "Ġstory": 919,
965
+ "ĠR": 920,
966
+ "Ġrain": 921,
967
+ "Ġwhile": 922,
968
+ "Ġimp": 923,
969
+ "rel": 924,
970
+ "Ġwo": 925,
971
+ "Ġtown": 926,
972
+ "pt": 927,
973
+ "Ġunexpected": 928,
974
+ "ress": 929,
975
+ "Ġsof": 930,
976
+ "ĠAt": 931,
977
+ "unny": 932,
978
+ "Ġbeing": 933,
979
+ "Ġhat": 934,
980
+ "ree": 935,
981
+ "Ġmor": 936,
982
+ "Ġthanked": 937,
983
+ "Ġad": 938,
984
+ "Ġcalled": 939,
985
+ "dded": 940,
986
+ "Ġrem": 941,
987
+ "Ġmet": 942,
988
+ "ies": 943,
989
+ "Ġcry": 944,
990
+ "Ġgame": 945,
991
+ "by": 946,
992
+ "Ġopen": 947,
993
+ "ger": 948,
994
+ "Ġangry": 949,
995
+ "Ġtruck": 950,
996
+ "itty": 951,
997
+ "Ġsoft": 952,
998
+ "Ġkeep": 953,
999
+ "Ġjumped": 954,
1000
+ "vent": 955,
1001
+ "les": 956,
1002
+ "Ġlost": 957,
1003
+ "Ġbed": 958,
1004
+ "self": 959,
1005
+ "uffy": 960,
1006
+ "Ġdoll": 961,
1007
+ "Ġwarm": 962,
1008
+ "oon": 963,
1009
+ "illy": 964,
1010
+ "Ġbrave": 965,
1011
+ "Ġsmile": 966,
1012
+ "ĠJo": 967,
1013
+ "Ġmouse": 968,
1014
+ "Ġate": 969,
1015
+ "Ġde": 970,
1016
+ "It": 971,
1017
+ "fore": 972,
1018
+ "so": 973,
1019
+ "Ġwatch": 974,
1020
+ "iss": 975,
1021
+ "Ġleave": 976,
1022
+ "az": 977,
1023
+ "Ġalso": 978,
1024
+ "Ġfell": 979,
1025
+ "Ġsn": 980,
1026
+ "ĠSuddenly": 981,
1027
+ "Ġgreen": 982,
1028
+ "Ġtwo": 983,
1029
+ "Ġface": 984,
1030
+ "ĠG": 985,
1031
+ "ma": 986,
1032
+ "gan": 987,
1033
+ "bit": 988,
1034
+ "ble": 989,
1035
+ "ĠâĢ": 990,
1036
+ "ĠFl": 991,
1037
+ "abbit": 992,
1038
+ "Ġnoise": 993,
1039
+ "owl": 994,
1040
+ "Ġexpl": 995,
1041
+ "Ġwhere": 996,
1042
+ "irst": 997,
1043
+ "Ġsoon": 998,
1044
+ "Ġrabbit": 999,
1045
+ "Ġslide": 1000,
1046
+ "Ġhole": 1001,
1047
+ "Ġhear": 1002,
1048
+ "Ġpus": 1003,
1049
+ "irrel": 1004,
1050
+ "ĠSally": 1005,
1051
+ "ear": 1006,
1052
+ "ĠAfter": 1007,
1053
+ "Ġbefore": 1008,
1054
+ "Ġsat": 1009,
1055
+ "air": 1010,
1056
+ "Ġcook": 1011,
1057
+ "Ġhappily": 1012,
1058
+ "ĠFluffy": 1013,
1059
+ "Ġsquirrel": 1014,
1060
+ "Ġuse": 1015,
1061
+ "Ġkept": 1016,
1062
+ "ĠâĢľ": 1017,
1063
+ "Hi": 1018,
1064
+ "ĠKitty": 1019,
1065
+ "âĢĿ": 1020,
1066
+ "ired": 1021,
1067
+ "Ġhelped": 1022,
1068
+ "Ġshowed": 1023
1069
+ },
1070
+ "merges": [
1071
+ "h e",
1072
+ "Ġ t",
1073
+ "Ġ a",
1074
+ "Ġ s",
1075
+ "n d",
1076
+ "Ġ w",
1077
+ "Ġt he",
1078
+ "e d",
1079
+ "Ġ b",
1080
+ "Ġt o",
1081
+ "Ġ T",
1082
+ "Ġa nd",
1083
+ "Ġ h",
1084
+ "Ġ f",
1085
+ "i n",
1086
+ "Ġw a",
1087
+ "r e",
1088
+ "i t",
1089
+ "o u",
1090
+ "Ġ l",
1091
+ "Ġ d",
1092
+ "Ġ c",
1093
+ "Ġ p",
1094
+ "a y",
1095
+ "Ġ m",
1096
+ "e r",
1097
+ "Ġwa s",
1098
+ "o m",
1099
+ "Ġ he",
1100
+ "ĠT he",
1101
+ "i s",
1102
+ "i m",
1103
+ "a r",
1104
+ "Ġ n",
1105
+ "o n",
1106
+ "Ġs a",
1107
+ "Ġ S",
1108
+ "i d",
1109
+ "l l",
1110
+ "Ġh a",
1111
+ "Ġ g",
1112
+ "a t",
1113
+ "in g",
1114
+ "o t",
1115
+ "e n",
1116
+ "a n",
1117
+ "l e",
1118
+ "e nd",
1119
+ "o r",
1120
+ "Ġ \"",
1121
+ "i r",
1122
+ "o f",
1123
+ "Ġ H",
1124
+ "a m",
1125
+ "e t",
1126
+ "Ġ it",
1127
+ "Ġt h",
1128
+ "i g",
1129
+ "i l",
1130
+ "ĠH e",
1131
+ "Ġ in",
1132
+ "Ġp l",
1133
+ "o w",
1134
+ "v er",
1135
+ "r i",
1136
+ "Ġ O",
1137
+ "u t",
1138
+ "ĠThe y",
1139
+ "Ġb e",
1140
+ "Ġ u",
1141
+ "Ġpl ay",
1142
+ "Ġsa id",
1143
+ "it h",
1144
+ "p p",
1145
+ "Ġw ith",
1146
+ "Ġd ay",
1147
+ "Ġ y",
1148
+ "o o",
1149
+ "e x",
1150
+ "Ġ r",
1151
+ "c e",
1152
+ "Ġ I",
1153
+ "c k",
1154
+ "Ġhe r",
1155
+ "l d",
1156
+ "Ġh is",
1157
+ "Ġ L",
1158
+ "k e",
1159
+ "Ġs t",
1160
+ "Ġb ig",
1161
+ "n t",
1162
+ "ver y",
1163
+ "Ġy ou",
1164
+ "s t",
1165
+ "Ġ B",
1166
+ "ĠT im",
1167
+ "n e",
1168
+ "v e",
1169
+ "k ed",
1170
+ "ex t",
1171
+ "Ġha pp",
1172
+ "Ġ on",
1173
+ "ĠS he",
1174
+ "u n",
1175
+ "Ġ M",
1176
+ "il y",
1177
+ "t ext",
1178
+ "a ll",
1179
+ "ri end",
1180
+ "Ġf riend",
1181
+ "Ġthe y",
1182
+ "Ġl i",
1183
+ "Ġw e",
1184
+ "Ġha d",
1185
+ "Ġn ot",
1186
+ "Ġu p",
1187
+ "he r",
1188
+ "Ġwa nt",
1189
+ "Ġ of",
1190
+ "a d",
1191
+ "Ġ <",
1192
+ "s e",
1193
+ "| >",
1194
+ "Ġ< |",
1195
+ "it t",
1196
+ "Ġ e",
1197
+ "Ġd o",
1198
+ "Ġhapp y",
1199
+ "Ġ A",
1200
+ "en t",
1201
+ "Ġ very",
1202
+ "Ġth at",
1203
+ "Ġsa w",
1204
+ "of text",
1205
+ "' s",
1206
+ "ou ld",
1207
+ "Ġm om",
1208
+ "Ġf or",
1209
+ "Ġs h",
1210
+ "e s",
1211
+ "itt le",
1212
+ "Ġl ittle",
1213
+ "Ġs he",
1214
+ "im e",
1215
+ "c h",
1216
+ "Ġ k",
1217
+ "end oftext",
1218
+ "Ġn am",
1219
+ ". \"",
1220
+ "Ġt ime",
1221
+ "ou nd",
1222
+ "Ġs o",
1223
+ "Ġthe re",
1224
+ "Ġnam ed",
1225
+ "Ġb o",
1226
+ "Ġwe re",
1227
+ "Ġn e",
1228
+ "ĠL ily",
1229
+ "Ġwant ed",
1230
+ "ou t",
1231
+ "Ġb ut",
1232
+ "ĠO ne",
1233
+ "n ce",
1234
+ "Ġfriend s",
1235
+ "! \"",
1236
+ "h t",
1237
+ "ir d",
1238
+ "v ed",
1239
+ "Ġs m",
1240
+ "Ġa n",
1241
+ "a l",
1242
+ "Ġb ird",
1243
+ "ĠT om",
1244
+ "e l",
1245
+ "a ke",
1246
+ "u e",
1247
+ "u g",
1248
+ "om e",
1249
+ "ĠI t",
1250
+ "Ġto o",
1251
+ "Ġ Ċ",
1252
+ "id e",
1253
+ "Ġhe l",
1254
+ "Ġw ent",
1255
+ "Ġw h",
1256
+ "Ġhel p",
1257
+ "Ġ is",
1258
+ "Ġa ll",
1259
+ "Ġl oo",
1260
+ "Ġl o",
1261
+ "r y",
1262
+ "t er",
1263
+ "Ġup on",
1264
+ "o re",
1265
+ "i ll",
1266
+ "am e",
1267
+ "i nd",
1268
+ "Ġf un",
1269
+ "Ġto y",
1270
+ "g et",
1271
+ "r a",
1272
+ "Ġa s",
1273
+ "Ġd id",
1274
+ "Ġa t",
1275
+ "Ġ j",
1276
+ "Ġ re",
1277
+ "get her",
1278
+ "u r",
1279
+ "Ġ o",
1280
+ "a ck",
1281
+ "Ġs e",
1282
+ "l y",
1283
+ "Ġto gether",
1284
+ "Ġt re",
1285
+ "Ġc at",
1286
+ "ĠO nce",
1287
+ "oo d",
1288
+ "Ġc ould",
1289
+ "t ed",
1290
+ "Ġdo g",
1291
+ "Ġc an",
1292
+ "Ġthe ir",
1293
+ "e c",
1294
+ "ar d",
1295
+ "ar k",
1296
+ "m y",
1297
+ "Ġg ir",
1298
+ "Ġplay ed",
1299
+ "Ġh im",
1300
+ "Ġb all",
1301
+ "Ġr o",
1302
+ "? \"",
1303
+ "Ġgir l",
1304
+ "a x",
1305
+ "w ay",
1306
+ "Ġg o",
1307
+ "u m",
1308
+ "Ġa re",
1309
+ "Ġ out",
1310
+ "Ġl e",
1311
+ "Ġf r",
1312
+ "a in",
1313
+ "he n",
1314
+ "Ġ W",
1315
+ "' t",
1316
+ "Ġthe m",
1317
+ "Ġsa d",
1318
+ "u l",
1319
+ "Ġbo y",
1320
+ "Ġtre e",
1321
+ "ot her",
1322
+ "Ġha ve",
1323
+ "Ġm an",
1324
+ "he d",
1325
+ "Ġc l",
1326
+ "ou g",
1327
+ "Ġlo ved",
1328
+ "Ġloo ked",
1329
+ "i c",
1330
+ "Ġf ound",
1331
+ "Ġs p",
1332
+ "on e",
1333
+ "Ġst ar",
1334
+ "Ġb ack",
1335
+ "Ġs c",
1336
+ "h ing",
1337
+ "ow n",
1338
+ "Ġli ke",
1339
+ "a re",
1340
+ "f ul",
1341
+ "Ġ J",
1342
+ "s ide",
1343
+ "Ġm e",
1344
+ "ĠB ut",
1345
+ "ig ht",
1346
+ "Ġl a",
1347
+ "on g",
1348
+ "Ġc ar",
1349
+ "Ġp ark",
1350
+ "o p",
1351
+ "i ck",
1352
+ "Ġw ould",
1353
+ "el t",
1354
+ "ĠS ue",
1355
+ "Ġm ake",
1356
+ "e ll",
1357
+ "r ound",
1358
+ "Ġf a",
1359
+ "Ġf elt",
1360
+ "Ġse e",
1361
+ "Ġn o",
1362
+ "Ġas ked",
1363
+ "a g",
1364
+ "om et",
1365
+ "ĠM ax",
1366
+ "Ġne w",
1367
+ "i ce",
1368
+ "Ġ F",
1369
+ "ou se",
1370
+ "Ġstar ted",
1371
+ "Ġc ame",
1372
+ "Ġ other",
1373
+ "ar ed",
1374
+ "Ġs ay",
1375
+ "Ġa l",
1376
+ "s s",
1377
+ "p ot",
1378
+ "o b",
1379
+ "oug ht",
1380
+ "Ġs omet",
1381
+ "Ġa g",
1382
+ "Ġg ood",
1383
+ "Ġsm all",
1384
+ "Ġb r",
1385
+ "i a",
1386
+ "ĠB en",
1387
+ "ad e",
1388
+ "Ġbe c",
1389
+ "ĠS am",
1390
+ "ing s",
1391
+ "ri ed",
1392
+ "Ġw or",
1393
+ "Ġf ind",
1394
+ "Ġsm il",
1395
+ "Ġ ex",
1396
+ "Ġwa l",
1397
+ "Ġa way",
1398
+ "Ġp ut",
1399
+ "Ġli ked",
1400
+ "t y",
1401
+ "oo k",
1402
+ "Ġwh at",
1403
+ "Ġc o",
1404
+ "Ġsomet hing",
1405
+ "Ġfr om",
1406
+ "Ġm ade",
1407
+ "Ġth ought",
1408
+ "Ġh ome",
1409
+ "Ġe very",
1410
+ "Y ou",
1411
+ "Ġplay ing",
1412
+ "Ġla ug",
1413
+ "Ġm u",
1414
+ "a ch",
1415
+ "i le",
1416
+ "u c",
1417
+ "ar n",
1418
+ "i e",
1419
+ "Ġr an",
1420
+ "a ve",
1421
+ "Ġs ome",
1422
+ "Ġag ain",
1423
+ "n ow",
1424
+ "Ġf l",
1425
+ "n y",
1426
+ "Ġh ouse",
1427
+ "Ġd own",
1428
+ "k ing",
1429
+ "u re",
1430
+ "Ġtoo k",
1431
+ "Ġsc ared",
1432
+ "Ġp r",
1433
+ "Ġtoy s",
1434
+ "Ġle arn",
1435
+ "Ġsmil ed",
1436
+ "i f",
1437
+ "en ed",
1438
+ "Ġw ill",
1439
+ "Ġbo x",
1440
+ "d d",
1441
+ "re t",
1442
+ "a b",
1443
+ "e p",
1444
+ "Ġm y",
1445
+ "u ck",
1446
+ "Ġb l",
1447
+ "Ġth ings",
1448
+ "Ġa round",
1449
+ "Ġyou r",
1450
+ "ou d",
1451
+ "is h",
1452
+ "Ġf e",
1453
+ "Ġli ved",
1454
+ "Ġs un",
1455
+ "Ġthe n",
1456
+ "an k",
1457
+ "a s",
1458
+ "ĠS pot",
1459
+ ", \"",
1460
+ "u s",
1461
+ "Ġw hen",
1462
+ "Ġc h",
1463
+ "h at",
1464
+ "Ġl ot",
1465
+ "Ġs w",
1466
+ "um p",
1467
+ "Ġa b",
1468
+ "ĠS o",
1469
+ "Ġ D",
1470
+ "pp ed",
1471
+ "Ġg et",
1472
+ "is t",
1473
+ "uc y",
1474
+ "ĠL ucy",
1475
+ "a p",
1476
+ "ot h",
1477
+ "u st",
1478
+ "Ġt ried",
1479
+ "Ġk now",
1480
+ "Ġg ot",
1481
+ "Ġd ec",
1482
+ "Ġwh o",
1483
+ "ĠM ia",
1484
+ "ne w",
1485
+ "Ġman y",
1486
+ "nd er",
1487
+ "Ġan y",
1488
+ "Ġin t",
1489
+ "Ġab out",
1490
+ "it ed",
1491
+ "Ġp ret",
1492
+ "a u",
1493
+ "au se",
1494
+ "all y",
1495
+ "i ve",
1496
+ "Ġ You",
1497
+ "Ġdec id",
1498
+ "Ġr ed",
1499
+ "a ce",
1500
+ "f ter",
1501
+ "Ġm ore",
1502
+ "ou s",
1503
+ "is e",
1504
+ "ĠM om",
1505
+ "Ġ v",
1506
+ "Ġ E",
1507
+ "q u",
1508
+ "Ġc are",
1509
+ "Ġh ug",
1510
+ "Ġp o",
1511
+ "Ġsh ow",
1512
+ "Ġlearn ed",
1513
+ "Ġwa ter",
1514
+ "ur p",
1515
+ "Ġbe st",
1516
+ "Ġloo k",
1517
+ "Ġj ump",
1518
+ "Ġu n",
1519
+ "Ġo p",
1520
+ "Ġg re",
1521
+ "Ġex c",
1522
+ "way s",
1523
+ "â Ģ",
1524
+ "Ġout side",
1525
+ "Ġal ways",
1526
+ "an t",
1527
+ "f e",
1528
+ "Ġro om",
1529
+ "Ġint o",
1530
+ "Ġh o",
1531
+ "Ġe at",
1532
+ "n n",
1533
+ "it e",
1534
+ "Ġp e",
1535
+ "Ġb oth",
1536
+ "Ġd ad",
1537
+ "Ġ ke",
1538
+ "ow er",
1539
+ "Ġsay s",
1540
+ "Ġon e",
1541
+ "Ġfe el",
1542
+ "Ġexc ited",
1543
+ "Ġn ice",
1544
+ "Ġth is",
1545
+ "ĠB ob",
1546
+ "Ġfa st",
1547
+ "u dd",
1548
+ "Ġr un",
1549
+ "urp r",
1550
+ "Ġl ong",
1551
+ "Ġs urpr",
1552
+ "nn a",
1553
+ "Ġs or",
1554
+ "Ġa m",
1555
+ "Ġto ld",
1556
+ "Ġs k",
1557
+ "ou r",
1558
+ "Ġm o",
1559
+ "Ġ C",
1560
+ "ar a",
1561
+ "Ġin side",
1562
+ "Ġt r",
1563
+ "u ll",
1564
+ "in k",
1565
+ "Ġk new",
1566
+ "im al",
1567
+ "Ġan imal",
1568
+ "Ġth an",
1569
+ "Ġpret ty",
1570
+ "in y",
1571
+ "o g",
1572
+ "Ġro ck",
1573
+ "Ġg ra",
1574
+ "Ġp ick",
1575
+ "Ġt ake",
1576
+ "h ank",
1577
+ "ĠA nd",
1578
+ "Ġe ach",
1579
+ "Ġg ave",
1580
+ "Ġbec ame",
1581
+ "Ġs l",
1582
+ "Ġmu ch",
1583
+ "Ġst r",
1584
+ "Ġh ow",
1585
+ "Ġne ed",
1586
+ "Ġlaug hed",
1587
+ "Ġto w",
1588
+ "v en",
1589
+ "et ter",
1590
+ "Ġ or",
1591
+ "ĠA s",
1592
+ "Ġdecid ed",
1593
+ "Ġbec ause",
1594
+ "Ġu nder",
1595
+ "le w",
1596
+ "g ed",
1597
+ "Ġo ld",
1598
+ "ĠThe n",
1599
+ "g e",
1600
+ "r o",
1601
+ "he re",
1602
+ "r om",
1603
+ "ur t",
1604
+ "Ġc le",
1605
+ "Ġf ish",
1606
+ "a se",
1607
+ "a nd",
1608
+ "Ġfl ower",
1609
+ "Ġcl o",
1610
+ "a st",
1611
+ "Ġbe ar",
1612
+ "ĠW hen",
1613
+ "es s",
1614
+ "Y es",
1615
+ "Ġl ist",
1616
+ "Ġha nd",
1617
+ "p l",
1618
+ "ur n",
1619
+ "Ġk ind",
1620
+ "ĠW e",
1621
+ "Ġt ry",
1622
+ "Ġj ust",
1623
+ "Ġt e",
1624
+ "Ġf ood",
1625
+ "Ġne ar",
1626
+ "Ġh ig",
1627
+ "in e",
1628
+ "Ġwa t",
1629
+ "Ġ ide",
1630
+ "ĠI n",
1631
+ "Ġsk y",
1632
+ "Ġf i",
1633
+ "ĠH is",
1634
+ "v ing",
1635
+ "p ec",
1636
+ "Ġu s",
1637
+ "m ore",
1638
+ "Ġide a",
1639
+ "Ġb etter",
1640
+ "Ġt w",
1641
+ "Ġb ug",
1642
+ "Ġhe ard",
1643
+ "g ry",
1644
+ "Ġit s",
1645
+ "at e",
1646
+ "Ġ en",
1647
+ "ab le",
1648
+ "Ġl et",
1649
+ "f f",
1650
+ "b er",
1651
+ "Ġcare ful",
1652
+ "Ġsh are",
1653
+ "Ġsurpr is",
1654
+ "l f",
1655
+ "Ġ if",
1656
+ "Ġf ly",
1657
+ "ĠA nna",
1658
+ "Ġst or",
1659
+ "ĠA my",
1660
+ "i al",
1661
+ "Ġf lew",
1662
+ "i on",
1663
+ "Ġ K",
1664
+ "Ġc om",
1665
+ "Ġsp ec",
1666
+ "Ġd an",
1667
+ "Ġspec ial",
1668
+ "ĠS ara",
1669
+ "h is",
1670
+ "re am",
1671
+ "Ġf o",
1672
+ "Ġne ver",
1673
+ "c hed",
1674
+ "Ġb y",
1675
+ "Ġsor ry",
1676
+ "Ġb u",
1677
+ "Ġg r",
1678
+ "Ġw ind",
1679
+ "Ġlo ve",
1680
+ "Ġd on",
1681
+ "Ġco l",
1682
+ "or t",
1683
+ "r m",
1684
+ "Ġt al",
1685
+ "Ġcle an",
1686
+ "Ġ end",
1687
+ "pec ted",
1688
+ "ex pected",
1689
+ "Ġe ven",
1690
+ "op le",
1691
+ "Ġm ag",
1692
+ "Ġwal ked",
1693
+ "Ġh ard",
1694
+ "Ġsh iny",
1695
+ "Ġo ver",
1696
+ "Ġcol or",
1697
+ "Ġc ake",
1698
+ "a k",
1699
+ "Ġb ook",
1700
+ "Ġt urn",
1701
+ "Ġf ore",
1702
+ "h y",
1703
+ "Ġf am",
1704
+ "Ġb ad",
1705
+ "im b",
1706
+ "Ġcl imb",
1707
+ "Ġsa fe",
1708
+ "Ġa fter",
1709
+ "ad y",
1710
+ "Ġpr oud",
1711
+ "b b",
1712
+ "Ġpe ople",
1713
+ "Ġh urt",
1714
+ "Ġhapp ened",
1715
+ "Ġc u",
1716
+ "Ġhig h",
1717
+ "ĠH er",
1718
+ "udd en",
1719
+ "Ġdid n",
1720
+ "Ġl oud",
1721
+ "Ġc ome",
1722
+ "Ġc he",
1723
+ "ard en",
1724
+ "Ġdo or",
1725
+ "Ġg arden",
1726
+ "Ġanimal s",
1727
+ "Ġg l",
1728
+ "Ġop ened",
1729
+ "Ġg round",
1730
+ "Ġpick ed",
1731
+ "Ġ P",
1732
+ "Ġ im",
1733
+ "Ġg ive",
1734
+ "a il",
1735
+ "il d",
1736
+ "ĠF rom",
1737
+ "Ġwa y",
1738
+ "Ġbl ue",
1739
+ "' m",
1740
+ "Ġst ill",
1741
+ "Ġe ver",
1742
+ "Ġ N",
1743
+ "Ġhug ged",
1744
+ "i z",
1745
+ "Ġc all",
1746
+ "Ġf ar",
1747
+ "i p",
1748
+ "ag e",
1749
+ "Ġof f",
1750
+ "Ġmag ic",
1751
+ "oug h",
1752
+ "T hank",
1753
+ "k ay",
1754
+ "ĠE very",
1755
+ "Ġsh ould",
1756
+ "Ġany more",
1757
+ "an e",
1758
+ "Ġp ar",
1759
+ "Ġfam ily",
1760
+ "oo l",
1761
+ "Ġst ay",
1762
+ "Ġpl ace",
1763
+ "Ġk id",
1764
+ "Ġp ic",
1765
+ "udden ly",
1766
+ "Ġlot s",
1767
+ "Ġ qu",
1768
+ "u ff",
1769
+ "o ck",
1770
+ "c t",
1771
+ "Ġwal k",
1772
+ "Ġn ow",
1773
+ "Ġgre at",
1774
+ "e m",
1775
+ "t ure",
1776
+ "Ġstr ong",
1777
+ "b e",
1778
+ "d y",
1779
+ "Ġu nt",
1780
+ "a ut",
1781
+ "Ġfore st",
1782
+ "le ase",
1783
+ "N o",
1784
+ "Ġb ra",
1785
+ "Ġfr og",
1786
+ "er s",
1787
+ "Ġbr o",
1788
+ "Ġunt il",
1789
+ "Ġbe aut",
1790
+ "o pped",
1791
+ "Ġs qu",
1792
+ "Ġst ick",
1793
+ "Ġn ext",
1794
+ "Ġbo at",
1795
+ "Ġa pp",
1796
+ "n ing",
1797
+ "Ġlist en",
1798
+ "o y",
1799
+ "Ġt ra",
1800
+ "he s",
1801
+ "Ġpic ture",
1802
+ "Ġkid s",
1803
+ "Ġy um",
1804
+ "a king",
1805
+ "Ġbeaut i",
1806
+ "Ġbeauti ful",
1807
+ "ar y",
1808
+ "Ġsurpris ed",
1809
+ "Ġd ra",
1810
+ "L et",
1811
+ "Ġclo s",
1812
+ "Ġstor y",
1813
+ "Ġ R",
1814
+ "Ġr ain",
1815
+ "Ġwh ile",
1816
+ "Ġim p",
1817
+ "re l",
1818
+ "Ġw o",
1819
+ "Ġtow n",
1820
+ "p t",
1821
+ "Ġun expected",
1822
+ "re ss",
1823
+ "Ġs of",
1824
+ "ĠA t",
1825
+ "un ny",
1826
+ "Ġbe ing",
1827
+ "Ġha t",
1828
+ "re e",
1829
+ "Ġm or",
1830
+ "Ġthan ked",
1831
+ "Ġa d",
1832
+ "Ġcall ed",
1833
+ "dd ed",
1834
+ "Ġre m",
1835
+ "Ġm et",
1836
+ "i es",
1837
+ "Ġc ry",
1838
+ "Ġg ame",
1839
+ "b y",
1840
+ "Ġop en",
1841
+ "g er",
1842
+ "Ġan gry",
1843
+ "Ġtr uck",
1844
+ "itt y",
1845
+ "Ġsof t",
1846
+ "Ġke ep",
1847
+ "Ġjump ed",
1848
+ "v ent",
1849
+ "le s",
1850
+ "Ġlo st",
1851
+ "Ġb ed",
1852
+ "se lf",
1853
+ "uff y",
1854
+ "Ġdo ll",
1855
+ "Ġwa rm",
1856
+ "o on",
1857
+ "ill y",
1858
+ "Ġbra ve",
1859
+ "Ġsm ile",
1860
+ "ĠJ o",
1861
+ "Ġm ouse",
1862
+ "Ġat e",
1863
+ "Ġd e",
1864
+ "I t",
1865
+ "f ore",
1866
+ "s o",
1867
+ "Ġwat ch",
1868
+ "is s",
1869
+ "Ġle ave",
1870
+ "a z",
1871
+ "Ġal so",
1872
+ "Ġf ell",
1873
+ "Ġs n",
1874
+ "ĠS uddenly",
1875
+ "Ġgre en",
1876
+ "Ġtw o",
1877
+ "Ġfa ce",
1878
+ "Ġ G",
1879
+ "m a",
1880
+ "g an",
1881
+ "b it",
1882
+ "b le",
1883
+ "Ġ âĢ",
1884
+ "ĠF l",
1885
+ "ab bit",
1886
+ "Ġno ise",
1887
+ "ow l",
1888
+ "Ġex pl",
1889
+ "Ġw here",
1890
+ "ir st",
1891
+ "Ġso on",
1892
+ "Ġr abbit",
1893
+ "Ġsl ide",
1894
+ "Ġho le",
1895
+ "Ġhe ar",
1896
+ "Ġp us",
1897
+ "ir rel",
1898
+ "ĠS ally",
1899
+ "e ar",
1900
+ "ĠA fter",
1901
+ "Ġbe fore",
1902
+ "Ġsa t",
1903
+ "a ir",
1904
+ "Ġc ook",
1905
+ "Ġhapp ily",
1906
+ "ĠFl uffy",
1907
+ "Ġsqu irrel",
1908
+ "Ġu se",
1909
+ "Ġke pt",
1910
+ "ĠâĢ ľ",
1911
+ "H i",
1912
+ "ĠK itty",
1913
+ "âĢ Ŀ",
1914
+ "ir ed",
1915
+ "Ġhelp ed",
1916
+ "Ġshow ed"
1917
+ ]
1918
+ }
1919
+ }
tokenizer_config.json ADDED
@@ -0,0 +1,15 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "added_tokens_decoder": {
3
+ "1024": {
4
+ "content": "<|endoftext|>",
5
+ "lstrip": false,
6
+ "normalized": true,
7
+ "rstrip": false,
8
+ "single_word": false,
9
+ "special": true
10
+ }
11
+ },
12
+ "clean_up_tokenization_spaces": true,
13
+ "model_max_length": 1000000000000000019884624838656,
14
+ "tokenizer_class": "PreTrainedTokenizerFast"
15
+ }
train_results.json ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "epoch": 1.0,
3
+ "total_flos": 3.18502967574528e+17,
4
+ "train_loss": 1.5702329817773064,
5
+ "train_runtime": 7426.3864,
6
+ "train_samples": 609477,
7
+ "train_samples_per_second": 82.069,
8
+ "train_steps_per_second": 2.565
9
+ }
trainer_state.json ADDED
@@ -0,0 +1,479 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ {
2
+ "best_metric": null,
3
+ "best_model_checkpoint": null,
4
+ "epoch": 1.0,
5
+ "eval_steps": 1000,
6
+ "global_step": 19047,
7
+ "is_hyper_param_search": false,
8
+ "is_local_process_zero": true,
9
+ "is_world_process_zero": true,
10
+ "log_history": [
11
+ {
12
+ "epoch": 0.026250853152727464,
13
+ "grad_norm": 1.8290232419967651,
14
+ "learning_rate": 4.868745734236363e-05,
15
+ "loss": 4.3877,
16
+ "step": 500
17
+ },
18
+ {
19
+ "epoch": 0.05250170630545493,
20
+ "grad_norm": 1.7457512617111206,
21
+ "learning_rate": 4.7374914684727256e-05,
22
+ "loss": 2.9424,
23
+ "step": 1000
24
+ },
25
+ {
26
+ "epoch": 0.05250170630545493,
27
+ "eval_accuracy": 0.4374649063701957,
28
+ "eval_loss": 2.49202299118042,
29
+ "eval_runtime": 52.9352,
30
+ "eval_samples_per_second": 115.764,
31
+ "eval_steps_per_second": 3.627,
32
+ "step": 1000
33
+ },
34
+ {
35
+ "epoch": 0.0787525594581824,
36
+ "grad_norm": 2.0498085021972656,
37
+ "learning_rate": 4.606237202709088e-05,
38
+ "loss": 2.3234,
39
+ "step": 1500
40
+ },
41
+ {
42
+ "epoch": 0.10500341261090985,
43
+ "grad_norm": 2.0251238346099854,
44
+ "learning_rate": 4.474982936945451e-05,
45
+ "loss": 1.9972,
46
+ "step": 2000
47
+ },
48
+ {
49
+ "epoch": 0.10500341261090985,
50
+ "eval_accuracy": 0.5644156336378184,
51
+ "eval_loss": 1.816135287284851,
52
+ "eval_runtime": 52.6607,
53
+ "eval_samples_per_second": 116.368,
54
+ "eval_steps_per_second": 3.646,
55
+ "step": 2000
56
+ },
57
+ {
58
+ "epoch": 0.13125426576363733,
59
+ "grad_norm": 2.362931966781616,
60
+ "learning_rate": 4.343728671181814e-05,
61
+ "loss": 1.8434,
62
+ "step": 2500
63
+ },
64
+ {
65
+ "epoch": 0.1575051189163648,
66
+ "grad_norm": 1.366226315498352,
67
+ "learning_rate": 4.2124744054181764e-05,
68
+ "loss": 1.7424,
69
+ "step": 3000
70
+ },
71
+ {
72
+ "epoch": 0.1575051189163648,
73
+ "eval_accuracy": 0.5978558749288556,
74
+ "eval_loss": 1.6274888515472412,
75
+ "eval_runtime": 52.8918,
76
+ "eval_samples_per_second": 115.859,
77
+ "eval_steps_per_second": 3.63,
78
+ "step": 3000
79
+ },
80
+ {
81
+ "epoch": 0.18375597206909225,
82
+ "grad_norm": 1.248099446296692,
83
+ "learning_rate": 4.081220139654539e-05,
84
+ "loss": 1.6762,
85
+ "step": 3500
86
+ },
87
+ {
88
+ "epoch": 0.2100068252218197,
89
+ "grad_norm": 1.2411609888076782,
90
+ "learning_rate": 3.949965873890902e-05,
91
+ "loss": 1.6233,
92
+ "step": 4000
93
+ },
94
+ {
95
+ "epoch": 0.2100068252218197,
96
+ "eval_accuracy": 0.6161887871386313,
97
+ "eval_loss": 1.5268635749816895,
98
+ "eval_runtime": 52.4436,
99
+ "eval_samples_per_second": 116.849,
100
+ "eval_steps_per_second": 3.661,
101
+ "step": 4000
102
+ },
103
+ {
104
+ "epoch": 0.23625767837454717,
105
+ "grad_norm": 1.3604716062545776,
106
+ "learning_rate": 3.8187116081272645e-05,
107
+ "loss": 1.5804,
108
+ "step": 4500
109
+ },
110
+ {
111
+ "epoch": 0.26250853152727466,
112
+ "grad_norm": 1.4850926399230957,
113
+ "learning_rate": 3.687457342363627e-05,
114
+ "loss": 1.5467,
115
+ "step": 5000
116
+ },
117
+ {
118
+ "epoch": 0.26250853152727466,
119
+ "eval_accuracy": 0.6283651600652359,
120
+ "eval_loss": 1.4624590873718262,
121
+ "eval_runtime": 52.7312,
122
+ "eval_samples_per_second": 116.212,
123
+ "eval_steps_per_second": 3.641,
124
+ "step": 5000
125
+ },
126
+ {
127
+ "epoch": 0.2887593846800021,
128
+ "grad_norm": 1.1516271829605103,
129
+ "learning_rate": 3.55620307659999e-05,
130
+ "loss": 1.518,
131
+ "step": 5500
132
+ },
133
+ {
134
+ "epoch": 0.3150102378327296,
135
+ "grad_norm": 1.2089773416519165,
136
+ "learning_rate": 3.4249488108363525e-05,
137
+ "loss": 1.4918,
138
+ "step": 6000
139
+ },
140
+ {
141
+ "epoch": 0.3150102378327296,
142
+ "eval_accuracy": 0.6369396504419245,
143
+ "eval_loss": 1.4186700582504272,
144
+ "eval_runtime": 52.6824,
145
+ "eval_samples_per_second": 116.32,
146
+ "eval_steps_per_second": 3.644,
147
+ "step": 6000
148
+ },
149
+ {
150
+ "epoch": 0.34126109098545704,
151
+ "grad_norm": 1.18433678150177,
152
+ "learning_rate": 3.293694545072715e-05,
153
+ "loss": 1.4721,
154
+ "step": 6500
155
+ },
156
+ {
157
+ "epoch": 0.3675119441381845,
158
+ "grad_norm": 1.1634047031402588,
159
+ "learning_rate": 3.162440279309078e-05,
160
+ "loss": 1.4537,
161
+ "step": 7000
162
+ },
163
+ {
164
+ "epoch": 0.3675119441381845,
165
+ "eval_accuracy": 0.6438454387214179,
166
+ "eval_loss": 1.382287859916687,
167
+ "eval_runtime": 53.055,
168
+ "eval_samples_per_second": 115.503,
169
+ "eval_steps_per_second": 3.619,
170
+ "step": 7000
171
+ },
172
+ {
173
+ "epoch": 0.39376279729091196,
174
+ "grad_norm": 1.172624111175537,
175
+ "learning_rate": 3.0311860135454406e-05,
176
+ "loss": 1.4409,
177
+ "step": 7500
178
+ },
179
+ {
180
+ "epoch": 0.4200136504436394,
181
+ "grad_norm": 1.242315649986267,
182
+ "learning_rate": 2.8999317477818033e-05,
183
+ "loss": 1.4228,
184
+ "step": 8000
185
+ },
186
+ {
187
+ "epoch": 0.4200136504436394,
188
+ "eval_accuracy": 0.649608131768285,
189
+ "eval_loss": 1.3529176712036133,
190
+ "eval_runtime": 52.82,
191
+ "eval_samples_per_second": 116.017,
192
+ "eval_steps_per_second": 3.635,
193
+ "step": 8000
194
+ },
195
+ {
196
+ "epoch": 0.4462645035963669,
197
+ "grad_norm": 1.1905977725982666,
198
+ "learning_rate": 2.7686774820181653e-05,
199
+ "loss": 1.4123,
200
+ "step": 8500
201
+ },
202
+ {
203
+ "epoch": 0.47251535674909434,
204
+ "grad_norm": 1.108475685119629,
205
+ "learning_rate": 2.637423216254528e-05,
206
+ "loss": 1.3987,
207
+ "step": 9000
208
+ },
209
+ {
210
+ "epoch": 0.47251535674909434,
211
+ "eval_accuracy": 0.6543280654604667,
212
+ "eval_loss": 1.3283472061157227,
213
+ "eval_runtime": 52.9443,
214
+ "eval_samples_per_second": 115.744,
215
+ "eval_steps_per_second": 3.626,
216
+ "step": 9000
217
+ },
218
+ {
219
+ "epoch": 0.4987662099018218,
220
+ "grad_norm": 1.173604965209961,
221
+ "learning_rate": 2.5061689504908907e-05,
222
+ "loss": 1.3864,
223
+ "step": 9500
224
+ },
225
+ {
226
+ "epoch": 0.5250170630545493,
227
+ "grad_norm": 1.187168836593628,
228
+ "learning_rate": 2.3749146847272537e-05,
229
+ "loss": 1.378,
230
+ "step": 10000
231
+ },
232
+ {
233
+ "epoch": 0.5250170630545493,
234
+ "eval_accuracy": 0.6578261027694616,
235
+ "eval_loss": 1.3098640441894531,
236
+ "eval_runtime": 52.7359,
237
+ "eval_samples_per_second": 116.202,
238
+ "eval_steps_per_second": 3.641,
239
+ "step": 10000
240
+ },
241
+ {
242
+ "epoch": 0.5512679162072768,
243
+ "grad_norm": 1.1469584703445435,
244
+ "learning_rate": 2.2436604189636164e-05,
245
+ "loss": 1.3679,
246
+ "step": 10500
247
+ },
248
+ {
249
+ "epoch": 0.5775187693600042,
250
+ "grad_norm": 1.1091519594192505,
251
+ "learning_rate": 2.112406153199979e-05,
252
+ "loss": 1.3567,
253
+ "step": 11000
254
+ },
255
+ {
256
+ "epoch": 0.5775187693600042,
257
+ "eval_accuracy": 0.6610596617229313,
258
+ "eval_loss": 1.2938392162322998,
259
+ "eval_runtime": 53.1407,
260
+ "eval_samples_per_second": 115.317,
261
+ "eval_steps_per_second": 3.613,
262
+ "step": 11000
263
+ },
264
+ {
265
+ "epoch": 0.6037696225127317,
266
+ "grad_norm": 1.1372675895690918,
267
+ "learning_rate": 1.9811518874363418e-05,
268
+ "loss": 1.3486,
269
+ "step": 11500
270
+ },
271
+ {
272
+ "epoch": 0.6300204756654592,
273
+ "grad_norm": 1.1002944707870483,
274
+ "learning_rate": 1.8498976216727045e-05,
275
+ "loss": 1.3436,
276
+ "step": 12000
277
+ },
278
+ {
279
+ "epoch": 0.6300204756654592,
280
+ "eval_accuracy": 0.6642933801929001,
281
+ "eval_loss": 1.2784197330474854,
282
+ "eval_runtime": 54.2879,
283
+ "eval_samples_per_second": 112.88,
284
+ "eval_steps_per_second": 3.537,
285
+ "step": 12000
286
+ },
287
+ {
288
+ "epoch": 0.6562713288181866,
289
+ "grad_norm": 1.1172466278076172,
290
+ "learning_rate": 1.718643355909067e-05,
291
+ "loss": 1.3349,
292
+ "step": 12500
293
+ },
294
+ {
295
+ "epoch": 0.6825221819709141,
296
+ "grad_norm": 1.129073143005371,
297
+ "learning_rate": 1.5873890901454296e-05,
298
+ "loss": 1.328,
299
+ "step": 13000
300
+ },
301
+ {
302
+ "epoch": 0.6825221819709141,
303
+ "eval_accuracy": 0.6665915343955856,
304
+ "eval_loss": 1.2663512229919434,
305
+ "eval_runtime": 52.6536,
306
+ "eval_samples_per_second": 116.383,
307
+ "eval_steps_per_second": 3.646,
308
+ "step": 13000
309
+ },
310
+ {
311
+ "epoch": 0.7087730351236415,
312
+ "grad_norm": 1.1130038499832153,
313
+ "learning_rate": 1.4561348243817924e-05,
314
+ "loss": 1.3209,
315
+ "step": 13500
316
+ },
317
+ {
318
+ "epoch": 0.735023888276369,
319
+ "grad_norm": 1.1710587739944458,
320
+ "learning_rate": 1.3248805586181551e-05,
321
+ "loss": 1.3157,
322
+ "step": 14000
323
+ },
324
+ {
325
+ "epoch": 0.735023888276369,
326
+ "eval_accuracy": 0.669028148919499,
327
+ "eval_loss": 1.2545733451843262,
328
+ "eval_runtime": 53.0376,
329
+ "eval_samples_per_second": 115.541,
330
+ "eval_steps_per_second": 3.62,
331
+ "step": 14000
332
+ },
333
+ {
334
+ "epoch": 0.7612747414290965,
335
+ "grad_norm": 1.1190043687820435,
336
+ "learning_rate": 1.1936262928545178e-05,
337
+ "loss": 1.3127,
338
+ "step": 14500
339
+ },
340
+ {
341
+ "epoch": 0.7875255945818239,
342
+ "grad_norm": 1.1405632495880127,
343
+ "learning_rate": 1.0623720270908805e-05,
344
+ "loss": 1.3052,
345
+ "step": 15000
346
+ },
347
+ {
348
+ "epoch": 0.7875255945818239,
349
+ "eval_accuracy": 0.6706863229277531,
350
+ "eval_loss": 1.2461665868759155,
351
+ "eval_runtime": 52.9855,
352
+ "eval_samples_per_second": 115.654,
353
+ "eval_steps_per_second": 3.624,
354
+ "step": 15000
355
+ },
356
+ {
357
+ "epoch": 0.8137764477345514,
358
+ "grad_norm": 1.159489393234253,
359
+ "learning_rate": 9.311177613272432e-06,
360
+ "loss": 1.3019,
361
+ "step": 15500
362
+ },
363
+ {
364
+ "epoch": 0.8400273008872788,
365
+ "grad_norm": 1.077958345413208,
366
+ "learning_rate": 7.998634955636059e-06,
367
+ "loss": 1.2966,
368
+ "step": 16000
369
+ },
370
+ {
371
+ "epoch": 0.8400273008872788,
372
+ "eval_accuracy": 0.6726187057979781,
373
+ "eval_loss": 1.2370483875274658,
374
+ "eval_runtime": 53.1969,
375
+ "eval_samples_per_second": 115.195,
376
+ "eval_steps_per_second": 3.609,
377
+ "step": 16000
378
+ },
379
+ {
380
+ "epoch": 0.8662781540400063,
381
+ "grad_norm": 1.1217402219772339,
382
+ "learning_rate": 6.686092297999686e-06,
383
+ "loss": 1.2936,
384
+ "step": 16500
385
+ },
386
+ {
387
+ "epoch": 0.8925290071927338,
388
+ "grad_norm": 1.1466121673583984,
389
+ "learning_rate": 5.373549640363313e-06,
390
+ "loss": 1.2943,
391
+ "step": 17000
392
+ },
393
+ {
394
+ "epoch": 0.8925290071927338,
395
+ "eval_accuracy": 0.6742480073198931,
396
+ "eval_loss": 1.2296273708343506,
397
+ "eval_runtime": 52.8045,
398
+ "eval_samples_per_second": 116.051,
399
+ "eval_steps_per_second": 3.636,
400
+ "step": 17000
401
+ },
402
+ {
403
+ "epoch": 0.9187798603454612,
404
+ "grad_norm": 1.1291422843933105,
405
+ "learning_rate": 4.061006982726939e-06,
406
+ "loss": 1.2877,
407
+ "step": 17500
408
+ },
409
+ {
410
+ "epoch": 0.9450307134981887,
411
+ "grad_norm": 1.1145981550216675,
412
+ "learning_rate": 2.7484643250905653e-06,
413
+ "loss": 1.2837,
414
+ "step": 18000
415
+ },
416
+ {
417
+ "epoch": 0.9450307134981887,
418
+ "eval_accuracy": 0.6753689297591429,
419
+ "eval_loss": 1.224404215812683,
420
+ "eval_runtime": 53.0184,
421
+ "eval_samples_per_second": 115.583,
422
+ "eval_steps_per_second": 3.621,
423
+ "step": 18000
424
+ },
425
+ {
426
+ "epoch": 0.9712815666509161,
427
+ "grad_norm": 1.0810720920562744,
428
+ "learning_rate": 1.4359216674541924e-06,
429
+ "loss": 1.2835,
430
+ "step": 18500
431
+ },
432
+ {
433
+ "epoch": 0.9975324198036436,
434
+ "grad_norm": 1.1252995729446411,
435
+ "learning_rate": 1.233790098178191e-07,
436
+ "loss": 1.2825,
437
+ "step": 19000
438
+ },
439
+ {
440
+ "epoch": 0.9975324198036436,
441
+ "eval_accuracy": 0.6759672761473065,
442
+ "eval_loss": 1.2216600179672241,
443
+ "eval_runtime": 52.7997,
444
+ "eval_samples_per_second": 116.061,
445
+ "eval_steps_per_second": 3.636,
446
+ "step": 19000
447
+ },
448
+ {
449
+ "epoch": 1.0,
450
+ "step": 19047,
451
+ "total_flos": 3.18502967574528e+17,
452
+ "train_loss": 1.5702329817773064,
453
+ "train_runtime": 7426.3864,
454
+ "train_samples_per_second": 82.069,
455
+ "train_steps_per_second": 2.565
456
+ }
457
+ ],
458
+ "logging_steps": 500,
459
+ "max_steps": 19047,
460
+ "num_input_tokens_seen": 0,
461
+ "num_train_epochs": 1,
462
+ "save_steps": 1000,
463
+ "stateful_callbacks": {
464
+ "TrainerControl": {
465
+ "args": {
466
+ "should_epoch_stop": false,
467
+ "should_evaluate": false,
468
+ "should_log": false,
469
+ "should_save": true,
470
+ "should_training_stop": true
471
+ },
472
+ "attributes": {}
473
+ }
474
+ },
475
+ "total_flos": 3.18502967574528e+17,
476
+ "train_batch_size": 32,
477
+ "trial_name": null,
478
+ "trial_params": null
479
+ }
training_args.bin ADDED
@@ -0,0 +1,3 @@
 
 
 
 
1
+ version https://git-lfs.github.com/spec/v1
2
+ oid sha256:cf702862710b5bc311637f38f103bf1b146017dede9a874ef882b58c8aa0db0c
3
+ size 5176